diff --git a/.editorconfig b/.editorconfig
new file mode 100644
index 0000000..a56f2d2
--- /dev/null
+++ b/.editorconfig
@@ -0,0 +1,10 @@
+# https://editorconfig.org
+root = true
+
+[*]
+indent_style = space
+indent_size = 2
+end_of_line = lf
+charset = utf-8
+trim_trailing_whitespace = true
+insert_final_newline = true
diff --git a/.envrc b/.envrc
new file mode 100644
index 0000000..3550a30
--- /dev/null
+++ b/.envrc
@@ -0,0 +1 @@
+use flake
diff --git a/.eslintrc.json b/.eslintrc.json
new file mode 100644
index 0000000..5966041
--- /dev/null
+++ b/.eslintrc.json
@@ -0,0 +1,74 @@
+{
+  "plugins": ["@typescript-eslint"],
+  "extends": ["plugin:github/recommended"],
+  "parser": "@typescript-eslint/parser",
+  "parserOptions": {
+    "ecmaVersion": 9,
+    "sourceType": "module",
+    "project": "./tsconfig.json"
+  },
+  "settings": {
+    "import/resolver": {
+      "typescript": {}
+    }
+  },
+  "rules": {
+    "i18n-text/no-en": "off",
+    "eslint-comments/no-use": "off",
+    "import/no-namespace": "off",
+    "no-unused-vars": "off",
+    "@typescript-eslint/no-unused-vars": [
+      "error",
+      {
+        "argsIgnorePattern": "^_"
+      }
+    ],
+    "@typescript-eslint/explicit-member-accessibility": [
+      "error",
+      {
+        "accessibility": "no-public"
+      }
+    ],
+    "@typescript-eslint/no-base-to-string": "error",
+    "@typescript-eslint/no-require-imports": "error",
+    "@typescript-eslint/array-type": "error",
+    "@typescript-eslint/await-thenable": "error",
+    "@typescript-eslint/ban-ts-comment": "error",
+    "camelcase": "error",
+    "@typescript-eslint/consistent-type-assertions": "error",
+    "@typescript-eslint/explicit-function-return-type": [
+      "error",
+      {
+        "allowExpressions": true
+      }
+    ],
+    "@typescript-eslint/func-call-spacing": ["error", "never"],
+    "@typescript-eslint/no-array-constructor": "error",
+    "@typescript-eslint/no-empty-interface": "error",
+    "@typescript-eslint/no-explicit-any": "error",
+    "@typescript-eslint/no-floating-promises": "error",
+    "@typescript-eslint/no-extraneous-class": "error",
+    "@typescript-eslint/no-for-in-array": "error",
+    "@typescript-eslint/no-inferrable-types": "error",
+    "@typescript-eslint/no-misused-new": "error",
+    "@typescript-eslint/no-namespace": "error",
+    "@typescript-eslint/no-non-null-assertion": "warn",
+    "@typescript-eslint/no-unnecessary-qualifier": "error",
+    "@typescript-eslint/no-unnecessary-type-assertion": "error",
+    "@typescript-eslint/no-useless-constructor": "error",
+    "@typescript-eslint/no-var-requires": "error",
+    "@typescript-eslint/prefer-for-of": "warn",
+    "@typescript-eslint/prefer-function-type": "warn",
+    "@typescript-eslint/prefer-includes": "error",
+    "@typescript-eslint/prefer-string-starts-ends-with": "error",
+    "@typescript-eslint/promise-function-async": "error",
+    "@typescript-eslint/require-array-sort-compare": "error",
+    "@typescript-eslint/restrict-plus-operands": "error",
+    "@typescript-eslint/type-annotation-spacing": "error",
+    "@typescript-eslint/unbound-method": "error"
+  },
+  "env": {
+    "node": true,
+    "es6": true
+  }
+}
diff --git a/.github/PULL_REQUEST_TEMPLATE.md b/.github/PULL_REQUEST_TEMPLATE.md
new file mode 100644
index 0000000..1137db1
--- /dev/null
+++ b/.github/PULL_REQUEST_TEMPLATE.md
@@ -0,0 +1,11 @@
+##### Description
+
+<!---
+Please include a short description of what your PR does and / or the motivation
+behind it
+--->
+
+##### Checklist
+
+- [ ] Tested functionality against a test repository (see ["How to test changes"](../README.md#how-to-test-changes))
+- [ ] Added or updated relevant documentation (leave unchecked if not applicable)
diff --git a/.github/dependabot.yml b/.github/dependabot.yml
new file mode 100644
index 0000000..1230149
--- /dev/null
+++ b/.github/dependabot.yml
@@ -0,0 +1,6 @@
+version: 2
+updates:
+  - package-ecosystem: "github-actions"
+    directory: "/"
+    schedule:
+      interval: "daily"
diff --git a/.github/workflows/ci.yml b/.github/workflows/ci.yml
new file mode 100644
index 0000000..aa40a10
--- /dev/null
+++ b/.github/workflows/ci.yml
@@ -0,0 +1,33 @@
+name: CI
+on:
+  pull_request:
+  push:
+    branches: [main]
+
+jobs:
+  typescript-action:
+    runs-on: ubuntu-22.04
+    steps:
+      - uses: actions/checkout@v4
+        with:
+          fetch-depth: 0
+      - name: Install Nix
+        uses: DeterminateSystems/nix-installer-action@main
+      - name: Enable magic Nix cache
+        uses: DeterminateSystems/magic-nix-cache-action@main
+      - name: Install pnpm dependencies
+        run: nix develop --command pnpm install
+      - name: Check formatting
+        run: nix develop --command pnpm run check-fmt
+      - name: Lint
+        run: nix develop --command pnpm run lint
+      - name: Build
+        run: nix develop --command pnpm run build
+      - name: Run test suite
+        run: nix develop --command pnpm run test
+      - name: Package
+        run: nix develop --command pnpm run package
+      - name: Check git status
+        run: git status --porcelain=v1
+      - name: Ensure no unstaged changes
+        run: git diff --exit-code
diff --git a/.github/workflows/update.yml b/.github/workflows/update.yml
new file mode 100644
index 0000000..c1b3816
--- /dev/null
+++ b/.github/workflows/update.yml
@@ -0,0 +1,22 @@
+name: update-flake-lock
+on:
+  workflow_dispatch:
+  schedule:
+    - cron: "0 0 * * 0"
+
+jobs:
+  lockfile:
+    runs-on: ubuntu-22.04
+    steps:
+      - name: Checkout
+        uses: actions/checkout@v4
+      - name: Install Nix
+        uses: DeterminateSystems/nix-installer-action@main
+      - name: Enable magic Nix cache
+        uses: DeterminateSystems/magic-nix-cache-action@main
+      - name: Check flake
+        uses: DeterminateSystems/flake-checker-action@main
+      - name: Update flake.lock
+        uses: ./.
+        with:
+          _internal-strict-mode: true
diff --git a/.github/workflows/validate.yml b/.github/workflows/validate.yml
new file mode 100644
index 0000000..a5de3ce
--- /dev/null
+++ b/.github/workflows/validate.yml
@@ -0,0 +1,20 @@
+name: CI
+on:
+  pull_request:
+  push:
+    branches: [main]
+
+jobs:
+  validate:
+    runs-on: ubuntu-22.04
+    steps:
+      - uses: actions/checkout@v4
+        with:
+          fetch-depth: 0
+      - name: Validate YAML
+        uses: nwisbeta/validate-yaml-schema@v2.0.0
+        with:
+          yamlSchemasJson: |
+            {
+              "https://json.schemastore.org/github-action.json": ["action.yml"]
+            }
diff --git a/.gitignore b/.gitignore
new file mode 100644
index 0000000..0487fab
--- /dev/null
+++ b/.gitignore
@@ -0,0 +1,2 @@
+# JS dependencies
+node_modules/
diff --git a/.prettierignore b/.prettierignore
new file mode 100644
index 0000000..dddb2a8
--- /dev/null
+++ b/.prettierignore
@@ -0,0 +1,5 @@
+dist/
+lib/
+node_modules/
+pnpm-lock.yaml
+README.md
diff --git a/README.md b/README.md
index b4170a2..51b4d46 100644
--- a/README.md
+++ b/README.md
@@ -2,7 +2,7 @@
 
 This is a GitHub Action that will update your flake.lock file whenever it is run.
 
-> NOTE: We hardcode the `install_url` to a relatively recent `nixUnstable` (`nix-2.5pre20211015_130284b` currently). If you need a newer version that includes a new feature or important bug fix, feel free to file an issue or send a PR bumping the `install_url` inside the [`action.yml`](action.yml)!
+> **NOTE:** As of v3, this action no longer automatically installs Nix on the action runner. You **MUST** set up Nix with flakes support enabled prior to running this action, or your workflow will not function as expected.
 
 ## Example
 
@@ -11,16 +11,305 @@ An example GitHub Action workflow using this action would look like the followin
 ```yaml
 name: update-flake-lock
 on:
-  workflow_dispatch:
+  workflow_dispatch: # allows manual triggering
+  schedule:
+    - cron: '0 0 * * 0' # runs weekly on Sunday at 00:00
 
 jobs:
   lockfile:
     runs-on: ubuntu-latest
     steps:
       - name: Checkout repository
-        uses: actions/checkout@v2
+        uses: actions/checkout@v4
+      - name: Install Nix
+        uses: DeterminateSystems/nix-installer-action@main
       - name: Update flake.lock
         uses: DeterminateSystems/update-flake-lock@main
+        with:
+          pr-title: "Update flake.lock" # Title of PR to be created
+          pr-labels: |                  # Labels to be set on the PR
+            dependencies
+            automated
 ```
 
-To have this workflow run on a recurring basis, see the [GitHub Actions documentation on the `schedule` key](https://docs.github.com/en/actions/learn-github-actions/events-that-trigger-workflows#schedule).
+## Example updating specific input(s)
+
+> **NOTE**: If any inputs have a stale reference (e.g. the lockfile thinks a git input wants its "ref" to be "nixos-unstable", but the flake.nix specifies "nixos-unstable-small"), they will also be updated. At this time, there is no known workaround.
+
+It is also possible to update specific inputs by specifying them in a space-separated list:
+
+```yaml
+name: update-flake-lock
+on:
+  workflow_dispatch: # allows manual triggering
+  schedule:
+    - cron: '0 0 * * 0' # runs weekly on Sunday at 00:00
+
+jobs:
+  lockfile:
+    runs-on: ubuntu-latest
+    steps:
+      - name: Checkout repository
+        uses: actions/checkout@v4
+      - name: Install Nix
+        uses: DeterminateSystems/nix-installer-action@v1
+      - name: Update flake.lock
+        uses: DeterminateSystems/update-flake-lock@vX
+        with:
+          inputs: input1 input2 input3
+```
+
+## Example adding options to the `nix` command
+
+It is also possible to pass specific options to the `nix` command as a space-separated list:
+
+```yaml
+name: update-flake-lock
+on:
+  workflow_dispatch: # allows manual triggering
+  schedule:
+    - cron: '0 0 * * 0' # runs weekly on Sunday at 00:00
+
+jobs:
+  lockfile:
+    runs-on: ubuntu-latest
+    steps:
+      - name: Checkout repository
+        uses: actions/checkout@v4
+      - name: Install Nix
+        uses: DeterminateSystems/nix-installer-action@v1
+      - name: Update flake.lock
+        uses: DeterminateSystems/update-flake-lock@vX
+        with:
+          nix-options: --debug --log-format raw
+```
+
+## Example that prints the number of the created PR
+
+```yaml
+name: update-flake-lock
+on:
+  workflow_dispatch: # allows manual triggering
+  schedule:
+    - cron: '0 0 * * 0' # runs weekly on Sunday at 00:00
+
+jobs:
+  lockfile:
+    runs-on: ubuntu-latest
+    steps:
+      - name: Checkout repository
+        uses: actions/checkout@v4
+      - name: Install Nix
+        uses: DeterminateSystems/nix-installer-action@v1
+      - name: Update flake.lock
+        id: update
+        uses: DeterminateSystems/update-flake-lock@vX
+        with:
+          inputs: input1 input2 input3
+      - name: Print PR number
+        run: echo Pull request number is ${{ steps.update.outputs.pull-request-number }}.
+```
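+
+The action also sets `pull-request-url` and `pull-request-operation` outputs (see [`action.yml`](action.yml)). As a minimal sketch, they can be printed from the same `update` step in the example above:
+
+```yaml
+      - name: Print PR details
+        run: |
+          echo "PR URL is ${{ steps.update.outputs.pull-request-url }}"
+          echo "PR operation was ${{ steps.update.outputs.pull-request-operation }}"
+```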
+
+## Example that doesn't run on PRs
+
+If you run this action as part of your CI workflow, you may want to prevent it from running against pull requests.
+
+```yaml
+name: update-flake-lock
+on:
+  workflow_dispatch: # allows manual triggering
+  pull_request: # triggers on every Pull Request
+  schedule:
+    - cron: '0 0 * * 0' # runs weekly on Sunday at 00:00
+
+jobs:
+  lockfile:
+    runs-on: ubuntu-latest
+    steps:
+      - name: Checkout repository
+        uses: actions/checkout@v4
+      - name: Install Nix
+        uses: DeterminateSystems/nix-installer-action@v1
+      - name: Update flake.lock
+        if: ${{ github.event_name != 'pull_request' }}
+        uses: DeterminateSystems/update-flake-lock@vX
+        with:
+          inputs: input1 input2 input3
+          path-to-flake-dir: 'nix/' # in this example our flake doesn't sit at the root of the repository; it lives at 'nix/flake.nix'
+```
+
+## Example using a different Git user
+
+If you want to change the author and / or committer of the flake.lock update commit, you can tweak the `git-{author,committer}-{name,email}` options:
+
+```yaml
+name: update-flake-lock
+on:
+  workflow_dispatch: # allows manual triggering
+  schedule:
+    - cron: '0 0 * * 0' # runs weekly on Sunday at 00:00
+
+jobs:
+  lockfile:
+    runs-on: ubuntu-latest
+    steps:
+      - name: Checkout repository
+        uses: actions/checkout@v4
+      - name: Install Nix
+        uses: DeterminateSystems/nix-installer-action@v1
+      - name: Update flake.lock
+        uses: DeterminateSystems/update-flake-lock@vX
+        with:
+          git-author-name: 'Jane Author'
+          git-author-email: 'github-actions[bot]@users.noreply.github.com'
+          git-committer-name: 'John Committer'
+          git-committer-email: 'github-actions[bot]@users.noreply.github.com'
+```
+
+## Running GitHub Actions CI
+
+GitHub Actions will not run workflows when a branch is pushed or a PR is opened by a GitHub Action. There are two ways to have GitHub Actions CI run on a PR submitted by this action.
+
+### Without a Personal Access Token
+
+Without using a Personal Access Token, you can manually run the following commands to kick off a CI run:
+
+```sh
+git branch -D update_flake_lock_action
+git fetch origin
+git checkout update_flake_lock_action
+git commit --amend --no-edit
+git push origin update_flake_lock_action --force
+```
+
+### With a Personal Access Token
+
+By providing a Personal Access Token, you can have the PR submitted in a way that bypasses this limitation (GitHub essentially treats the PR as being opened by the owner of the PAT rather than by an Action).
+You can create a token by visiting https://github.com/settings/tokens and selecting at least the `repo` scope. For the new fine-grained tokens, you need to enable read and write access for the "Contents" and "Pull Requests" permissions. Then, store this token in your repository secrets (i.e. `https://github.com/<USER>/<REPO>/settings/secrets/actions`) as `GH_TOKEN_FOR_UPDATES` and set up your workflow file like the following:
+
+```yaml
+name: update-flake-lock
+on:
+  workflow_dispatch: # allows manual triggering
+  schedule:
+    - cron: '0 0 * * 1,4' # Run twice a week
+
+jobs:
+  lockfile:
+    runs-on: ubuntu-latest
+    steps:
+      - name: Checkout repository
+        uses: actions/checkout@v4
+      - name: Install Nix
+        uses: DeterminateSystems/nix-installer-action@v1
+      - name: Update flake.lock
+        uses: DeterminateSystems/update-flake-lock@vX
+        with:
+          token: ${{ secrets.GH_TOKEN_FOR_UPDATES }}
+```
+
+## With GPG commit signing
+
+It's possible for the bot to produce GPG-signed commits. Associating a GPG public key with a GitHub user account is not required, but it is necessary if you want the signed commits to appear as verified on GitHub. This can be a compliance requirement in some cases.
+
+You can follow [GitHub's guide on creating and/or adding a new GPG key to a user account](https://docs.github.com/en/authentication/managing-commit-signature-verification/adding-a-new-gpg-key-to-your-github-account). Using a dedicated GitHub user account for the bot can be a good security measure to dissociate the bot's actions and commits from your personal GitHub account.
+
+For the bot to produce signed commits, you will have to provide the GPG private key via this action's input parameters. You can safely do that with [GitHub secrets, as explained here](https://github.com/crazy-max/ghaction-import-gpg#prerequisites).
+
+When using commit signing, the author name and email of the commits produced by this bot will correspond to the ones associated with the GPG public key.
+
+If you want to sign using a subkey, you must specify the subkey fingerprint using the `gpg-fingerprint` input parameter.
+
+You can find an example of how to use this action with commit signing below:
+
+```yaml
+name: update-flake-lock
+on:
+  workflow_dispatch: # allows manual triggering
+  schedule:
+    - cron: '0 0 * * 1,4' # Run twice a week
+
+jobs:
+  lockfile:
+    runs-on: ubuntu-latest
+    steps:
+      - name: Checkout repository
+        uses: actions/checkout@v4
+      - name: Install Nix
+        uses: DeterminateSystems/nix-installer-action@v1
+      - name: Update flake.lock
+        uses: DeterminateSystems/update-flake-lock@vX
+        with:
+          sign-commits: true
+          gpg-private-key: ${{ secrets.GPG_PRIVATE_KEY }}
+          gpg-fingerprint: ${{ secrets.GPG_FINGERPRINT }} # specify subkey fingerprint (optional)
+          gpg-passphrase: ${{ secrets.GPG_PASSPHRASE }}
+```
+
+## Custom PR Body
+
+By default, the generated PR body uses the following template:
+
+````handlebars
+Automated changes by the [update-flake-lock](https://github.com/DeterminateSystems/update-flake-lock) GitHub Action.
+
+```
+{{ env.GIT_COMMIT_MESSAGE }}
+```
+
+### Running GitHub Actions on this PR
+
+GitHub Actions will not run workflows on pull requests which are opened by a GitHub Action.
+
+To run GitHub Actions workflows on this PR, run:
+
+```sh
+git branch -D update_flake_lock_action
+git fetch origin
+git checkout update_flake_lock_action
+git commit --amend --no-edit
+git push origin update_flake_lock_action --force
+```
+````
+
+However, you can customize it: variable interpolation is performed with [Handlebars](https://handlebarsjs.com/), and the following variables are available in the template (see the sketch below):
+
+- `env.GIT_AUTHOR_NAME`
+- `env.GIT_AUTHOR_EMAIL`
+- `env.GIT_COMMITTER_NAME`
+- `env.GIT_COMMITTER_EMAIL`
+- `env.GIT_COMMIT_MESSAGE`
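+
+For example, here is a minimal sketch of a step overriding `pr-body` with an interpolated template (the wording is illustrative; the step can be dropped into a job like the ones in the examples above):
+
+```yaml
+      - name: Update flake.lock
+        uses: DeterminateSystems/update-flake-lock@vX
+        with:
+          pr-body: |
+            Automated flake.lock update opened by {{ env.GIT_AUTHOR_NAME }} <{{ env.GIT_AUTHOR_EMAIL }}>.
+
+            Commit message:
+            {{ env.GIT_COMMIT_MESSAGE }}
+```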
+
+## Add assignees or reviewers
+
+You can assign the PR to or request a review from one or more GitHub users with `pr-assignees` and `pr-reviewers`, respectively.
+These properties expect a comma or newline separated list of GitHub usernames:
+
+```yaml
+name: update-flake-lock
+on:
+  workflow_dispatch: # allows manual triggering
+  schedule:
+    - cron: '0 0 * * 1,4' # Run twice a week
+
+jobs:
+  lockfile:
+    runs-on: ubuntu-latest
+    steps:
+      - name: Checkout repository
+        uses: actions/checkout@v4
+      - name: Install Nix
+        uses: DeterminateSystems/nix-installer-action@v1
+      - name: Update flake.lock
+        uses: DeterminateSystems/update-flake-lock@vX
+        with:
+          pr-assignees: SomeGitHubUsername
+          pr-reviewers: SomeOtherGitHubUsername,SomeThirdGitHubUsername
+```
+
+## Contributing
+
+Feel free to send a PR or open an issue if you find that something functions unexpectedly! Please make sure to test your changes and update any related documentation before submitting your PR.
+
+### How to test changes
+
+In order to more easily test your changes to this action, we have created a template repository that should point you in the right direction: https://github.com/DeterminateSystems/update-flake-lock-test-template. Please see the README in that repository for instructions on testing your changes.
diff --git a/action.yml b/action.yml
index c0a463c..967045d 100644
--- a/action.yml
+++ b/action.yml
@@ -1,24 +1,226 @@
-name: 'Update flake.lock'
-description: 'Update your flake.lock and send a PR'
+name: "Update Nix Flake Lock"
+description: "Update your Nix flake.lock and send a PR"
+inputs:
+  inputs:
+    description: "A space-separated list of inputs to update. Leave empty to update all inputs."
+    required: false
+    default: ""
+  token:
+    description: "GITHUB_TOKEN or a `repo` scoped Personal Access Token (PAT)"
+    required: false
+    default: ${{ github.token }}
+  commit-msg:
+    description: "The message provided with the commit"
+    required: false
+    default: "flake.lock: Update"
+  base:
+    description: "Sets the pull request base branch. Defaults to the branch checked out in the workflow."
+    required: false
+  branch:
+    description: "The branch of the PR to be created"
+    required: false
+    default: "update_flake_lock_action"
+  path-to-flake-dir:
+    description: "The path of the directory containing `flake.nix` file within your repository. Useful when `flake.nix` cannot reside at the root of your repository."
+    required: false
+  pr-title:
+    description: "The title of the PR to be created"
+    required: false
+    default: "flake.lock: Update"
+  pr-body:
+    description: "The body of the PR to be created"
+    required: false
+    default: |
+      Automated changes by the [update-flake-lock](https://github.com/DeterminateSystems/update-flake-lock) GitHub Action.
+
+      ```
+      {{ env.GIT_COMMIT_MESSAGE }}
+      ```
+
+      ### Running GitHub Actions on this PR
+
+      GitHub Actions will not run workflows on pull requests which are opened by a GitHub Action.
+
+      To run GitHub Actions workflows on this PR, run:
+
+      ```sh
+      git branch -D update_flake_lock_action
+      git fetch origin
+      git checkout update_flake_lock_action
+      git commit --amend --no-edit
+      git push origin update_flake_lock_action --force
+      ```
+
+  pr-labels:
+    description: "A comma or newline separated list of labels to set on the Pull Request to be created"
+    required: false
+    default: ""
+  pr-assignees:
+    description: "A comma or newline separated list of assignees (GitHub usernames)."
+    required: false
+    default: ""
+  pr-reviewers:
+    description: "A comma or newline separated list of reviewers (GitHub usernames) to request a review from."
+    required: false
+    default: ""
+  git-author-name:
+    description: "Author name used for commit. Only used if sign-commits is false."
+    required: false
+    default: "github-actions[bot]"
+  git-author-email:
+    description: "Author email used for commit. Only used if sign-commits is false."
+    required: false
+    default: "github-actions[bot]@users.noreply.github.com"
+  git-committer-name:
+    description: "Committer name used for commit. Only used if sign-commits is false."
+    required: false
+    default: "github-actions[bot]"
+  git-committer-email:
+    description: "Committer email used for commit. Only used if sign-commits is false."
+    required: false
+    default: "github-actions[bot]@users.noreply.github.com"
+  sign-commits:
+    description: "Set to true if the action should sign the commit with GPG"
+    required: false
+    default: "false"
+  gpg-private-key:
+    description: "GPG Private Key with which to sign the commits in the PR to be created"
+    required: false
+    default: ""
+  gpg-fingerprint:
+    description: "Fingerprint of specific GPG subkey to use"
+    required: false
+  gpg-passphrase:
+    description: "GPG Private Key Passphrase for the GPG Private Key with which to sign the commits in the PR to be created"
+    required: false
+    default: ""
+  nix-options:
+    description: "A space-separated list of options to pass to the nix command"
+    required: false
+    default: ""
+  _internal-strict-mode:
+    description: Whether to fail when any errors are thrown. Used only to test the Action; do not set this in your own workflows.
+    required: false
+    default: false
+outputs:
+  pull-request-number:
+    description: "The number of the opened pull request"
+    value: ${{ steps.create-pr.outputs.pull-request-number }}
+  pull-request-url:
+    description: "The The URL of the opened pull request."
+    value: ${{ steps.create-pr.outputs.pull-request-url }}
+  pull-request-operation:
+    description: "The pull request operation performed by the action, `created`, `updated` or `closed`."
+    value: ${{ steps.create-pr.outputs.pull-request-operation }}
 runs:
   using: "composite"
   steps:
-    - uses: cachix/install-nix-action@v14
+    - name: Import bot's GPG key for signing commits
+      if: ${{ inputs.sign-commits == 'true' }}
+      id: import-gpg
+      uses: crazy-max/ghaction-import-gpg@01dd5d3ca463c7f10f7f4f7b4f177225ac661ee4 # v6.1.0
       with:
-        install_url: https://github.com/numtide/nix-unstable-installer/releases/download/nix-2.5pre20211015_130284b/install
-        extra_nix_config: |
-          experimental-features = nix-command flakes
-    - run: nix flake update --commit-lock-file
+        gpg_private_key: ${{ inputs.gpg-private-key }}
+        fingerprint: ${{ inputs.gpg-fingerprint }}
+        passphrase: ${{ inputs.gpg-passphrase }}
+        git_config_global: true
+        git_user_signingkey: true
+        git_commit_gpgsign: true
+    - name: Set environment variables (signed commits)
+      if: ${{ inputs.sign-commits == 'true' }}
       shell: bash
       env:
-        GIT_AUTHOR_NAME: github-actions[bot]
-        GIT_AUTHOR_EMAIL: <github-actions[bot]@users.noreply.github.com>
-        GIT_COMMITTER_NAME: github-actions[bot]
-        GIT_COMMITTER_EMAIL: <github-actions[bot]@users.noreply.github.com>
-    - name: Create PR
-      uses: peter-evans/create-pull-request@v3
+        GIT_AUTHOR_NAME: ${{ steps.import-gpg.outputs.name }}
+        GIT_AUTHOR_EMAIL: ${{ steps.import-gpg.outputs.email }}
+        GIT_COMMITTER_NAME: ${{ steps.import-gpg.outputs.name }}
+        GIT_COMMITTER_EMAIL: ${{ steps.import-gpg.outputs.email }}
+        TARGETS: ${{ inputs.inputs }}
+      run: |
+        echo "GIT_AUTHOR_NAME=$GIT_AUTHOR_NAME" >> $GITHUB_ENV
+        echo "GIT_AUTHOR_EMAIL=<$GIT_AUTHOR_EMAIL>" >> $GITHUB_ENV
+        echo "GIT_COMMITTER_NAME=$GIT_COMMITTER_NAME" >> $GITHUB_ENV
+        echo "GIT_COMMITTER_EMAIL=<$GIT_COMMITTER_EMAIL>" >> $GITHUB_ENV
+    - name: Set environment variables (unsigned commits)
+      if: ${{ inputs.sign-commits != 'true' }}
+      shell: bash
+      run: |
+        echo "GIT_AUTHOR_NAME=${{ inputs.git-author-name }}" >> $GITHUB_ENV
+        echo "GIT_AUTHOR_EMAIL=<${{ inputs.git-author-email }}>" >> $GITHUB_ENV
+        echo "GIT_COMMITTER_NAME=${{ inputs.git-committer-name }}" >> $GITHUB_ENV
+        echo "GIT_COMMITTER_EMAIL=<${{ inputs.git-committer-email }}>" >> $GITHUB_ENV
+    - name: Run update-flake-lock
+      shell: bash
+      run: node "$GITHUB_ACTION_PATH/dist/index.js"
+      env:
+        # The following manually exposes all of the action inputs into INPUT_ environment variables so actionsCore.getInput works:
+        # https://github.com/actions/toolkit/blob/ae38557bb0dba824cdda26ce787bd6b66cf07a83/packages/core/src/core.ts#L126
+        INPUT_BASE: ${{ inputs.base }}
+        INPUT_BRANCH: ${{ inputs.branch }}
+        INPUT_COMMIT-MSG: ${{ inputs.commit-msg }}
+        INPUT_GIT-AUTHOR-EMAIL: ${{ inputs.git-author-email }}
+        INPUT_GIT-AUTHOR-NAME: ${{ inputs.git-author-name }}
+        INPUT_GIT-COMMITTER-EMAIL: ${{ inputs.git-committer-email }}
+        INPUT_GIT-COMMITTER-NAME: ${{ inputs.git-committer-name }}
+        INPUT_GPG-FINGERPRINT: ${{ inputs.gpg-fingerprint }}
+        INPUT_GPG-PASSPHRASE: ${{ inputs.gpg-passphrase }}
+        INPUT_GPG-PRIVATE-KEY: ${{ inputs.gpg-private-key }}
+        INPUT_INPUTS: ${{ inputs.inputs }}
+        INPUT_NIX-OPTIONS: ${{ inputs.nix-options }}
+        INPUT_PATH-TO-FLAKE-DIR: ${{ inputs.path-to-flake-dir }}
+        INPUT_PR-ASSIGNEES: ${{ inputs.pr-assignees }}
+        INPUT_PR-BODY: ${{ inputs.pr-body }}
+        INPUT_PR-LABELS: ${{ inputs.pr-labels }}
+        INPUT_PR-REVIEWERS: ${{ inputs.pr-reviewers }}
+        INPUT_PR-TITLE: ${{ inputs.pr-title }}
+        INPUT_PULL-REQUEST-NUMBER: ${{ inputs.pull-request-number }}
+        INPUT_PULL-REQUEST-OPERATION: ${{ inputs.pull-request-operation }}
+        INPUT_SIGN-COMMITS: ${{ inputs.sign-commits }}
+        INPUT_TOKEN: ${{ inputs.token }}
+        INPUT__INTERNAL-STRICT-MODE: ${{ inputs._internal-strict-mode }}
+    - name: Save PR Body as file
+      uses: DamianReeves/write-file-action@v1.3
       with:
-        branch: update_flake_lock_action
+        path: pr_body.template
+        contents: ${{ inputs.pr-body }}
+      env: {}
+    - name: Set additional env variables (GIT_COMMIT_MESSAGE)
+      shell: bash
+      run: |
+        DELIMITER=$(dd if=/dev/urandom bs=15 count=1 status=none | base64)
+        COMMIT_MESSAGE="$(git log --format=%b -n 1)"
+        echo "GIT_COMMIT_MESSAGE<<$DELIMITER" >> $GITHUB_ENV
+        echo "$COMMIT_MESSAGE" >> $GITHUB_ENV
+        echo "$DELIMITER" >> $GITHUB_ENV
+        echo "GIT_COMMIT_MESSAGE is: ${COMMIT_MESSAGE}"
+    - name: Interpolate PR Body
+      uses: pedrolamas/handlebars-action@2995d7eadacbc8f2f6ab8431a01d84a5fa3b8bb4 # v2.4.0
+      with:
+        files: "pr_body.template"
+        output-filename: "pr_body.txt"
+    - name: Read pr_body.txt
+      id: pr_body
+      uses: juliangruber/read-file-action@v1
+      with:
+        path: "pr_body.txt"
+    # We need to remove the pr_body files so that the
+    # peter-evans/create-pull-request action does not commit them (the
+    # action commits all new and modified files).
+    - name: Remove PR body template files
+      shell: bash
+      run: rm -f pr_body.txt pr_body.template
+    - name: Create PR
+      id: create-pr
+      # uses: peter-evans/create-pull-request@main
+      uses: peter-evans/create-pull-request@v6.0.1
+      with:
+        base: "${{ inputs.base }}"
+        branch: "${{ inputs.branch }}"
         delete-branch: true
-        title: "flake.lock: Update"
-        body: Automated changes by the [update-flake-lock](https://github.com/DeterminateSystems/update-flake-lock) GitHub Action.
+        committer: "${{ env.GIT_COMMITTER_NAME }} ${{ env.GIT_COMMITTER_EMAIL }}"
+        author: "${{ env.GIT_AUTHOR_NAME }} ${{ env.GIT_AUTHOR_EMAIL }}"
+        title: "${{ inputs.pr-title }}"
+        token: "${{ inputs.token }}"
+        assignees: "${{ inputs.pr-assignees }}"
+        labels: "${{ inputs.pr-labels }}"
+        reviewers: "${{ inputs.pr-reviewers }}"
+        body: "${{ steps.pr_body.outputs.content }}"
diff --git a/dist/index.d.ts b/dist/index.d.ts
new file mode 100644
index 0000000..223e65e
--- /dev/null
+++ b/dist/index.d.ts
@@ -0,0 +1,2 @@
+
+export {  }
diff --git a/dist/index.js b/dist/index.js
new file mode 100644
index 0000000..0f88324
--- /dev/null
+++ b/dist/index.js
@@ -0,0 +1,84421 @@
+import { createRequire as __WEBPACK_EXTERNAL_createRequire } from "module";
+/******/ var __webpack_modules__ = ({
+
+/***/ 5500:
+/***/ (function(__unused_webpack_module, exports, __nccwpck_require__) {
+
+
+var __createBinding = (this && this.__createBinding) || (Object.create ? (function(o, m, k, k2) {
+    if (k2 === undefined) k2 = k;
+    var desc = Object.getOwnPropertyDescriptor(m, k);
+    if (!desc || ("get" in desc ? !m.__esModule : desc.writable || desc.configurable)) {
+      desc = { enumerable: true, get: function() { return m[k]; } };
+    }
+    Object.defineProperty(o, k2, desc);
+}) : (function(o, m, k, k2) {
+    if (k2 === undefined) k2 = k;
+    o[k2] = m[k];
+}));
+var __setModuleDefault = (this && this.__setModuleDefault) || (Object.create ? (function(o, v) {
+    Object.defineProperty(o, "default", { enumerable: true, value: v });
+}) : function(o, v) {
+    o["default"] = v;
+});
+var __importStar = (this && this.__importStar) || function (mod) {
+    if (mod && mod.__esModule) return mod;
+    var result = {};
+    if (mod != null) for (var k in mod) if (k !== "default" && Object.prototype.hasOwnProperty.call(mod, k)) __createBinding(result, mod, k);
+    __setModuleDefault(result, mod);
+    return result;
+};
+var __awaiter = (this && this.__awaiter) || function (thisArg, _arguments, P, generator) {
+    function adopt(value) { return value instanceof P ? value : new P(function (resolve) { resolve(value); }); }
+    return new (P || (P = Promise))(function (resolve, reject) {
+        function fulfilled(value) { try { step(generator.next(value)); } catch (e) { reject(e); } }
+        function rejected(value) { try { step(generator["throw"](value)); } catch (e) { reject(e); } }
+        function step(result) { result.done ? resolve(result.value) : adopt(result.value).then(fulfilled, rejected); }
+        step((generator = generator.apply(thisArg, _arguments || [])).next());
+    });
+};
+Object.defineProperty(exports, "__esModule", ({ value: true }));
+exports.saveCache = exports.restoreCache = exports.isFeatureAvailable = exports.ReserveCacheError = exports.ValidationError = void 0;
+const core = __importStar(__nccwpck_require__(9999));
+const path = __importStar(__nccwpck_require__(6928));
+const utils = __importStar(__nccwpck_require__(1675));
+const cacheHttpClient = __importStar(__nccwpck_require__(7443));
+const tar_1 = __nccwpck_require__(3177);
+class ValidationError extends Error {
+    constructor(message) {
+        super(message);
+        this.name = 'ValidationError';
+        Object.setPrototypeOf(this, ValidationError.prototype);
+    }
+}
+exports.ValidationError = ValidationError;
+class ReserveCacheError extends Error {
+    constructor(message) {
+        super(message);
+        this.name = 'ReserveCacheError';
+        Object.setPrototypeOf(this, ReserveCacheError.prototype);
+    }
+}
+exports.ReserveCacheError = ReserveCacheError;
+function checkPaths(paths) {
+    if (!paths || paths.length === 0) {
+        throw new ValidationError(`Path Validation Error: At least one directory or file path is required`);
+    }
+}
+function checkKey(key) {
+    if (key.length > 512) {
+        throw new ValidationError(`Key Validation Error: ${key} cannot be larger than 512 characters.`);
+    }
+    const regex = /^[^,]*$/;
+    if (!regex.test(key)) {
+        throw new ValidationError(`Key Validation Error: ${key} cannot contain commas.`);
+    }
+}
+/**
+ * isFeatureAvailable to check the presence of Actions cache service
+ *
+ * @returns boolean return true if Actions cache service feature is available, otherwise false
+ */
+function isFeatureAvailable() {
+    return !!process.env['ACTIONS_CACHE_URL'];
+}
+exports.isFeatureAvailable = isFeatureAvailable;
+/**
+ * Restores cache from keys
+ *
+ * @param paths a list of file paths to restore from the cache
+ * @param primaryKey an explicit key for restoring the cache
+ * @param restoreKeys an optional ordered list of keys to use for restoring the cache if no cache hit occurred for key
+ * @param downloadOptions cache download options
+ * @param enableCrossOsArchive an optional boolean enabled to restore on windows any cache created on any platform
+ * @returns string returns the key for the cache hit, otherwise returns undefined
+ */
+function restoreCache(paths, primaryKey, restoreKeys, options, enableCrossOsArchive = false) {
+    return __awaiter(this, void 0, void 0, function* () {
+        checkPaths(paths);
+        restoreKeys = restoreKeys || [];
+        const keys = [primaryKey, ...restoreKeys];
+        core.debug('Resolved Keys:');
+        core.debug(JSON.stringify(keys));
+        if (keys.length > 10) {
+            throw new ValidationError(`Key Validation Error: Keys are limited to a maximum of 10.`);
+        }
+        for (const key of keys) {
+            checkKey(key);
+        }
+        const compressionMethod = yield utils.getCompressionMethod();
+        let archivePath = '';
+        try {
+            // paths are needed to compute version
+            const cacheEntry = yield cacheHttpClient.getCacheEntry(keys, paths, {
+                compressionMethod,
+                enableCrossOsArchive
+            });
+            if (!(cacheEntry === null || cacheEntry === void 0 ? void 0 : cacheEntry.archiveLocation)) {
+                // Cache not found
+                return undefined;
+            }
+            if (options === null || options === void 0 ? void 0 : options.lookupOnly) {
+                core.info('Lookup only - skipping download');
+                return cacheEntry.cacheKey;
+            }
+            archivePath = path.join(yield utils.createTempDirectory(), utils.getCacheFileName(compressionMethod));
+            core.debug(`Archive Path: ${archivePath}`);
+            // Download the cache from the cache entry
+            yield cacheHttpClient.downloadCache(cacheEntry.archiveLocation, archivePath, options);
+            if (core.isDebug()) {
+                yield (0, tar_1.listTar)(archivePath, compressionMethod);
+            }
+            const archiveFileSize = utils.getArchiveFileSizeInBytes(archivePath);
+            core.info(`Cache Size: ~${Math.round(archiveFileSize / (1024 * 1024))} MB (${archiveFileSize} B)`);
+            yield (0, tar_1.extractTar)(archivePath, compressionMethod);
+            core.info('Cache restored successfully');
+            return cacheEntry.cacheKey;
+        }
+        catch (error) {
+            const typedError = error;
+            if (typedError.name === ValidationError.name) {
+                throw error;
+            }
+            else {
+                // Suppress all non-validation cache-related errors because caching should be optional
+                core.warning(`Failed to restore: ${error.message}`);
+            }
+        }
+        finally {
+            // Try to delete the archive to save space
+            try {
+                yield utils.unlinkFile(archivePath);
+            }
+            catch (error) {
+                core.debug(`Failed to delete archive: ${error}`);
+            }
+        }
+        return undefined;
+    });
+}
+exports.restoreCache = restoreCache;
+/**
+ * Saves a list of files with the specified key
+ *
+ * @param paths a list of file paths to be cached
+ * @param key an explicit key for restoring the cache
+ * @param enableCrossOsArchive an optional boolean enabled to save cache on windows which could be restored on any platform
+ * @param options cache upload options
+ * @returns number returns cacheId if the cache was saved successfully and throws an error if save fails
+ */
+function saveCache(paths, key, options, enableCrossOsArchive = false) {
+    var _a, _b, _c, _d, _e;
+    return __awaiter(this, void 0, void 0, function* () {
+        checkPaths(paths);
+        checkKey(key);
+        const compressionMethod = yield utils.getCompressionMethod();
+        let cacheId = -1;
+        const cachePaths = yield utils.resolvePaths(paths);
+        core.debug('Cache Paths:');
+        core.debug(`${JSON.stringify(cachePaths)}`);
+        if (cachePaths.length === 0) {
+            throw new Error(`Path Validation Error: Path(s) specified in the action for caching do(es) not exist, hence no cache is being saved.`);
+        }
+        const archiveFolder = yield utils.createTempDirectory();
+        const archivePath = path.join(archiveFolder, utils.getCacheFileName(compressionMethod));
+        core.debug(`Archive Path: ${archivePath}`);
+        try {
+            yield (0, tar_1.createTar)(archiveFolder, cachePaths, compressionMethod);
+            if (core.isDebug()) {
+                yield (0, tar_1.listTar)(archivePath, compressionMethod);
+            }
+            const fileSizeLimit = 10 * 1024 * 1024 * 1024; // 10GB per repo limit
+            const archiveFileSize = utils.getArchiveFileSizeInBytes(archivePath);
+            core.debug(`File Size: ${archiveFileSize}`);
+            // For GHES, this check will take place in ReserveCache API with enterprise file size limit
+            if (archiveFileSize > fileSizeLimit && !utils.isGhes()) {
+                throw new Error(`Cache size of ~${Math.round(archiveFileSize / (1024 * 1024))} MB (${archiveFileSize} B) is over the 10GB limit, not saving cache.`);
+            }
+            core.debug('Reserving Cache');
+            const reserveCacheResponse = yield cacheHttpClient.reserveCache(key, paths, {
+                compressionMethod,
+                enableCrossOsArchive,
+                cacheSize: archiveFileSize
+            });
+            if ((_a = reserveCacheResponse === null || reserveCacheResponse === void 0 ? void 0 : reserveCacheResponse.result) === null || _a === void 0 ? void 0 : _a.cacheId) {
+                cacheId = (_b = reserveCacheResponse === null || reserveCacheResponse === void 0 ? void 0 : reserveCacheResponse.result) === null || _b === void 0 ? void 0 : _b.cacheId;
+            }
+            else if ((reserveCacheResponse === null || reserveCacheResponse === void 0 ? void 0 : reserveCacheResponse.statusCode) === 400) {
+                throw new Error((_d = (_c = reserveCacheResponse === null || reserveCacheResponse === void 0 ? void 0 : reserveCacheResponse.error) === null || _c === void 0 ? void 0 : _c.message) !== null && _d !== void 0 ? _d : `Cache size of ~${Math.round(archiveFileSize / (1024 * 1024))} MB (${archiveFileSize} B) is over the data cap limit, not saving cache.`);
+            }
+            else {
+                throw new ReserveCacheError(`Unable to reserve cache with key ${key}, another job may be creating this cache. More details: ${(_e = reserveCacheResponse === null || reserveCacheResponse === void 0 ? void 0 : reserveCacheResponse.error) === null || _e === void 0 ? void 0 : _e.message}`);
+            }
+            core.debug(`Saving Cache (ID: ${cacheId})`);
+            yield cacheHttpClient.saveCache(cacheId, archivePath, options);
+        }
+        catch (error) {
+            const typedError = error;
+            if (typedError.name === ValidationError.name) {
+                throw error;
+            }
+            else if (typedError.name === ReserveCacheError.name) {
+                core.info(`Failed to save: ${typedError.message}`);
+            }
+            else {
+                core.warning(`Failed to save: ${typedError.message}`);
+            }
+        }
+        finally {
+            // Try to delete the archive to save space
+            try {
+                yield utils.unlinkFile(archivePath);
+            }
+            catch (error) {
+                core.debug(`Failed to delete archive: ${error}`);
+            }
+        }
+        return cacheId;
+    });
+}
+exports.saveCache = saveCache;
+//# sourceMappingURL=cache.js.map
+
+/***/ }),
+
+/***/ 7443:
+/***/ (function(__unused_webpack_module, exports, __nccwpck_require__) {
+
+
+var __createBinding = (this && this.__createBinding) || (Object.create ? (function(o, m, k, k2) {
+    if (k2 === undefined) k2 = k;
+    var desc = Object.getOwnPropertyDescriptor(m, k);
+    if (!desc || ("get" in desc ? !m.__esModule : desc.writable || desc.configurable)) {
+      desc = { enumerable: true, get: function() { return m[k]; } };
+    }
+    Object.defineProperty(o, k2, desc);
+}) : (function(o, m, k, k2) {
+    if (k2 === undefined) k2 = k;
+    o[k2] = m[k];
+}));
+var __setModuleDefault = (this && this.__setModuleDefault) || (Object.create ? (function(o, v) {
+    Object.defineProperty(o, "default", { enumerable: true, value: v });
+}) : function(o, v) {
+    o["default"] = v;
+});
+var __importStar = (this && this.__importStar) || function (mod) {
+    if (mod && mod.__esModule) return mod;
+    var result = {};
+    if (mod != null) for (var k in mod) if (k !== "default" && Object.prototype.hasOwnProperty.call(mod, k)) __createBinding(result, mod, k);
+    __setModuleDefault(result, mod);
+    return result;
+};
+var __awaiter = (this && this.__awaiter) || function (thisArg, _arguments, P, generator) {
+    function adopt(value) { return value instanceof P ? value : new P(function (resolve) { resolve(value); }); }
+    return new (P || (P = Promise))(function (resolve, reject) {
+        function fulfilled(value) { try { step(generator.next(value)); } catch (e) { reject(e); } }
+        function rejected(value) { try { step(generator["throw"](value)); } catch (e) { reject(e); } }
+        function step(result) { result.done ? resolve(result.value) : adopt(result.value).then(fulfilled, rejected); }
+        step((generator = generator.apply(thisArg, _arguments || [])).next());
+    });
+};
+Object.defineProperty(exports, "__esModule", ({ value: true }));
+exports.saveCache = exports.reserveCache = exports.downloadCache = exports.getCacheEntry = exports.getCacheVersion = void 0;
+const core = __importStar(__nccwpck_require__(9999));
+const http_client_1 = __nccwpck_require__(787);
+const auth_1 = __nccwpck_require__(3673);
+const crypto = __importStar(__nccwpck_require__(6982));
+const fs = __importStar(__nccwpck_require__(9896));
+const url_1 = __nccwpck_require__(7016);
+const utils = __importStar(__nccwpck_require__(1675));
+const downloadUtils_1 = __nccwpck_require__(5195);
+const options_1 = __nccwpck_require__(8820);
+const requestUtils_1 = __nccwpck_require__(7342);
+const versionSalt = '1.0';
+function getCacheApiUrl(resource) {
+    const baseUrl = process.env['ACTIONS_CACHE_URL'] || '';
+    if (!baseUrl) {
+        throw new Error('Cache Service Url not found, unable to restore cache.');
+    }
+    const url = `${baseUrl}_apis/artifactcache/${resource}`;
+    core.debug(`Resource Url: ${url}`);
+    return url;
+}
+function createAcceptHeader(type, apiVersion) {
+    return `${type};api-version=${apiVersion}`;
+}
+function getRequestOptions() {
+    const requestOptions = {
+        headers: {
+            Accept: createAcceptHeader('application/json', '6.0-preview.1')
+        }
+    };
+    return requestOptions;
+}
+function createHttpClient() {
+    const token = process.env['ACTIONS_RUNTIME_TOKEN'] || '';
+    const bearerCredentialHandler = new auth_1.BearerCredentialHandler(token);
+    return new http_client_1.HttpClient('actions/cache', [bearerCredentialHandler], getRequestOptions());
+}
+function getCacheVersion(paths, compressionMethod, enableCrossOsArchive = false) {
+    // don't pass changes upstream
+    const components = paths.slice();
+    // Add compression method to cache version to restore
+    // compressed cache as per compression method
+    if (compressionMethod) {
+        components.push(compressionMethod);
+    }
+    // Only check for windows platforms if enableCrossOsArchive is false
+    if (process.platform === 'win32' && !enableCrossOsArchive) {
+        components.push('windows-only');
+    }
+    // Add salt to cache version to support breaking changes in cache entry
+    components.push(versionSalt);
+    return crypto.createHash('sha256').update(components.join('|')).digest('hex');
+}
+exports.getCacheVersion = getCacheVersion;
+function getCacheEntry(keys, paths, options) {
+    return __awaiter(this, void 0, void 0, function* () {
+        const httpClient = createHttpClient();
+        const version = getCacheVersion(paths, options === null || options === void 0 ? void 0 : options.compressionMethod, options === null || options === void 0 ? void 0 : options.enableCrossOsArchive);
+        const resource = `cache?keys=${encodeURIComponent(keys.join(','))}&version=${version}`;
+        const response = yield (0, requestUtils_1.retryTypedResponse)('getCacheEntry', () => __awaiter(this, void 0, void 0, function* () { return httpClient.getJson(getCacheApiUrl(resource)); }));
+        // Cache not found
+        if (response.statusCode === 204) {
+            // List cache for primary key only if cache miss occurs
+            if (core.isDebug()) {
+                yield printCachesListForDiagnostics(keys[0], httpClient, version);
+            }
+            return null;
+        }
+        if (!(0, requestUtils_1.isSuccessStatusCode)(response.statusCode)) {
+            throw new Error(`Cache service responded with ${response.statusCode}`);
+        }
+        const cacheResult = response.result;
+        const cacheDownloadUrl = cacheResult === null || cacheResult === void 0 ? void 0 : cacheResult.archiveLocation;
+        if (!cacheDownloadUrl) {
+            // Cache archiveLocation not found. This should never happen, and hence bail out.
+            throw new Error('Cache not found.');
+        }
+        core.setSecret(cacheDownloadUrl);
+        core.debug(`Cache Result:`);
+        core.debug(JSON.stringify(cacheResult));
+        return cacheResult;
+    });
+}
+exports.getCacheEntry = getCacheEntry;
+function printCachesListForDiagnostics(key, httpClient, version) {
+    return __awaiter(this, void 0, void 0, function* () {
+        const resource = `caches?key=${encodeURIComponent(key)}`;
+        const response = yield (0, requestUtils_1.retryTypedResponse)('listCache', () => __awaiter(this, void 0, void 0, function* () { return httpClient.getJson(getCacheApiUrl(resource)); }));
+        if (response.statusCode === 200) {
+            const cacheListResult = response.result;
+            const totalCount = cacheListResult === null || cacheListResult === void 0 ? void 0 : cacheListResult.totalCount;
+            if (totalCount && totalCount > 0) {
+                core.debug(`No matching cache found for cache key '${key}', version '${version} and scope ${process.env['GITHUB_REF']}. There exist one or more cache(s) with similar key but they have different version or scope. See more info on cache matching here: https://docs.github.com/en/actions/using-workflows/caching-dependencies-to-speed-up-workflows#matching-a-cache-key \nOther caches with similar key:`);
+                for (const cacheEntry of (cacheListResult === null || cacheListResult === void 0 ? void 0 : cacheListResult.artifactCaches) || []) {
+                    core.debug(`Cache Key: ${cacheEntry === null || cacheEntry === void 0 ? void 0 : cacheEntry.cacheKey}, Cache Version: ${cacheEntry === null || cacheEntry === void 0 ? void 0 : cacheEntry.cacheVersion}, Cache Scope: ${cacheEntry === null || cacheEntry === void 0 ? void 0 : cacheEntry.scope}, Cache Created: ${cacheEntry === null || cacheEntry === void 0 ? void 0 : cacheEntry.creationTime}`);
+                }
+            }
+        }
+    });
+}
+function downloadCache(archiveLocation, archivePath, options) {
+    return __awaiter(this, void 0, void 0, function* () {
+        const archiveUrl = new url_1.URL(archiveLocation);
+        const downloadOptions = (0, options_1.getDownloadOptions)(options);
+        if (archiveUrl.hostname.endsWith('.blob.core.windows.net')) {
+            if (downloadOptions.useAzureSdk) {
+                // Use Azure storage SDK to download caches hosted on Azure to improve speed and reliability.
+                yield (0, downloadUtils_1.downloadCacheStorageSDK)(archiveLocation, archivePath, downloadOptions);
+            }
+            else if (downloadOptions.concurrentBlobDownloads) {
+                // Use concurrent implementation with HttpClient to work around blob SDK issue
+                yield (0, downloadUtils_1.downloadCacheHttpClientConcurrent)(archiveLocation, archivePath, downloadOptions);
+            }
+            else {
+                // Otherwise, download using the Actions http-client.
+                yield (0, downloadUtils_1.downloadCacheHttpClient)(archiveLocation, archivePath);
+            }
+        }
+        else {
+            yield (0, downloadUtils_1.downloadCacheHttpClient)(archiveLocation, archivePath);
+        }
+    });
+}
+exports.downloadCache = downloadCache;
+// Reserve Cache
+function reserveCache(key, paths, options) {
+    return __awaiter(this, void 0, void 0, function* () {
+        const httpClient = createHttpClient();
+        const version = getCacheVersion(paths, options === null || options === void 0 ? void 0 : options.compressionMethod, options === null || options === void 0 ? void 0 : options.enableCrossOsArchive);
+        const reserveCacheRequest = {
+            key,
+            version,
+            cacheSize: options === null || options === void 0 ? void 0 : options.cacheSize
+        };
+        const response = yield (0, requestUtils_1.retryTypedResponse)('reserveCache', () => __awaiter(this, void 0, void 0, function* () {
+            return httpClient.postJson(getCacheApiUrl('caches'), reserveCacheRequest);
+        }));
+        return response;
+    });
+}
+exports.reserveCache = reserveCache;
+function getContentRange(start, end) {
+    // Format: `bytes start-end/filesize
+    // start and end are inclusive
+    // filesize can be *
+    // For a 200 byte chunk starting at byte 0:
+    // Content-Range: bytes 0-199/*
+    return `bytes ${start}-${end}/*`;
+}
+function uploadChunk(httpClient, resourceUrl, openStream, start, end) {
+    return __awaiter(this, void 0, void 0, function* () {
+        core.debug(`Uploading chunk of size ${end - start + 1} bytes at offset ${start} with content range: ${getContentRange(start, end)}`);
+        const additionalHeaders = {
+            'Content-Type': 'application/octet-stream',
+            'Content-Range': getContentRange(start, end)
+        };
+        const uploadChunkResponse = yield (0, requestUtils_1.retryHttpClientResponse)(`uploadChunk (start: ${start}, end: ${end})`, () => __awaiter(this, void 0, void 0, function* () {
+            return httpClient.sendStream('PATCH', resourceUrl, openStream(), additionalHeaders);
+        }));
+        if (!(0, requestUtils_1.isSuccessStatusCode)(uploadChunkResponse.message.statusCode)) {
+            throw new Error(`Cache service responded with ${uploadChunkResponse.message.statusCode} during upload chunk.`);
+        }
+    });
+}
+function uploadFile(httpClient, cacheId, archivePath, options) {
+    return __awaiter(this, void 0, void 0, function* () {
+        // Upload Chunks
+        const fileSize = utils.getArchiveFileSizeInBytes(archivePath);
+        const resourceUrl = getCacheApiUrl(`caches/${cacheId.toString()}`);
+        const fd = fs.openSync(archivePath, 'r');
+        const uploadOptions = (0, options_1.getUploadOptions)(options);
+        const concurrency = utils.assertDefined('uploadConcurrency', uploadOptions.uploadConcurrency);
+        const maxChunkSize = utils.assertDefined('uploadChunkSize', uploadOptions.uploadChunkSize);
+        const parallelUploads = [...new Array(concurrency).keys()];
+        core.debug('Awaiting all uploads');
+        let offset = 0;
+        try {
+            yield Promise.all(parallelUploads.map(() => __awaiter(this, void 0, void 0, function* () {
+                while (offset < fileSize) {
+                    const chunkSize = Math.min(fileSize - offset, maxChunkSize);
+                    const start = offset;
+                    const end = offset + chunkSize - 1;
+                    offset += maxChunkSize;
+                    yield uploadChunk(httpClient, resourceUrl, () => fs
+                        .createReadStream(archivePath, {
+                        fd,
+                        start,
+                        end,
+                        autoClose: false
+                    })
+                        .on('error', error => {
+                        throw new Error(`Cache upload failed because file read failed with ${error.message}`);
+                    }), start, end);
+                }
+            })));
+        }
+        finally {
+            fs.closeSync(fd);
+        }
+        return;
+    });
+}
+function commitCache(httpClient, cacheId, filesize) {
+    return __awaiter(this, void 0, void 0, function* () {
+        const commitCacheRequest = { size: filesize };
+        return yield (0, requestUtils_1.retryTypedResponse)('commitCache', () => __awaiter(this, void 0, void 0, function* () {
+            return httpClient.postJson(getCacheApiUrl(`caches/${cacheId.toString()}`), commitCacheRequest);
+        }));
+    });
+}
+function saveCache(cacheId, archivePath, options) {
+    return __awaiter(this, void 0, void 0, function* () {
+        const httpClient = createHttpClient();
+        core.debug('Upload cache');
+        yield uploadFile(httpClient, cacheId, archivePath, options);
+        // Commit Cache
+        core.debug('Commiting cache');
+        const cacheSize = utils.getArchiveFileSizeInBytes(archivePath);
+        core.info(`Cache Size: ~${Math.round(cacheSize / (1024 * 1024))} MB (${cacheSize} B)`);
+        const commitCacheResponse = yield commitCache(httpClient, cacheId, cacheSize);
+        if (!(0, requestUtils_1.isSuccessStatusCode)(commitCacheResponse.statusCode)) {
+            throw new Error(`Cache service responded with ${commitCacheResponse.statusCode} during commit cache.`);
+        }
+        core.info('Cache saved successfully');
+    });
+}
+exports.saveCache = saveCache;
+//# sourceMappingURL=cacheHttpClient.js.map
+
+/***/ }),
+
+/***/ 1675:
+/***/ (function(__unused_webpack_module, exports, __nccwpck_require__) {
+
+
+var __createBinding = (this && this.__createBinding) || (Object.create ? (function(o, m, k, k2) {
+    if (k2 === undefined) k2 = k;
+    var desc = Object.getOwnPropertyDescriptor(m, k);
+    if (!desc || ("get" in desc ? !m.__esModule : desc.writable || desc.configurable)) {
+      desc = { enumerable: true, get: function() { return m[k]; } };
+    }
+    Object.defineProperty(o, k2, desc);
+}) : (function(o, m, k, k2) {
+    if (k2 === undefined) k2 = k;
+    o[k2] = m[k];
+}));
+var __setModuleDefault = (this && this.__setModuleDefault) || (Object.create ? (function(o, v) {
+    Object.defineProperty(o, "default", { enumerable: true, value: v });
+}) : function(o, v) {
+    o["default"] = v;
+});
+var __importStar = (this && this.__importStar) || function (mod) {
+    if (mod && mod.__esModule) return mod;
+    var result = {};
+    if (mod != null) for (var k in mod) if (k !== "default" && Object.prototype.hasOwnProperty.call(mod, k)) __createBinding(result, mod, k);
+    __setModuleDefault(result, mod);
+    return result;
+};
+var __awaiter = (this && this.__awaiter) || function (thisArg, _arguments, P, generator) {
+    function adopt(value) { return value instanceof P ? value : new P(function (resolve) { resolve(value); }); }
+    return new (P || (P = Promise))(function (resolve, reject) {
+        function fulfilled(value) { try { step(generator.next(value)); } catch (e) { reject(e); } }
+        function rejected(value) { try { step(generator["throw"](value)); } catch (e) { reject(e); } }
+        function step(result) { result.done ? resolve(result.value) : adopt(result.value).then(fulfilled, rejected); }
+        step((generator = generator.apply(thisArg, _arguments || [])).next());
+    });
+};
+var __asyncValues = (this && this.__asyncValues) || function (o) {
+    if (!Symbol.asyncIterator) throw new TypeError("Symbol.asyncIterator is not defined.");
+    var m = o[Symbol.asyncIterator], i;
+    return m ? m.call(o) : (o = typeof __values === "function" ? __values(o) : o[Symbol.iterator](), i = {}, verb("next"), verb("throw"), verb("return"), i[Symbol.asyncIterator] = function () { return this; }, i);
+    function verb(n) { i[n] = o[n] && function (v) { return new Promise(function (resolve, reject) { v = o[n](v), settle(resolve, reject, v.done, v.value); }); }; }
+    function settle(resolve, reject, d, v) { Promise.resolve(v).then(function(v) { resolve({ value: v, done: d }); }, reject); }
+};
+Object.defineProperty(exports, "__esModule", ({ value: true }));
+exports.isGhes = exports.assertDefined = exports.getGnuTarPathOnWindows = exports.getCacheFileName = exports.getCompressionMethod = exports.unlinkFile = exports.resolvePaths = exports.getArchiveFileSizeInBytes = exports.createTempDirectory = void 0;
+const core = __importStar(__nccwpck_require__(9999));
+const exec = __importStar(__nccwpck_require__(8872));
+const glob = __importStar(__nccwpck_require__(6011));
+const io = __importStar(__nccwpck_require__(3357));
+const crypto = __importStar(__nccwpck_require__(6982));
+const fs = __importStar(__nccwpck_require__(9896));
+const path = __importStar(__nccwpck_require__(6928));
+const semver = __importStar(__nccwpck_require__(8804));
+const util = __importStar(__nccwpck_require__(9023));
+const constants_1 = __nccwpck_require__(2831);
+// From https://github.com/actions/toolkit/blob/main/packages/tool-cache/src/tool-cache.ts#L23
+function createTempDirectory() {
+    return __awaiter(this, void 0, void 0, function* () {
+        const IS_WINDOWS = process.platform === 'win32';
+        let tempDirectory = process.env['RUNNER_TEMP'] || '';
+        if (!tempDirectory) {
+            let baseLocation;
+            if (IS_WINDOWS) {
+                // On Windows use the USERPROFILE env variable
+                baseLocation = process.env['USERPROFILE'] || 'C:\\';
+            }
+            else {
+                if (process.platform === 'darwin') {
+                    baseLocation = '/Users';
+                }
+                else {
+                    baseLocation = '/home';
+                }
+            }
+            tempDirectory = path.join(baseLocation, 'actions', 'temp');
+        }
+        const dest = path.join(tempDirectory, crypto.randomUUID());
+        yield io.mkdirP(dest);
+        return dest;
+    });
+}
+exports.createTempDirectory = createTempDirectory;
+function getArchiveFileSizeInBytes(filePath) {
+    return fs.statSync(filePath).size;
+}
+exports.getArchiveFileSizeInBytes = getArchiveFileSizeInBytes;
+function resolvePaths(patterns) {
+    var _a, e_1, _b, _c;
+    var _d;
+    return __awaiter(this, void 0, void 0, function* () {
+        const paths = [];
+        const workspace = (_d = process.env['GITHUB_WORKSPACE']) !== null && _d !== void 0 ? _d : process.cwd();
+        const globber = yield glob.create(patterns.join('\n'), {
+            implicitDescendants: false
+        });
+        try {
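+            // Downlevelled `for await...of` over globber.globGenerator(); the _a/_b/_c/_e
+            // locals are async-iterator bookkeeping for cleanup and error propagation.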
+            for (var _e = true, _f = __asyncValues(globber.globGenerator()), _g; _g = yield _f.next(), _a = _g.done, !_a; _e = true) {
+                _c = _g.value;
+                _e = false;
+                const file = _c;
+                const relativeFile = path
+                    .relative(workspace, file)
+                    .replace(new RegExp(`\\${path.sep}`, 'g'), '/');
+                core.debug(`Matched: ${relativeFile}`);
+                // Paths are made relative so the tar entries are all relative to the root of the workspace.
+                if (relativeFile === '') {
+                    // path.relative returns empty string if workspace and file are equal
+                    paths.push('.');
+                }
+                else {
+                    paths.push(`${relativeFile}`);
+                }
+            }
+        }
+        catch (e_1_1) { e_1 = { error: e_1_1 }; }
+        finally {
+            try {
+                if (!_e && !_a && (_b = _f.return)) yield _b.call(_f);
+            }
+            finally { if (e_1) throw e_1.error; }
+        }
+        return paths;
+    });
+}
+exports.resolvePaths = resolvePaths;
+function unlinkFile(filePath) {
+    return __awaiter(this, void 0, void 0, function* () {
+        return util.promisify(fs.unlink)(filePath);
+    });
+}
+exports.unlinkFile = unlinkFile;
+function getVersion(app, additionalArgs = []) {
+    return __awaiter(this, void 0, void 0, function* () {
+        let versionOutput = '';
+        additionalArgs.push('--version');
+        core.debug(`Checking ${app} ${additionalArgs.join(' ')}`);
+        try {
+            yield exec.exec(`${app}`, additionalArgs, {
+                ignoreReturnCode: true,
+                silent: true,
+                listeners: {
+                    stdout: (data) => (versionOutput += data.toString()),
+                    stderr: (data) => (versionOutput += data.toString())
+                }
+            });
+        }
+        catch (err) {
+            core.debug(err.message);
+        }
+        versionOutput = versionOutput.trim();
+        core.debug(versionOutput);
+        return versionOutput;
+    });
+}
+// Use zstandard if possible to maximize cache performance
+function getCompressionMethod() {
+    return __awaiter(this, void 0, void 0, function* () {
+        const versionOutput = yield getVersion('zstd', ['--quiet']);
+        const version = semver.clean(versionOutput);
+        core.debug(`zstd version: ${version}`);
+        if (versionOutput === '') {
+            return constants_1.CompressionMethod.Gzip;
+        }
+        else {
+            return constants_1.CompressionMethod.ZstdWithoutLong;
+        }
+    });
+}
+exports.getCompressionMethod = getCompressionMethod;
+function getCacheFileName(compressionMethod) {
+    return compressionMethod === constants_1.CompressionMethod.Gzip
+        ? constants_1.CacheFilename.Gzip
+        : constants_1.CacheFilename.Zstd;
+}
+exports.getCacheFileName = getCacheFileName;
+function getGnuTarPathOnWindows() {
+    return __awaiter(this, void 0, void 0, function* () {
+        if (fs.existsSync(constants_1.GnuTarPathOnWindows)) {
+            return constants_1.GnuTarPathOnWindows;
+        }
+        const versionOutput = yield getVersion('tar');
+        return versionOutput.toLowerCase().includes('gnu tar') ? io.which('tar') : '';
+    });
+}
+exports.getGnuTarPathOnWindows = getGnuTarPathOnWindows;
+function assertDefined(name, value) {
+    if (value === undefined) {
+        throw Error(`Expected ${name} but value was undefined`);
+    }
+    return value;
+}
+exports.assertDefined = assertDefined;
+function isGhes() {
+    const ghUrl = new URL(process.env['GITHUB_SERVER_URL'] || 'https://github.com');
+    const hostname = ghUrl.hostname.trimEnd().toUpperCase();
+    const isGitHubHost = hostname === 'GITHUB.COM';
+    const isGheHost = hostname.endsWith('.GHE.COM') || hostname.endsWith('.GHE.LOCALHOST');
+    return !isGitHubHost && !isGheHost;
+}
+exports.isGhes = isGhes;
+//# sourceMappingURL=cacheUtils.js.map
+
+/***/ }),
+
+/***/ 2831:
+/***/ ((__unused_webpack_module, exports) => {
+
+
+Object.defineProperty(exports, "__esModule", ({ value: true }));
+exports.ManifestFilename = exports.TarFilename = exports.SystemTarPathOnWindows = exports.GnuTarPathOnWindows = exports.SocketTimeout = exports.DefaultRetryDelay = exports.DefaultRetryAttempts = exports.ArchiveToolType = exports.CompressionMethod = exports.CacheFilename = void 0;
+var CacheFilename;
+(function (CacheFilename) {
+    CacheFilename["Gzip"] = "cache.tgz";
+    CacheFilename["Zstd"] = "cache.tzst";
+})(CacheFilename || (exports.CacheFilename = CacheFilename = {}));
+var CompressionMethod;
+(function (CompressionMethod) {
+    CompressionMethod["Gzip"] = "gzip";
+    // Long range mode was added to zstd in v1.3.2.
+    // This enum is for earlier versions of zstd that do not have --long support
+    CompressionMethod["ZstdWithoutLong"] = "zstd-without-long";
+    CompressionMethod["Zstd"] = "zstd";
+})(CompressionMethod || (exports.CompressionMethod = CompressionMethod = {}));
+var ArchiveToolType;
+(function (ArchiveToolType) {
+    ArchiveToolType["GNU"] = "gnu";
+    ArchiveToolType["BSD"] = "bsd";
+})(ArchiveToolType || (exports.ArchiveToolType = ArchiveToolType = {}));
+// The default number of retry attempts.
+exports.DefaultRetryAttempts = 2;
+// The default delay in milliseconds between retry attempts.
+exports.DefaultRetryDelay = 5000;
+// Socket timeout in milliseconds during download.  If no traffic is received
+// over the socket during this period, the socket is destroyed and the download
+// is aborted.
+exports.SocketTimeout = 5000;
+// The default path of GNU tar on hosted Windows runners
+exports.GnuTarPathOnWindows = `${process.env['PROGRAMFILES']}\\Git\\usr\\bin\\tar.exe`;
+// The default path of BSD tar on hosted Windows runners
+exports.SystemTarPathOnWindows = `${process.env['SYSTEMDRIVE']}\\Windows\\System32\\tar.exe`;
+exports.TarFilename = 'cache.tar';
+exports.ManifestFilename = 'manifest.txt';
+//# sourceMappingURL=constants.js.map
+
+/***/ }),
+
+/***/ 5195:
+/***/ (function(__unused_webpack_module, exports, __nccwpck_require__) {
+
+
+var __createBinding = (this && this.__createBinding) || (Object.create ? (function(o, m, k, k2) {
+    if (k2 === undefined) k2 = k;
+    var desc = Object.getOwnPropertyDescriptor(m, k);
+    if (!desc || ("get" in desc ? !m.__esModule : desc.writable || desc.configurable)) {
+      desc = { enumerable: true, get: function() { return m[k]; } };
+    }
+    Object.defineProperty(o, k2, desc);
+}) : (function(o, m, k, k2) {
+    if (k2 === undefined) k2 = k;
+    o[k2] = m[k];
+}));
+var __setModuleDefault = (this && this.__setModuleDefault) || (Object.create ? (function(o, v) {
+    Object.defineProperty(o, "default", { enumerable: true, value: v });
+}) : function(o, v) {
+    o["default"] = v;
+});
+var __importStar = (this && this.__importStar) || function (mod) {
+    if (mod && mod.__esModule) return mod;
+    var result = {};
+    if (mod != null) for (var k in mod) if (k !== "default" && Object.prototype.hasOwnProperty.call(mod, k)) __createBinding(result, mod, k);
+    __setModuleDefault(result, mod);
+    return result;
+};
+var __awaiter = (this && this.__awaiter) || function (thisArg, _arguments, P, generator) {
+    function adopt(value) { return value instanceof P ? value : new P(function (resolve) { resolve(value); }); }
+    return new (P || (P = Promise))(function (resolve, reject) {
+        function fulfilled(value) { try { step(generator.next(value)); } catch (e) { reject(e); } }
+        function rejected(value) { try { step(generator["throw"](value)); } catch (e) { reject(e); } }
+        function step(result) { result.done ? resolve(result.value) : adopt(result.value).then(fulfilled, rejected); }
+        step((generator = generator.apply(thisArg, _arguments || [])).next());
+    });
+};
+Object.defineProperty(exports, "__esModule", ({ value: true }));
+exports.downloadCacheStorageSDK = exports.downloadCacheHttpClientConcurrent = exports.downloadCacheHttpClient = exports.DownloadProgress = void 0;
+const core = __importStar(__nccwpck_require__(9999));
+const http_client_1 = __nccwpck_require__(787);
+const storage_blob_1 = __nccwpck_require__(9784);
+const buffer = __importStar(__nccwpck_require__(181));
+const fs = __importStar(__nccwpck_require__(9896));
+const stream = __importStar(__nccwpck_require__(2203));
+const util = __importStar(__nccwpck_require__(9023));
+const utils = __importStar(__nccwpck_require__(1675));
+const constants_1 = __nccwpck_require__(2831);
+const requestUtils_1 = __nccwpck_require__(7342);
+const abort_controller_1 = __nccwpck_require__(349);
+/**
+ * Pipes the body of an HTTP response to a stream
+ *
+ * @param response the HTTP response
+ * @param output the writable stream
+ */
+function pipeResponseToStream(response, output) {
+    return __awaiter(this, void 0, void 0, function* () {
+        const pipeline = util.promisify(stream.pipeline);
+        yield pipeline(response.message, output);
+    });
+}
+/**
+ * Class for tracking the download state and displaying stats.
+ */
+class DownloadProgress {
+    constructor(contentLength) {
+        this.contentLength = contentLength;
+        this.segmentIndex = 0;
+        this.segmentSize = 0;
+        this.segmentOffset = 0;
+        this.receivedBytes = 0;
+        this.displayedComplete = false;
+        this.startTime = Date.now();
+    }
+    /**
+     * Progress to the next segment. Only call this method when the previous segment
+     * is complete.
+     *
+     * @param segmentSize the length of the next segment
+     */
+    nextSegment(segmentSize) {
+        this.segmentOffset = this.segmentOffset + this.segmentSize;
+        this.segmentIndex = this.segmentIndex + 1;
+        this.segmentSize = segmentSize;
+        this.receivedBytes = 0;
+        core.debug(`Downloading segment at offset ${this.segmentOffset} with length ${this.segmentSize}...`);
+    }
+    /**
+     * Sets the number of bytes received for the current segment.
+     *
+     * @param receivedBytes the number of bytes received
+     */
+    setReceivedBytes(receivedBytes) {
+        this.receivedBytes = receivedBytes;
+    }
+    /**
+     * Returns the total number of bytes transferred.
+     */
+    getTransferredBytes() {
+        return this.segmentOffset + this.receivedBytes;
+    }
+    /**
+     * Returns true if the download is complete.
+     */
+    isDone() {
+        return this.getTransferredBytes() === this.contentLength;
+    }
+    /**
+     * Prints the current download stats. Once the download completes, this will print one
+     * last line and then stop.
+     */
+    display() {
+        if (this.displayedComplete) {
+            return;
+        }
+        const transferredBytes = this.segmentOffset + this.receivedBytes;
+        const percentage = (100 * (transferredBytes / this.contentLength)).toFixed(1);
+        const elapsedTime = Date.now() - this.startTime;
+        const downloadSpeed = (transferredBytes /
+            (1024 * 1024) /
+            (elapsedTime / 1000)).toFixed(1);
+        core.info(`Received ${transferredBytes} of ${this.contentLength} (${percentage}%), ${downloadSpeed} MB/sec`);
+        if (this.isDone()) {
+            this.displayedComplete = true;
+        }
+    }
+    /**
+     * Returns a function used to handle TransferProgressEvents.
+     */
+    onProgress() {
+        return (progress) => {
+            this.setReceivedBytes(progress.loadedBytes);
+        };
+    }
+    /**
+     * Starts the timer that displays the stats.
+     *
+     * @param delayInMs the delay between each write
+     */
+    startDisplayTimer(delayInMs = 1000) {
+        const displayCallback = () => {
+            this.display();
+            if (!this.isDone()) {
+                this.timeoutHandle = setTimeout(displayCallback, delayInMs);
+            }
+        };
+        this.timeoutHandle = setTimeout(displayCallback, delayInMs);
+    }
+    /**
+     * Stops the timer that displays the stats. As this typically indicates the download
+     * is complete, this will display one last line, unless the last line has already
+     * been written.
+     */
+    stopDisplayTimer() {
+        if (this.timeoutHandle) {
+            clearTimeout(this.timeoutHandle);
+            this.timeoutHandle = undefined;
+        }
+        this.display();
+    }
+}
+exports.DownloadProgress = DownloadProgress;
+/**
+ * Download the cache using the Actions toolkit http-client
+ *
+ * @param archiveLocation the URL for the cache
+ * @param archivePath the local path where the cache is saved
+ */
+function downloadCacheHttpClient(archiveLocation, archivePath) {
+    return __awaiter(this, void 0, void 0, function* () {
+        const writeStream = fs.createWriteStream(archivePath);
+        const httpClient = new http_client_1.HttpClient('actions/cache');
+        const downloadResponse = yield (0, requestUtils_1.retryHttpClientResponse)('downloadCache', () => __awaiter(this, void 0, void 0, function* () { return httpClient.get(archiveLocation); }));
+        // Abort download if no traffic received over the socket.
+        downloadResponse.message.socket.setTimeout(constants_1.SocketTimeout, () => {
+            downloadResponse.message.destroy();
+            core.debug(`Aborting download, socket timed out after ${constants_1.SocketTimeout} ms`);
+        });
+        yield pipeResponseToStream(downloadResponse, writeStream);
+        // Validate download size.
+        const contentLengthHeader = downloadResponse.message.headers['content-length'];
+        if (contentLengthHeader) {
+            const expectedLength = parseInt(contentLengthHeader);
+            const actualLength = utils.getArchiveFileSizeInBytes(archivePath);
+            if (actualLength !== expectedLength) {
+                throw new Error(`Incomplete download. Expected file size: ${expectedLength}, actual file size: ${actualLength}`);
+            }
+        }
+        else {
+            core.debug('Unable to validate download, no Content-Length header');
+        }
+    });
+}
+exports.downloadCacheHttpClient = downloadCacheHttpClient;
+/**
+ * Download the cache using the Actions toolkit http-client concurrently
+ *
+ * @param archiveLocation the URL for the cache
+ * @param archivePath the local path where the cache is saved
+ */
+function downloadCacheHttpClientConcurrent(archiveLocation, archivePath, options) {
+    var _a;
+    return __awaiter(this, void 0, void 0, function* () {
+        const archiveDescriptor = yield fs.promises.open(archivePath, 'w');
+        const httpClient = new http_client_1.HttpClient('actions/cache', undefined, {
+            socketTimeout: options.timeoutInMs,
+            keepAlive: true
+        });
+        try {
+            const res = yield (0, requestUtils_1.retryHttpClientResponse)('downloadCacheMetadata', () => __awaiter(this, void 0, void 0, function* () { return yield httpClient.request('HEAD', archiveLocation, null, {}); }));
+            const lengthHeader = res.message.headers['content-length'];
+            if (lengthHeader === undefined || lengthHeader === null) {
+                throw new Error('Content-Length not found on blob response');
+            }
+            const length = parseInt(lengthHeader);
+            if (Number.isNaN(length)) {
+                throw new Error(`Could not interpret Content-Length: ${length}`);
+            }
+            const downloads = [];
+            const blockSize = 4 * 1024 * 1024;
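+            // Split the blob into 4 MB ranges; each is fetched lazily with an HTTP Range
+            // request and written back to the archive at its own offset.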
+            for (let offset = 0; offset < length; offset += blockSize) {
+                const count = Math.min(blockSize, length - offset);
+                downloads.push({
+                    offset,
+                    promiseGetter: () => __awaiter(this, void 0, void 0, function* () {
+                        return yield downloadSegmentRetry(httpClient, archiveLocation, offset, count);
+                    })
+                });
+            }
+            // reverse to use .pop instead of .shift
+            downloads.reverse();
+            let actives = 0;
+            let bytesDownloaded = 0;
+            const progress = new DownloadProgress(length);
+            progress.startDisplayTimer();
+            const progressFn = progress.onProgress();
+            const activeDownloads = [];
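+            // activeDownloads is used as a sparse map keyed by segment offset;
+            // Promise.race over Object.values(...) yields whichever in-flight segment
+            // finishes first so its bytes can be written while the rest continue.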
+            let nextDownload;
+            const waitAndWrite = () => __awaiter(this, void 0, void 0, function* () {
+                const segment = yield Promise.race(Object.values(activeDownloads));
+                yield archiveDescriptor.write(segment.buffer, 0, segment.count, segment.offset);
+                actives--;
+                delete activeDownloads[segment.offset];
+                bytesDownloaded += segment.count;
+                progressFn({ loadedBytes: bytesDownloaded });
+            });
+            while ((nextDownload = downloads.pop())) {
+                activeDownloads[nextDownload.offset] = nextDownload.promiseGetter();
+                actives++;
+                if (actives >= ((_a = options.downloadConcurrency) !== null && _a !== void 0 ? _a : 10)) {
+                    yield waitAndWrite();
+                }
+            }
+            while (actives > 0) {
+                yield waitAndWrite();
+            }
+        }
+        finally {
+            httpClient.dispose();
+            yield archiveDescriptor.close();
+        }
+    });
+}
+exports.downloadCacheHttpClientConcurrent = downloadCacheHttpClientConcurrent;
+function downloadSegmentRetry(httpClient, archiveLocation, offset, count) {
+    return __awaiter(this, void 0, void 0, function* () {
+        const retries = 5;
+        let failures = 0;
+        while (true) {
+            try {
+                const timeout = 30000;
+                const result = yield promiseWithTimeout(timeout, downloadSegment(httpClient, archiveLocation, offset, count));
+                if (typeof result === 'string') {
+                    throw new Error('downloadSegmentRetry failed due to timeout');
+                }
+                return result;
+            }
+            catch (err) {
+                if (failures >= retries) {
+                    throw err;
+                }
+                failures++;
+            }
+        }
+    });
+}
+function downloadSegment(httpClient, archiveLocation, offset, count) {
+    return __awaiter(this, void 0, void 0, function* () {
+        const partRes = yield (0, requestUtils_1.retryHttpClientResponse)('downloadCachePart', () => __awaiter(this, void 0, void 0, function* () {
+            return yield httpClient.get(archiveLocation, {
+                Range: `bytes=${offset}-${offset + count - 1}`
+            });
+        }));
+        if (!partRes.readBodyBuffer) {
+            throw new Error('Expected HttpClientResponse to implement readBodyBuffer');
+        }
+        return {
+            offset,
+            count,
+            buffer: yield partRes.readBodyBuffer()
+        };
+    });
+}
+/**
+ * Download the cache using the Azure Storage SDK.  Only call this method if the
+ * URL points to an Azure Storage endpoint.
+ *
+ * @param archiveLocation the URL for the cache
+ * @param archivePath the local path where the cache is saved
+ * @param options the download options with the defaults set
+ */
+function downloadCacheStorageSDK(archiveLocation, archivePath, options) {
+    var _a;
+    return __awaiter(this, void 0, void 0, function* () {
+        const client = new storage_blob_1.BlockBlobClient(archiveLocation, undefined, {
+            retryOptions: {
+                // Override the timeout used when downloading each 4 MB chunk
+                // The default is 2 min / MB, which is way too slow
+                tryTimeoutInMs: options.timeoutInMs
+            }
+        });
+        const properties = yield client.getProperties();
+        const contentLength = (_a = properties.contentLength) !== null && _a !== void 0 ? _a : -1;
+        if (contentLength < 0) {
+            // We should never hit this condition, but just in case fall back to downloading the
+            // file as one large stream
+            core.debug('Unable to determine content length, downloading file with http-client...');
+            yield downloadCacheHttpClient(archiveLocation, archivePath);
+        }
+        else {
+            // Use downloadToBuffer for faster downloads, since internally it splits the
+            // file into 4 MB chunks which can then be parallelized and retried independently
+            //
+            // If the file exceeds the buffer maximum length (~1 GB on 32-bit systems and ~2 GB
+            // on 64-bit systems), split the download into multiple segments
+            // ~2 GB = 2147483647, beyond this, we start getting out of range error. So, capping it accordingly.
+            // Updated segment size to 128MB = 134217728 bytes, to complete a segment faster and fail fast
+            const maxSegmentSize = Math.min(134217728, buffer.constants.MAX_LENGTH);
+            const downloadProgress = new DownloadProgress(contentLength);
+            const fd = fs.openSync(archivePath, 'w');
+            try {
+                downloadProgress.startDisplayTimer();
+                const controller = new abort_controller_1.AbortController();
+                const abortSignal = controller.signal;
+                while (!downloadProgress.isDone()) {
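+                    // Download one segment at a time into a buffer, each guarded by its
+                    // own timeout, then write it to the archive before fetching the next.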
+                    const segmentStart = downloadProgress.segmentOffset + downloadProgress.segmentSize;
+                    const segmentSize = Math.min(maxSegmentSize, contentLength - segmentStart);
+                    downloadProgress.nextSegment(segmentSize);
+                    const result = yield promiseWithTimeout(options.segmentTimeoutInMs || 3600000, client.downloadToBuffer(segmentStart, segmentSize, {
+                        abortSignal,
+                        concurrency: options.downloadConcurrency,
+                        onProgress: downloadProgress.onProgress()
+                    }));
+                    if (result === 'timeout') {
+                        controller.abort();
+                        throw new Error('Aborting cache download as the download time exceeded the timeout.');
+                    }
+                    else if (Buffer.isBuffer(result)) {
+                        fs.writeFileSync(fd, result);
+                    }
+                }
+            }
+            finally {
+                downloadProgress.stopDisplayTimer();
+                fs.closeSync(fd);
+            }
+        }
+    });
+}
+exports.downloadCacheStorageSDK = downloadCacheStorageSDK;
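+// Races the given promise against a timer and resolves with the sentinel string
+// 'timeout' if the promise does not settle within timeoutMs; callers check for
+// that sentinel instead of catching an error.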
+const promiseWithTimeout = (timeoutMs, promise) => __awaiter(void 0, void 0, void 0, function* () {
+    let timeoutHandle;
+    const timeoutPromise = new Promise(resolve => {
+        timeoutHandle = setTimeout(() => resolve('timeout'), timeoutMs);
+    });
+    return Promise.race([promise, timeoutPromise]).then(result => {
+        clearTimeout(timeoutHandle);
+        return result;
+    });
+});
+//# sourceMappingURL=downloadUtils.js.map
+
+/***/ }),
+
+/***/ 7342:
+/***/ (function(__unused_webpack_module, exports, __nccwpck_require__) {
+
+
+var __createBinding = (this && this.__createBinding) || (Object.create ? (function(o, m, k, k2) {
+    if (k2 === undefined) k2 = k;
+    var desc = Object.getOwnPropertyDescriptor(m, k);
+    if (!desc || ("get" in desc ? !m.__esModule : desc.writable || desc.configurable)) {
+      desc = { enumerable: true, get: function() { return m[k]; } };
+    }
+    Object.defineProperty(o, k2, desc);
+}) : (function(o, m, k, k2) {
+    if (k2 === undefined) k2 = k;
+    o[k2] = m[k];
+}));
+var __setModuleDefault = (this && this.__setModuleDefault) || (Object.create ? (function(o, v) {
+    Object.defineProperty(o, "default", { enumerable: true, value: v });
+}) : function(o, v) {
+    o["default"] = v;
+});
+var __importStar = (this && this.__importStar) || function (mod) {
+    if (mod && mod.__esModule) return mod;
+    var result = {};
+    if (mod != null) for (var k in mod) if (k !== "default" && Object.prototype.hasOwnProperty.call(mod, k)) __createBinding(result, mod, k);
+    __setModuleDefault(result, mod);
+    return result;
+};
+var __awaiter = (this && this.__awaiter) || function (thisArg, _arguments, P, generator) {
+    function adopt(value) { return value instanceof P ? value : new P(function (resolve) { resolve(value); }); }
+    return new (P || (P = Promise))(function (resolve, reject) {
+        function fulfilled(value) { try { step(generator.next(value)); } catch (e) { reject(e); } }
+        function rejected(value) { try { step(generator["throw"](value)); } catch (e) { reject(e); } }
+        function step(result) { result.done ? resolve(result.value) : adopt(result.value).then(fulfilled, rejected); }
+        step((generator = generator.apply(thisArg, _arguments || [])).next());
+    });
+};
+Object.defineProperty(exports, "__esModule", ({ value: true }));
+exports.retryHttpClientResponse = exports.retryTypedResponse = exports.retry = exports.isRetryableStatusCode = exports.isServerErrorStatusCode = exports.isSuccessStatusCode = void 0;
+const core = __importStar(__nccwpck_require__(9999));
+const http_client_1 = __nccwpck_require__(787);
+const constants_1 = __nccwpck_require__(2831);
+function isSuccessStatusCode(statusCode) {
+    if (!statusCode) {
+        return false;
+    }
+    return statusCode >= 200 && statusCode < 300;
+}
+exports.isSuccessStatusCode = isSuccessStatusCode;
+function isServerErrorStatusCode(statusCode) {
+    if (!statusCode) {
+        return true;
+    }
+    return statusCode >= 500;
+}
+exports.isServerErrorStatusCode = isServerErrorStatusCode;
+function isRetryableStatusCode(statusCode) {
+    if (!statusCode) {
+        return false;
+    }
+    const retryableStatusCodes = [
+        http_client_1.HttpCodes.BadGateway,
+        http_client_1.HttpCodes.ServiceUnavailable,
+        http_client_1.HttpCodes.GatewayTimeout
+    ];
+    return retryableStatusCodes.includes(statusCode);
+}
+exports.isRetryableStatusCode = isRetryableStatusCode;
+function sleep(milliseconds) {
+    return __awaiter(this, void 0, void 0, function* () {
+        return new Promise(resolve => setTimeout(resolve, milliseconds));
+    });
+}
+function retry(name, method, getStatusCode, maxAttempts = constants_1.DefaultRetryAttempts, delay = constants_1.DefaultRetryDelay, onError = undefined) {
+    return __awaiter(this, void 0, void 0, function* () {
+        let errorMessage = '';
+        let attempt = 1;
+        while (attempt <= maxAttempts) {
+            let response = undefined;
+            let statusCode = undefined;
+            let isRetryable = false;
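+            // A thrown error is considered retryable by default; when a status code is
+            // available (directly or via onError), retryability is decided from that code.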
+            try {
+                response = yield method();
+            }
+            catch (error) {
+                if (onError) {
+                    response = onError(error);
+                }
+                isRetryable = true;
+                errorMessage = error.message;
+            }
+            if (response) {
+                statusCode = getStatusCode(response);
+                if (!isServerErrorStatusCode(statusCode)) {
+                    return response;
+                }
+            }
+            if (statusCode) {
+                isRetryable = isRetryableStatusCode(statusCode);
+                errorMessage = `Cache service responded with ${statusCode}`;
+            }
+            core.debug(`${name} - Attempt ${attempt} of ${maxAttempts} failed with error: ${errorMessage}`);
+            if (!isRetryable) {
+                core.debug(`${name} - Error is not retryable`);
+                break;
+            }
+            yield sleep(delay);
+            attempt++;
+        }
+        throw Error(`${name} failed: ${errorMessage}`);
+    });
+}
+exports.retry = retry;
+function retryTypedResponse(name, method, maxAttempts = constants_1.DefaultRetryAttempts, delay = constants_1.DefaultRetryDelay) {
+    return __awaiter(this, void 0, void 0, function* () {
+        return yield retry(name, method, (response) => response.statusCode, maxAttempts, delay, 
+        // If the error object contains the statusCode property, extract it and return
+        // a TypedResponse<T> so it can be processed by the retry logic.
+        (error) => {
+            if (error instanceof http_client_1.HttpClientError) {
+                return {
+                    statusCode: error.statusCode,
+                    result: null,
+                    headers: {},
+                    error
+                };
+            }
+            else {
+                return undefined;
+            }
+        });
+    });
+}
+exports.retryTypedResponse = retryTypedResponse;
+function retryHttpClientResponse(name, method, maxAttempts = constants_1.DefaultRetryAttempts, delay = constants_1.DefaultRetryDelay) {
+    return __awaiter(this, void 0, void 0, function* () {
+        return yield retry(name, method, (response) => response.message.statusCode, maxAttempts, delay);
+    });
+}
+exports.retryHttpClientResponse = retryHttpClientResponse;
+//# sourceMappingURL=requestUtils.js.map
+
+/***/ }),
+
+/***/ 3177:
+/***/ (function(__unused_webpack_module, exports, __nccwpck_require__) {
+
+
+var __createBinding = (this && this.__createBinding) || (Object.create ? (function(o, m, k, k2) {
+    if (k2 === undefined) k2 = k;
+    var desc = Object.getOwnPropertyDescriptor(m, k);
+    if (!desc || ("get" in desc ? !m.__esModule : desc.writable || desc.configurable)) {
+      desc = { enumerable: true, get: function() { return m[k]; } };
+    }
+    Object.defineProperty(o, k2, desc);
+}) : (function(o, m, k, k2) {
+    if (k2 === undefined) k2 = k;
+    o[k2] = m[k];
+}));
+var __setModuleDefault = (this && this.__setModuleDefault) || (Object.create ? (function(o, v) {
+    Object.defineProperty(o, "default", { enumerable: true, value: v });
+}) : function(o, v) {
+    o["default"] = v;
+});
+var __importStar = (this && this.__importStar) || function (mod) {
+    if (mod && mod.__esModule) return mod;
+    var result = {};
+    if (mod != null) for (var k in mod) if (k !== "default" && Object.prototype.hasOwnProperty.call(mod, k)) __createBinding(result, mod, k);
+    __setModuleDefault(result, mod);
+    return result;
+};
+var __awaiter = (this && this.__awaiter) || function (thisArg, _arguments, P, generator) {
+    function adopt(value) { return value instanceof P ? value : new P(function (resolve) { resolve(value); }); }
+    return new (P || (P = Promise))(function (resolve, reject) {
+        function fulfilled(value) { try { step(generator.next(value)); } catch (e) { reject(e); } }
+        function rejected(value) { try { step(generator["throw"](value)); } catch (e) { reject(e); } }
+        function step(result) { result.done ? resolve(result.value) : adopt(result.value).then(fulfilled, rejected); }
+        step((generator = generator.apply(thisArg, _arguments || [])).next());
+    });
+};
+Object.defineProperty(exports, "__esModule", ({ value: true }));
+exports.createTar = exports.extractTar = exports.listTar = void 0;
+const exec_1 = __nccwpck_require__(8872);
+const io = __importStar(__nccwpck_require__(3357));
+const fs_1 = __nccwpck_require__(9896);
+const path = __importStar(__nccwpck_require__(6928));
+const utils = __importStar(__nccwpck_require__(1675));
+const constants_1 = __nccwpck_require__(2831);
+const IS_WINDOWS = process.platform === 'win32';
+// Returns tar path and type: BSD or GNU
+function getTarPath() {
+    return __awaiter(this, void 0, void 0, function* () {
+        switch (process.platform) {
+            case 'win32': {
+                const gnuTar = yield utils.getGnuTarPathOnWindows();
+                const systemTar = constants_1.SystemTarPathOnWindows;
+                if (gnuTar) {
+                    // Use GNU tar as the default on Windows
+                    return { path: gnuTar, type: constants_1.ArchiveToolType.GNU };
+                }
+                else if ((0, fs_1.existsSync)(systemTar)) {
+                    return { path: systemTar, type: constants_1.ArchiveToolType.BSD };
+                }
+                break;
+            }
+            case 'darwin': {
+                const gnuTar = yield io.which('gtar', false);
+                if (gnuTar) {
+                    // fix permission denied errors when extracting BSD tar archive with GNU tar - https://github.com/actions/cache/issues/527
+                    return { path: gnuTar, type: constants_1.ArchiveToolType.GNU };
+                }
+                else {
+                    return {
+                        path: yield io.which('tar', true),
+                        type: constants_1.ArchiveToolType.BSD
+                    };
+                }
+            }
+            default:
+                break;
+        }
+        // Default assumption is that GNU tar is present in the PATH
+        return {
+            path: yield io.which('tar', true),
+            type: constants_1.ArchiveToolType.GNU
+        };
+    });
+}
+// Returns the tar arguments based on tarPath, compressionMethod, method type and OS
+function getTarArgs(tarPath, compressionMethod, type, archivePath = '') {
+    return __awaiter(this, void 0, void 0, function* () {
+        const args = [`"${tarPath.path}"`];
+        const cacheFileName = utils.getCacheFileName(compressionMethod);
+        const tarFile = 'cache.tar';
+        const workingDirectory = getWorkingDirectory();
+        // Specific args for the BSD tar on Windows workaround
+        const BSD_TAR_ZSTD = tarPath.type === constants_1.ArchiveToolType.BSD &&
+            compressionMethod !== constants_1.CompressionMethod.Gzip &&
+            IS_WINDOWS;
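+        // When BSD_TAR_ZSTD applies, tar reads/writes the intermediate uncompressed
+        // cache.tar and zstd handles (de)compression in a separate command.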
+        // Method specific args
+        switch (type) {
+            case 'create':
+                args.push('--posix', '-cf', BSD_TAR_ZSTD
+                    ? tarFile
+                    : cacheFileName.replace(new RegExp(`\\${path.sep}`, 'g'), '/'), '--exclude', BSD_TAR_ZSTD
+                    ? tarFile
+                    : cacheFileName.replace(new RegExp(`\\${path.sep}`, 'g'), '/'), '-P', '-C', workingDirectory.replace(new RegExp(`\\${path.sep}`, 'g'), '/'), '--files-from', constants_1.ManifestFilename);
+                break;
+            case 'extract':
+                args.push('-xf', BSD_TAR_ZSTD
+                    ? tarFile
+                    : archivePath.replace(new RegExp(`\\${path.sep}`, 'g'), '/'), '-P', '-C', workingDirectory.replace(new RegExp(`\\${path.sep}`, 'g'), '/'));
+                break;
+            case 'list':
+                args.push('-tf', BSD_TAR_ZSTD
+                    ? tarFile
+                    : archivePath.replace(new RegExp(`\\${path.sep}`, 'g'), '/'), '-P');
+                break;
+        }
+        // Platform specific args
+        if (tarPath.type === constants_1.ArchiveToolType.GNU) {
+            switch (process.platform) {
+                case 'win32':
+                    args.push('--force-local');
+                    break;
+                case 'darwin':
+                    args.push('--delay-directory-restore');
+                    break;
+            }
+        }
+        return args;
+    });
+}
+// Returns the commands to run tar and the compression program
+function getCommands(compressionMethod, type, archivePath = '') {
+    return __awaiter(this, void 0, void 0, function* () {
+        let args;
+        const tarPath = yield getTarPath();
+        const tarArgs = yield getTarArgs(tarPath, compressionMethod, type, archivePath);
+        const compressionArgs = type !== 'create'
+            ? yield getDecompressionProgram(tarPath, compressionMethod, archivePath)
+            : yield getCompressionProgram(tarPath, compressionMethod);
+        const BSD_TAR_ZSTD = tarPath.type === constants_1.ArchiveToolType.BSD &&
+            compressionMethod !== constants_1.CompressionMethod.Gzip &&
+            IS_WINDOWS;
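+        // BSD tar with zstd on Windows cannot pipe through a compression program, so
+        // two separate commands are returned: decompression runs before extract/list,
+        // compression runs after create.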
+        if (BSD_TAR_ZSTD && type !== 'create') {
+            args = [[...compressionArgs].join(' '), [...tarArgs].join(' ')];
+        }
+        else {
+            args = [[...tarArgs].join(' '), [...compressionArgs].join(' ')];
+        }
+        if (BSD_TAR_ZSTD) {
+            return args;
+        }
+        return [args.join(' ')];
+    });
+}
+function getWorkingDirectory() {
+    var _a;
+    return (_a = process.env['GITHUB_WORKSPACE']) !== null && _a !== void 0 ? _a : process.cwd();
+}
+// Common function for extractTar and listTar to get the decompression program arguments
+function getDecompressionProgram(tarPath, compressionMethod, archivePath) {
+    return __awaiter(this, void 0, void 0, function* () {
+        // -d: Decompress.
+        // unzstd is equivalent to 'zstd -d'
+        // --long=#: Enables long distance matching with # bits. Maximum is 30 (1GB) on 32-bit OS and 31 (2GB) on 64-bit.
+        // Using 30 here because we also support 32-bit self-hosted runners.
+        const BSD_TAR_ZSTD = tarPath.type === constants_1.ArchiveToolType.BSD &&
+            compressionMethod !== constants_1.CompressionMethod.Gzip &&
+            IS_WINDOWS;
+        switch (compressionMethod) {
+            case constants_1.CompressionMethod.Zstd:
+                return BSD_TAR_ZSTD
+                    ? [
+                        'zstd -d --long=30 --force -o',
+                        constants_1.TarFilename,
+                        archivePath.replace(new RegExp(`\\${path.sep}`, 'g'), '/')
+                    ]
+                    : [
+                        '--use-compress-program',
+                        IS_WINDOWS ? '"zstd -d --long=30"' : 'unzstd --long=30'
+                    ];
+            case constants_1.CompressionMethod.ZstdWithoutLong:
+                return BSD_TAR_ZSTD
+                    ? [
+                        'zstd -d --force -o',
+                        constants_1.TarFilename,
+                        archivePath.replace(new RegExp(`\\${path.sep}`, 'g'), '/')
+                    ]
+                    : ['--use-compress-program', IS_WINDOWS ? '"zstd -d"' : 'unzstd'];
+            default:
+                return ['-z'];
+        }
+    });
+}
+// Used for creating the archive
+// -T#: Compress using # working threads. If # is 0, attempt to detect and use the number of physical CPU cores.
+// zstdmt is equivalent to 'zstd -T0'
+// --long=#: Enables long distance matching with # bits. Maximum is 30 (1GB) on 32-bit OS and 31 (2GB) on 64-bit.
+// Using 30 here because we also support 32-bit self-hosted runners.
+// Long range mode was added in the zstd v1.3.2 release, so we do not use --long with older versions of zstd.
+function getCompressionProgram(tarPath, compressionMethod) {
+    return __awaiter(this, void 0, void 0, function* () {
+        const cacheFileName = utils.getCacheFileName(compressionMethod);
+        const BSD_TAR_ZSTD = tarPath.type === constants_1.ArchiveToolType.BSD &&
+            compressionMethod !== constants_1.CompressionMethod.Gzip &&
+            IS_WINDOWS;
+        switch (compressionMethod) {
+            case constants_1.CompressionMethod.Zstd:
+                return BSD_TAR_ZSTD
+                    ? [
+                        'zstd -T0 --long=30 --force -o',
+                        cacheFileName.replace(new RegExp(`\\${path.sep}`, 'g'), '/'),
+                        constants_1.TarFilename
+                    ]
+                    : [
+                        '--use-compress-program',
+                        IS_WINDOWS ? '"zstd -T0 --long=30"' : 'zstdmt --long=30'
+                    ];
+            case constants_1.CompressionMethod.ZstdWithoutLong:
+                return BSD_TAR_ZSTD
+                    ? [
+                        'zstd -T0 --force -o',
+                        cacheFileName.replace(new RegExp(`\\${path.sep}`, 'g'), '/'),
+                        constants_1.TarFilename
+                    ]
+                    : ['--use-compress-program', IS_WINDOWS ? '"zstd -T0"' : 'zstdmt'];
+            default:
+                return ['-z'];
+        }
+    });
+}
+// Executes all commands as separate processes
+function execCommands(commands, cwd) {
+    return __awaiter(this, void 0, void 0, function* () {
+        for (const command of commands) {
+            try {
+                yield (0, exec_1.exec)(command, undefined, {
+                    cwd,
+                    env: Object.assign(Object.assign({}, process.env), { MSYS: 'winsymlinks:nativestrict' })
+                });
+            }
+            catch (error) {
+                throw new Error(`${command.split(' ')[0]} failed with error: ${error === null || error === void 0 ? void 0 : error.message}`);
+            }
+        }
+    });
+}
+// List the contents of a tar
+function listTar(archivePath, compressionMethod) {
+    return __awaiter(this, void 0, void 0, function* () {
+        const commands = yield getCommands(compressionMethod, 'list', archivePath);
+        yield execCommands(commands);
+    });
+}
+exports.listTar = listTar;
+// Extract a tar
+function extractTar(archivePath, compressionMethod) {
+    return __awaiter(this, void 0, void 0, function* () {
+        // Create directory to extract tar into
+        const workingDirectory = getWorkingDirectory();
+        yield io.mkdirP(workingDirectory);
+        const commands = yield getCommands(compressionMethod, 'extract', archivePath);
+        yield execCommands(commands);
+    });
+}
+exports.extractTar = extractTar;
+// Create a tar
+function createTar(archiveFolder, sourceDirectories, compressionMethod) {
+    return __awaiter(this, void 0, void 0, function* () {
+        // Write source directories to manifest.txt to avoid command length limits
+        (0, fs_1.writeFileSync)(path.join(archiveFolder, constants_1.ManifestFilename), sourceDirectories.join('\n'));
+        const commands = yield getCommands(compressionMethod, 'create');
+        yield execCommands(commands, archiveFolder);
+    });
+}
+exports.createTar = createTar;
+//# sourceMappingURL=tar.js.map
+
+/***/ }),
+
+/***/ 8820:
+/***/ (function(__unused_webpack_module, exports, __nccwpck_require__) {
+
+
+var __createBinding = (this && this.__createBinding) || (Object.create ? (function(o, m, k, k2) {
+    if (k2 === undefined) k2 = k;
+    var desc = Object.getOwnPropertyDescriptor(m, k);
+    if (!desc || ("get" in desc ? !m.__esModule : desc.writable || desc.configurable)) {
+      desc = { enumerable: true, get: function() { return m[k]; } };
+    }
+    Object.defineProperty(o, k2, desc);
+}) : (function(o, m, k, k2) {
+    if (k2 === undefined) k2 = k;
+    o[k2] = m[k];
+}));
+var __setModuleDefault = (this && this.__setModuleDefault) || (Object.create ? (function(o, v) {
+    Object.defineProperty(o, "default", { enumerable: true, value: v });
+}) : function(o, v) {
+    o["default"] = v;
+});
+var __importStar = (this && this.__importStar) || function (mod) {
+    if (mod && mod.__esModule) return mod;
+    var result = {};
+    if (mod != null) for (var k in mod) if (k !== "default" && Object.prototype.hasOwnProperty.call(mod, k)) __createBinding(result, mod, k);
+    __setModuleDefault(result, mod);
+    return result;
+};
+Object.defineProperty(exports, "__esModule", ({ value: true }));
+exports.getDownloadOptions = exports.getUploadOptions = void 0;
+const core = __importStar(__nccwpck_require__(9999));
+/**
+ * Returns a copy of the upload options with defaults filled in.
+ *
+ * @param copy the original upload options
+ */
+function getUploadOptions(copy) {
+    const result = {
+        uploadConcurrency: 4,
+        uploadChunkSize: 32 * 1024 * 1024
+    };
+    if (copy) {
+        if (typeof copy.uploadConcurrency === 'number') {
+            result.uploadConcurrency = copy.uploadConcurrency;
+        }
+        if (typeof copy.uploadChunkSize === 'number') {
+            result.uploadChunkSize = copy.uploadChunkSize;
+        }
+    }
+    core.debug(`Upload concurrency: ${result.uploadConcurrency}`);
+    core.debug(`Upload chunk size: ${result.uploadChunkSize}`);
+    return result;
+}
+exports.getUploadOptions = getUploadOptions;
+/**
+ * Returns a copy of the download options with defaults filled in.
+ *
+ * @param copy the original download options
+ */
+function getDownloadOptions(copy) {
+    const result = {
+        useAzureSdk: false,
+        concurrentBlobDownloads: true,
+        downloadConcurrency: 8,
+        timeoutInMs: 30000,
+        segmentTimeoutInMs: 600000,
+        lookupOnly: false
+    };
+    if (copy) {
+        if (typeof copy.useAzureSdk === 'boolean') {
+            result.useAzureSdk = copy.useAzureSdk;
+        }
+        if (typeof copy.concurrentBlobDownloads === 'boolean') {
+            result.concurrentBlobDownloads = copy.concurrentBlobDownloads;
+        }
+        if (typeof copy.downloadConcurrency === 'number') {
+            result.downloadConcurrency = copy.downloadConcurrency;
+        }
+        if (typeof copy.timeoutInMs === 'number') {
+            result.timeoutInMs = copy.timeoutInMs;
+        }
+        if (typeof copy.segmentTimeoutInMs === 'number') {
+            result.segmentTimeoutInMs = copy.segmentTimeoutInMs;
+        }
+        if (typeof copy.lookupOnly === 'boolean') {
+            result.lookupOnly = copy.lookupOnly;
+        }
+    }
+    const segmentDownloadTimeoutMins = process.env['SEGMENT_DOWNLOAD_TIMEOUT_MINS'];
+    if (segmentDownloadTimeoutMins &&
+        !isNaN(Number(segmentDownloadTimeoutMins)) &&
+        isFinite(Number(segmentDownloadTimeoutMins))) {
+        result.segmentTimeoutInMs = Number(segmentDownloadTimeoutMins) * 60 * 1000;
+    }
+    core.debug(`Use Azure SDK: ${result.useAzureSdk}`);
+    core.debug(`Download concurrency: ${result.downloadConcurrency}`);
+    core.debug(`Request timeout (ms): ${result.timeoutInMs}`);
+    core.debug(`Cache segment download timeout mins env var: ${process.env['SEGMENT_DOWNLOAD_TIMEOUT_MINS']}`);
+    core.debug(`Segment download timeout (ms): ${result.segmentTimeoutInMs}`);
+    core.debug(`Lookup only: ${result.lookupOnly}`);
+    return result;
+}
+exports.getDownloadOptions = getDownloadOptions;
+//# sourceMappingURL=options.js.map
+
+/***/ }),
+
+/***/ 3191:
+/***/ (function(__unused_webpack_module, exports, __nccwpck_require__) {
+
+
+var __createBinding = (this && this.__createBinding) || (Object.create ? (function(o, m, k, k2) {
+    if (k2 === undefined) k2 = k;
+    var desc = Object.getOwnPropertyDescriptor(m, k);
+    if (!desc || ("get" in desc ? !m.__esModule : desc.writable || desc.configurable)) {
+      desc = { enumerable: true, get: function() { return m[k]; } };
+    }
+    Object.defineProperty(o, k2, desc);
+}) : (function(o, m, k, k2) {
+    if (k2 === undefined) k2 = k;
+    o[k2] = m[k];
+}));
+var __setModuleDefault = (this && this.__setModuleDefault) || (Object.create ? (function(o, v) {
+    Object.defineProperty(o, "default", { enumerable: true, value: v });
+}) : function(o, v) {
+    o["default"] = v;
+});
+var __importStar = (this && this.__importStar) || function (mod) {
+    if (mod && mod.__esModule) return mod;
+    var result = {};
+    if (mod != null) for (var k in mod) if (k !== "default" && Object.prototype.hasOwnProperty.call(mod, k)) __createBinding(result, mod, k);
+    __setModuleDefault(result, mod);
+    return result;
+};
+Object.defineProperty(exports, "__esModule", ({ value: true }));
+exports.issue = exports.issueCommand = void 0;
+const os = __importStar(__nccwpck_require__(857));
+const utils_1 = __nccwpck_require__(6283);
+/**
+ * Commands
+ *
+ * Command Format:
+ *   ::name key=value,key=value::message
+ *
+ * Examples:
+ *   ::warning::This is the message
+ *   ::set-env name=MY_VAR::some value
+ */
+function issueCommand(command, properties, message) {
+    const cmd = new Command(command, properties, message);
+    process.stdout.write(cmd.toString() + os.EOL);
+}
+exports.issueCommand = issueCommand;
+function issue(name, message = '') {
+    issueCommand(name, {}, message);
+}
+exports.issue = issue;
+const CMD_STRING = '::';
+class Command {
+    constructor(command, properties, message) {
+        if (!command) {
+            command = 'missing.command';
+        }
+        this.command = command;
+        this.properties = properties;
+        this.message = message;
+    }
+    toString() {
+        let cmdStr = CMD_STRING + this.command;
+        if (this.properties && Object.keys(this.properties).length > 0) {
+            cmdStr += ' ';
+            let first = true;
+            for (const key in this.properties) {
+                if (this.properties.hasOwnProperty(key)) {
+                    const val = this.properties[key];
+                    if (val) {
+                        if (first) {
+                            first = false;
+                        }
+                        else {
+                            cmdStr += ',';
+                        }
+                        cmdStr += `${key}=${escapeProperty(val)}`;
+                    }
+                }
+            }
+        }
+        cmdStr += `${CMD_STRING}${escapeData(this.message)}`;
+        return cmdStr;
+    }
+}
+function escapeData(s) {
+    return (0, utils_1.toCommandValue)(s)
+        .replace(/%/g, '%25')
+        .replace(/\r/g, '%0D')
+        .replace(/\n/g, '%0A');
+}
+function escapeProperty(s) {
+    return (0, utils_1.toCommandValue)(s)
+        .replace(/%/g, '%25')
+        .replace(/\r/g, '%0D')
+        .replace(/\n/g, '%0A')
+        .replace(/:/g, '%3A')
+        .replace(/,/g, '%2C');
+}
+//# sourceMappingURL=command.js.map
+
+/***/ }),
+
+/***/ 9999:
+/***/ (function(__unused_webpack_module, exports, __nccwpck_require__) {
+
+
+var __createBinding = (this && this.__createBinding) || (Object.create ? (function(o, m, k, k2) {
+    if (k2 === undefined) k2 = k;
+    var desc = Object.getOwnPropertyDescriptor(m, k);
+    if (!desc || ("get" in desc ? !m.__esModule : desc.writable || desc.configurable)) {
+      desc = { enumerable: true, get: function() { return m[k]; } };
+    }
+    Object.defineProperty(o, k2, desc);
+}) : (function(o, m, k, k2) {
+    if (k2 === undefined) k2 = k;
+    o[k2] = m[k];
+}));
+var __setModuleDefault = (this && this.__setModuleDefault) || (Object.create ? (function(o, v) {
+    Object.defineProperty(o, "default", { enumerable: true, value: v });
+}) : function(o, v) {
+    o["default"] = v;
+});
+var __importStar = (this && this.__importStar) || function (mod) {
+    if (mod && mod.__esModule) return mod;
+    var result = {};
+    if (mod != null) for (var k in mod) if (k !== "default" && Object.prototype.hasOwnProperty.call(mod, k)) __createBinding(result, mod, k);
+    __setModuleDefault(result, mod);
+    return result;
+};
+var __awaiter = (this && this.__awaiter) || function (thisArg, _arguments, P, generator) {
+    function adopt(value) { return value instanceof P ? value : new P(function (resolve) { resolve(value); }); }
+    return new (P || (P = Promise))(function (resolve, reject) {
+        function fulfilled(value) { try { step(generator.next(value)); } catch (e) { reject(e); } }
+        function rejected(value) { try { step(generator["throw"](value)); } catch (e) { reject(e); } }
+        function step(result) { result.done ? resolve(result.value) : adopt(result.value).then(fulfilled, rejected); }
+        step((generator = generator.apply(thisArg, _arguments || [])).next());
+    });
+};
+Object.defineProperty(exports, "__esModule", ({ value: true }));
+exports.platform = exports.toPlatformPath = exports.toWin32Path = exports.toPosixPath = exports.markdownSummary = exports.summary = exports.getIDToken = exports.getState = exports.saveState = exports.group = exports.endGroup = exports.startGroup = exports.info = exports.notice = exports.warning = exports.error = exports.debug = exports.isDebug = exports.setFailed = exports.setCommandEcho = exports.setOutput = exports.getBooleanInput = exports.getMultilineInput = exports.getInput = exports.addPath = exports.setSecret = exports.exportVariable = exports.ExitCode = void 0;
+const command_1 = __nccwpck_require__(3191);
+const file_command_1 = __nccwpck_require__(9058);
+const utils_1 = __nccwpck_require__(6283);
+const os = __importStar(__nccwpck_require__(857));
+const path = __importStar(__nccwpck_require__(6928));
+const oidc_utils_1 = __nccwpck_require__(3549);
+/**
+ * The code to exit an action
+ */
+var ExitCode;
+(function (ExitCode) {
+    /**
+     * A code indicating that the action was successful
+     */
+    ExitCode[ExitCode["Success"] = 0] = "Success";
+    /**
+     * A code indicating that the action was a failure
+     */
+    ExitCode[ExitCode["Failure"] = 1] = "Failure";
+})(ExitCode || (exports.ExitCode = ExitCode = {}));
+//-----------------------------------------------------------------------
+// Variables
+//-----------------------------------------------------------------------
+/**
+ * Sets env variable for this action and future actions in the job
+ * @param name the name of the variable to set
+ * @param val the value of the variable. Non-string values will be converted to a string via JSON.stringify
+ */
+// eslint-disable-next-line @typescript-eslint/no-explicit-any
+function exportVariable(name, val) {
+    const convertedVal = (0, utils_1.toCommandValue)(val);
+    process.env[name] = convertedVal;
+    const filePath = process.env['GITHUB_ENV'] || '';
+    if (filePath) {
+        return (0, file_command_1.issueFileCommand)('ENV', (0, file_command_1.prepareKeyValueMessage)(name, val));
+    }
+    (0, command_1.issueCommand)('set-env', { name }, convertedVal);
+}
+exports.exportVariable = exportVariable;
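+// Usage sketch, assuming the action's own code loads this module as `core`
+// (e.g. `const core = require('@actions/core')`):
+//   core.exportVariable('MY_FLAG', true); // later steps in the job see MY_FLAG=true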
+/**
+ * Registers a secret which will get masked from logs
+ * @param secret value of the secret
+ */
+function setSecret(secret) {
+    (0, command_1.issueCommand)('add-mask', {}, secret);
+}
+exports.setSecret = setSecret;
+/**
+ * Prepends inputPath to the PATH (for this action and future actions)
+ * @param inputPath
+ */
+function addPath(inputPath) {
+    const filePath = process.env['GITHUB_PATH'] || '';
+    if (filePath) {
+        (0, file_command_1.issueFileCommand)('PATH', inputPath);
+    }
+    else {
+        (0, command_1.issueCommand)('add-path', {}, inputPath);
+    }
+    process.env['PATH'] = `${inputPath}${path.delimiter}${process.env['PATH']}`;
+}
+exports.addPath = addPath;
+/**
+ * Gets the value of an input.
+ * Unless trimWhitespace is set to false in InputOptions, the value is also trimmed.
+ * Returns an empty string if the value is not defined.
+ *
+ * @param     name     name of the input to get
+ * @param     options  optional. See InputOptions.
+ * @returns   string
+ */
+function getInput(name, options) {
+    const val = process.env[`INPUT_${name.replace(/ /g, '_').toUpperCase()}`] || '';
+    if (options && options.required && !val) {
+        throw new Error(`Input required and not supplied: ${name}`);
+    }
+    if (options && options.trimWhitespace === false) {
+        return val;
+    }
+    return val.trim();
+}
+exports.getInput = getInput;
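+// Example, assuming an input named `github-token` is declared in the action.yml:
+//   const token = core.getInput('github-token', { required: true });
+// The value is read from the INPUT_<upper-cased name> environment variable set by the
+// runner, with spaces in the name replaced by underscores.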
+/**
+ * Gets the values of a multiline input. Each value is also trimmed.
+ *
+ * @param     name     name of the input to get
+ * @param     options  optional. See InputOptions.
+ * @returns   string[]
+ *
+ */
+function getMultilineInput(name, options) {
+    const inputs = getInput(name, options)
+        .split('\n')
+        .filter(x => x !== '');
+    if (options && options.trimWhitespace === false) {
+        return inputs;
+    }
+    return inputs.map(input => input.trim());
+}
+exports.getMultilineInput = getMultilineInput;
+/**
+ * Gets the input value of the boolean type in the YAML 1.2 "core schema" specification.
+ * Supported boolean values: `true | True | TRUE | false | False | FALSE`.

+ * The return value is also in boolean type.
+ * ref: https://yaml.org/spec/1.2/spec.html#id2804923
+ *
+ * @param     name     name of the input to get
+ * @param     options  optional. See InputOptions.
+ * @returns   boolean
+ */
+function getBooleanInput(name, options) {
+    const trueValue = ['true', 'True', 'TRUE'];
+    const falseValue = ['false', 'False', 'FALSE'];
+    const val = getInput(name, options);
+    if (trueValue.includes(val))
+        return true;
+    if (falseValue.includes(val))
+        return false;
+    throw new TypeError(`Input does not meet YAML 1.2 "Core Schema" specification: ${name}\n` +
+        `Support boolean input list: \`true | True | TRUE | false | False | FALSE\``);
+}
+exports.getBooleanInput = getBooleanInput;
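+// Example: with `dry-run: 'True'` under `with:` in the workflow,
+//   core.getBooleanInput('dry-run') // => true
+// Any value other than the six literals above throws the TypeError.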
+/**
+ * Sets the value of an output.
+ *
+ * @param     name     name of the output to set
+ * @param     value    value to store. Non-string values will be converted to a string via JSON.stringify
+ */
+// eslint-disable-next-line @typescript-eslint/no-explicit-any
+function setOutput(name, value) {
+    const filePath = process.env['GITHUB_OUTPUT'] || '';
+    if (filePath) {
+        return (0, file_command_1.issueFileCommand)('OUTPUT', (0, file_command_1.prepareKeyValueMessage)(name, value));
+    }
+    process.stdout.write(os.EOL);
+    (0, command_1.issueCommand)('set-output', { name }, (0, utils_1.toCommandValue)(value));
+}
+exports.setOutput = setOutput;
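+// Example (output name is illustrative):
+//   core.setOutput('artifact-path', './dist/app.tgz');
+// On current runners GITHUB_OUTPUT is set, so the value is appended to that file as a
+// key<<delimiter block; otherwise the legacy ::set-output command is written to stdout.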
+/**
+ * Enables or disables the echoing of commands into stdout for the rest of the step.
+ * Echoing is disabled by default if ACTIONS_STEP_DEBUG is not set.
+ *
+ */
+function setCommandEcho(enabled) {
+    (0, command_1.issue)('echo', enabled ? 'on' : 'off');
+}
+exports.setCommandEcho = setCommandEcho;
+//-----------------------------------------------------------------------
+// Results
+//-----------------------------------------------------------------------
+/**
+ * Sets the action status to failed.
+ * When the action exits it will be with an exit code of 1
+ * @param message error issue message
+ */
+function setFailed(message) {
+    process.exitCode = ExitCode.Failure;
+    error(message);
+}
+exports.setFailed = setFailed;
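+// Typical use at the top level of an action's entry point (run() standing in for the
+// action's main function):
+//   run().catch(err => core.setFailed(err instanceof Error ? err : String(err)));
+// setFailed records the failure (exit code 1 plus an error annotation) but does not
+// terminate the process.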
+//-----------------------------------------------------------------------
+// Logging Commands
+//-----------------------------------------------------------------------
+/**
+ * Gets whether Actions Step Debug is on or not
+ */
+function isDebug() {
+    return process.env['RUNNER_DEBUG'] === '1';
+}
+exports.isDebug = isDebug;
+/**
+ * Writes debug message to user log
+ * @param message debug message
+ */
+function debug(message) {
+    (0, command_1.issueCommand)('debug', {}, message);
+}
+exports.debug = debug;
+/**
+ * Adds an error issue
+ * @param message error issue message. Errors will be converted to string via toString()
+ * @param properties optional properties to add to the annotation.
+ */
+function error(message, properties = {}) {
+    (0, command_1.issueCommand)('error', (0, utils_1.toCommandProperties)(properties), message instanceof Error ? message.toString() : message);
+}
+exports.error = error;
+/**
+ * Adds a warning issue
+ * @param message warning issue message. Errors will be converted to string via toString()
+ * @param properties optional properties to add to the annotation.
+ */
+function warning(message, properties = {}) {
+    (0, command_1.issueCommand)('warning', (0, utils_1.toCommandProperties)(properties), message instanceof Error ? message.toString() : message);
+}
+exports.warning = warning;
+/**
+ * Adds a notice issue
+ * @param message notice issue message. Errors will be converted to string via toString()
+ * @param properties optional properties to add to the annotation.
+ */
+function notice(message, properties = {}) {
+    (0, command_1.issueCommand)('notice', (0, utils_1.toCommandProperties)(properties), message instanceof Error ? message.toString() : message);
+}
+exports.notice = notice;
+/**
+ * Writes info to log with console.log.
+ * @param message info message
+ */
+function info(message) {
+    process.stdout.write(message + os.EOL);
+}
+exports.info = info;
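+// Annotation sketch (the file/line values are illustrative; see toCommandProperties
+// further down in this bundle for the full property set):
+//   core.warning('uses a deprecated option', { file: 'src/main.ts', startLine: 42 });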
+/**
+ * Begin an output group.
+ *
+ * Output until the next `endGroup` will be foldable in this group
+ *
+ * @param name The name of the output group
+ */
+function startGroup(name) {
+    (0, command_1.issue)('group', name);
+}
+exports.startGroup = startGroup;
+/**
+ * End an output group.
+ */
+function endGroup() {
+    (0, command_1.issue)('endgroup');
+}
+exports.endGroup = endGroup;
+/**
+ * Wrap an asynchronous function call in a group.
+ *
+ * Returns the same type as the function itself.
+ *
+ * @param name The name of the group
+ * @param fn The function to wrap in the group
+ */
+function group(name, fn) {
+    return __awaiter(this, void 0, void 0, function* () {
+        startGroup(name);
+        let result;
+        try {
+            result = yield fn();
+        }
+        finally {
+            endGroup();
+        }
+        return result;
+    });
+}
+exports.group = group;
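+// Example, assuming @actions/exec is also available as `exec`:
+//   await core.group('npm ci', async () => exec.exec('npm', ['ci']));
+// The group is always closed, even if the wrapped function throws.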
+//-----------------------------------------------------------------------
+// Wrapper action state
+//-----------------------------------------------------------------------
+/**
+ * Saves state for the current action; the state can only be retrieved by this action's post-job execution.
+ *
+ * @param     name     name of the state to store
+ * @param     value    value to store. Non-string values will be converted to a string via JSON.stringify
+ */
+// eslint-disable-next-line @typescript-eslint/no-explicit-any
+function saveState(name, value) {
+    const filePath = process.env['GITHUB_STATE'] || '';
+    if (filePath) {
+        return (0, file_command_1.issueFileCommand)('STATE', (0, file_command_1.prepareKeyValueMessage)(name, value));
+    }
+    (0, command_1.issueCommand)('save-state', { name }, (0, utils_1.toCommandValue)(value));
+}
+exports.saveState = saveState;
+/**
+ * Gets the value of a state set by this action's main execution.
+ *
+ * @param     name     name of the state to get
+ * @returns   string
+ */
+function getState(name) {
+    return process.env[`STATE_${name}`] || '';
+}
+exports.getState = getState;
+function getIDToken(aud) {
+    return __awaiter(this, void 0, void 0, function* () {
+        return yield oidc_utils_1.OidcClient.getIDToken(aud);
+    });
+}
+exports.getIDToken = getIDToken;
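+// Sketch: a main step can save state, e.g. core.saveState('server-pid', pid), and the
+// action's `post:` step reads it back with core.getState('server-pid') (always a string).
+// getIDToken requires the job to grant `id-token: write` permissions; the returned token
+// is registered as a secret via setSecret before being returned.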
+/**
+ * Summary exports
+ */
+var summary_1 = __nccwpck_require__(1638);
+Object.defineProperty(exports, "summary", ({ enumerable: true, get: function () { return summary_1.summary; } }));
+/**
+ * @deprecated use core.summary
+ */
+var summary_2 = __nccwpck_require__(1638);
+Object.defineProperty(exports, "markdownSummary", ({ enumerable: true, get: function () { return summary_2.markdownSummary; } }));
+/**
+ * Path exports
+ */
+var path_utils_1 = __nccwpck_require__(539);
+Object.defineProperty(exports, "toPosixPath", ({ enumerable: true, get: function () { return path_utils_1.toPosixPath; } }));
+Object.defineProperty(exports, "toWin32Path", ({ enumerable: true, get: function () { return path_utils_1.toWin32Path; } }));
+Object.defineProperty(exports, "toPlatformPath", ({ enumerable: true, get: function () { return path_utils_1.toPlatformPath; } }));
+/**
+ * Platform utilities exports
+ */
+exports.platform = __importStar(__nccwpck_require__(2563));
+//# sourceMappingURL=core.js.map
+
+/***/ }),
+
+/***/ 9058:
+/***/ (function(__unused_webpack_module, exports, __nccwpck_require__) {
+
+
+// For internal use, subject to change.
+var __createBinding = (this && this.__createBinding) || (Object.create ? (function(o, m, k, k2) {
+    if (k2 === undefined) k2 = k;
+    var desc = Object.getOwnPropertyDescriptor(m, k);
+    if (!desc || ("get" in desc ? !m.__esModule : desc.writable || desc.configurable)) {
+      desc = { enumerable: true, get: function() { return m[k]; } };
+    }
+    Object.defineProperty(o, k2, desc);
+}) : (function(o, m, k, k2) {
+    if (k2 === undefined) k2 = k;
+    o[k2] = m[k];
+}));
+var __setModuleDefault = (this && this.__setModuleDefault) || (Object.create ? (function(o, v) {
+    Object.defineProperty(o, "default", { enumerable: true, value: v });
+}) : function(o, v) {
+    o["default"] = v;
+});
+var __importStar = (this && this.__importStar) || function (mod) {
+    if (mod && mod.__esModule) return mod;
+    var result = {};
+    if (mod != null) for (var k in mod) if (k !== "default" && Object.prototype.hasOwnProperty.call(mod, k)) __createBinding(result, mod, k);
+    __setModuleDefault(result, mod);
+    return result;
+};
+Object.defineProperty(exports, "__esModule", ({ value: true }));
+exports.prepareKeyValueMessage = exports.issueFileCommand = void 0;
+// We use any as a valid input type
+/* eslint-disable @typescript-eslint/no-explicit-any */
+const crypto = __importStar(__nccwpck_require__(6982));
+const fs = __importStar(__nccwpck_require__(9896));
+const os = __importStar(__nccwpck_require__(857));
+const utils_1 = __nccwpck_require__(6283);
+function issueFileCommand(command, message) {
+    const filePath = process.env[`GITHUB_${command}`];
+    if (!filePath) {
+        throw new Error(`Unable to find environment variable for file command ${command}`);
+    }
+    if (!fs.existsSync(filePath)) {
+        throw new Error(`Missing file at path: ${filePath}`);
+    }
+    fs.appendFileSync(filePath, `${(0, utils_1.toCommandValue)(message)}${os.EOL}`, {
+        encoding: 'utf8'
+    });
+}
+exports.issueFileCommand = issueFileCommand;
+function prepareKeyValueMessage(key, value) {
+    const delimiter = `ghadelimiter_${crypto.randomUUID()}`;
+    const convertedValue = (0, utils_1.toCommandValue)(value);
+    // These should realistically never happen, but just in case someone finds a
+    // way to exploit uuid generation let's not allow keys or values that contain
+    // the delimiter.
+    if (key.includes(delimiter)) {
+        throw new Error(`Unexpected input: name should not contain the delimiter "${delimiter}"`);
+    }
+    if (convertedValue.includes(delimiter)) {
+        throw new Error(`Unexpected input: value should not contain the delimiter "${delimiter}"`);
+    }
+    return `${key}<<${delimiter}${os.EOL}${convertedValue}${os.EOL}${delimiter}`;
+}
+exports.prepareKeyValueMessage = prepareKeyValueMessage;
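+// The block appended to e.g. GITHUB_OUTPUT therefore looks like:
+//   my-output<<ghadelimiter_<random-uuid>
+//   ...value (possibly multi-line)...
+//   ghadelimiter_<random-uuid>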
+//# sourceMappingURL=file-command.js.map
+
+/***/ }),
+
+/***/ 3549:
+/***/ (function(__unused_webpack_module, exports, __nccwpck_require__) {
+
+
+var __awaiter = (this && this.__awaiter) || function (thisArg, _arguments, P, generator) {
+    function adopt(value) { return value instanceof P ? value : new P(function (resolve) { resolve(value); }); }
+    return new (P || (P = Promise))(function (resolve, reject) {
+        function fulfilled(value) { try { step(generator.next(value)); } catch (e) { reject(e); } }
+        function rejected(value) { try { step(generator["throw"](value)); } catch (e) { reject(e); } }
+        function step(result) { result.done ? resolve(result.value) : adopt(result.value).then(fulfilled, rejected); }
+        step((generator = generator.apply(thisArg, _arguments || [])).next());
+    });
+};
+Object.defineProperty(exports, "__esModule", ({ value: true }));
+exports.OidcClient = void 0;
+const http_client_1 = __nccwpck_require__(787);
+const auth_1 = __nccwpck_require__(3673);
+const core_1 = __nccwpck_require__(9999);
+class OidcClient {
+    static createHttpClient(allowRetry = true, maxRetry = 10) {
+        const requestOptions = {
+            allowRetries: allowRetry,
+            maxRetries: maxRetry
+        };
+        return new http_client_1.HttpClient('actions/oidc-client', [new auth_1.BearerCredentialHandler(OidcClient.getRequestToken())], requestOptions);
+    }
+    static getRequestToken() {
+        const token = process.env['ACTIONS_ID_TOKEN_REQUEST_TOKEN'];
+        if (!token) {
+            throw new Error('Unable to get ACTIONS_ID_TOKEN_REQUEST_TOKEN env variable');
+        }
+        return token;
+    }
+    static getIDTokenUrl() {
+        const runtimeUrl = process.env['ACTIONS_ID_TOKEN_REQUEST_URL'];
+        if (!runtimeUrl) {
+            throw new Error('Unable to get ACTIONS_ID_TOKEN_REQUEST_URL env variable');
+        }
+        return runtimeUrl;
+    }
+    static getCall(id_token_url) {
+        var _a;
+        return __awaiter(this, void 0, void 0, function* () {
+            const httpclient = OidcClient.createHttpClient();
+            const res = yield httpclient
+                .getJson(id_token_url)
+                .catch(error => {
+                throw new Error(`Failed to get ID Token. \n 
+        Error Code : ${error.statusCode}\n 
+        Error Message: ${error.message}`);
+            });
+            const id_token = (_a = res.result) === null || _a === void 0 ? void 0 : _a.value;
+            if (!id_token) {
+                throw new Error('Response json body does not have an ID Token field');
+            }
+            return id_token;
+        });
+    }
+    static getIDToken(audience) {
+        return __awaiter(this, void 0, void 0, function* () {
+            try {
+                // New ID Token is requested from action service
+                let id_token_url = OidcClient.getIDTokenUrl();
+                if (audience) {
+                    const encodedAudience = encodeURIComponent(audience);
+                    id_token_url = `${id_token_url}&audience=${encodedAudience}`;
+                }
+                (0, core_1.debug)(`ID token url is ${id_token_url}`);
+                const id_token = yield OidcClient.getCall(id_token_url);
+                (0, core_1.setSecret)(id_token);
+                return id_token;
+            }
+            catch (error) {
+                throw new Error(`Error message: ${error.message}`);
+            }
+        });
+    }
+}
+exports.OidcClient = OidcClient;
+//# sourceMappingURL=oidc-utils.js.map
+
+/***/ }),
+
+/***/ 539:
+/***/ (function(__unused_webpack_module, exports, __nccwpck_require__) {
+
+
+var __createBinding = (this && this.__createBinding) || (Object.create ? (function(o, m, k, k2) {
+    if (k2 === undefined) k2 = k;
+    var desc = Object.getOwnPropertyDescriptor(m, k);
+    if (!desc || ("get" in desc ? !m.__esModule : desc.writable || desc.configurable)) {
+      desc = { enumerable: true, get: function() { return m[k]; } };
+    }
+    Object.defineProperty(o, k2, desc);
+}) : (function(o, m, k, k2) {
+    if (k2 === undefined) k2 = k;
+    o[k2] = m[k];
+}));
+var __setModuleDefault = (this && this.__setModuleDefault) || (Object.create ? (function(o, v) {
+    Object.defineProperty(o, "default", { enumerable: true, value: v });
+}) : function(o, v) {
+    o["default"] = v;
+});
+var __importStar = (this && this.__importStar) || function (mod) {
+    if (mod && mod.__esModule) return mod;
+    var result = {};
+    if (mod != null) for (var k in mod) if (k !== "default" && Object.prototype.hasOwnProperty.call(mod, k)) __createBinding(result, mod, k);
+    __setModuleDefault(result, mod);
+    return result;
+};
+Object.defineProperty(exports, "__esModule", ({ value: true }));
+exports.toPlatformPath = exports.toWin32Path = exports.toPosixPath = void 0;
+const path = __importStar(__nccwpck_require__(6928));
+/**
+ * toPosixPath converts the given path to the posix form. On Windows, \\ will be
+ * replaced with /.
+ *
+ * @param pth. Path to transform.
+ * @return string Posix path.
+ */
+function toPosixPath(pth) {
+    return pth.replace(/[\\]/g, '/');
+}
+exports.toPosixPath = toPosixPath;
+/**
+ * toWin32Path converts the given path to the win32 form. On Linux, / will be
+ * replaced with \\.
+ *
+ * @param pth. Path to transform.
+ * @return string Win32 path.
+ */
+function toWin32Path(pth) {
+    return pth.replace(/[/]/g, '\\');
+}
+exports.toWin32Path = toWin32Path;
+/**
+ * toPlatformPath converts the given path to a platform-specific path. It does
+ * this by replacing instances of / and \ with the platform-specific path
+ * separator.
+ *
+ * @param pth The path to platformize.
+ * @return string The platform-specific path.
+ */
+function toPlatformPath(pth) {
+    return pth.replace(/[/\\]/g, path.sep);
+}
+exports.toPlatformPath = toPlatformPath;
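+// Example: toPosixPath('C:\\tools\\bin') === 'C:/tools/bin' and, on Windows,
+// toPlatformPath('a/b\\c') === 'a\\b\\c' (both separators collapse to path.sep).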
+//# sourceMappingURL=path-utils.js.map
+
+/***/ }),
+
+/***/ 2563:
+/***/ (function(__unused_webpack_module, exports, __nccwpck_require__) {
+
+
+var __createBinding = (this && this.__createBinding) || (Object.create ? (function(o, m, k, k2) {
+    if (k2 === undefined) k2 = k;
+    var desc = Object.getOwnPropertyDescriptor(m, k);
+    if (!desc || ("get" in desc ? !m.__esModule : desc.writable || desc.configurable)) {
+      desc = { enumerable: true, get: function() { return m[k]; } };
+    }
+    Object.defineProperty(o, k2, desc);
+}) : (function(o, m, k, k2) {
+    if (k2 === undefined) k2 = k;
+    o[k2] = m[k];
+}));
+var __setModuleDefault = (this && this.__setModuleDefault) || (Object.create ? (function(o, v) {
+    Object.defineProperty(o, "default", { enumerable: true, value: v });
+}) : function(o, v) {
+    o["default"] = v;
+});
+var __importStar = (this && this.__importStar) || function (mod) {
+    if (mod && mod.__esModule) return mod;
+    var result = {};
+    if (mod != null) for (var k in mod) if (k !== "default" && Object.prototype.hasOwnProperty.call(mod, k)) __createBinding(result, mod, k);
+    __setModuleDefault(result, mod);
+    return result;
+};
+var __awaiter = (this && this.__awaiter) || function (thisArg, _arguments, P, generator) {
+    function adopt(value) { return value instanceof P ? value : new P(function (resolve) { resolve(value); }); }
+    return new (P || (P = Promise))(function (resolve, reject) {
+        function fulfilled(value) { try { step(generator.next(value)); } catch (e) { reject(e); } }
+        function rejected(value) { try { step(generator["throw"](value)); } catch (e) { reject(e); } }
+        function step(result) { result.done ? resolve(result.value) : adopt(result.value).then(fulfilled, rejected); }
+        step((generator = generator.apply(thisArg, _arguments || [])).next());
+    });
+};
+var __importDefault = (this && this.__importDefault) || function (mod) {
+    return (mod && mod.__esModule) ? mod : { "default": mod };
+};
+Object.defineProperty(exports, "__esModule", ({ value: true }));
+exports.getDetails = exports.isLinux = exports.isMacOS = exports.isWindows = exports.arch = exports.platform = void 0;
+const os_1 = __importDefault(__nccwpck_require__(857));
+const exec = __importStar(__nccwpck_require__(8872));
+const getWindowsInfo = () => __awaiter(void 0, void 0, void 0, function* () {
+    const { stdout: version } = yield exec.getExecOutput('powershell -command "(Get-CimInstance -ClassName Win32_OperatingSystem).Version"', undefined, {
+        silent: true
+    });
+    const { stdout: name } = yield exec.getExecOutput('powershell -command "(Get-CimInstance -ClassName Win32_OperatingSystem).Caption"', undefined, {
+        silent: true
+    });
+    return {
+        name: name.trim(),
+        version: version.trim()
+    };
+});
+const getMacOsInfo = () => __awaiter(void 0, void 0, void 0, function* () {
+    var _a, _b, _c, _d;
+    const { stdout } = yield exec.getExecOutput('sw_vers', undefined, {
+        silent: true
+    });
+    const version = (_b = (_a = stdout.match(/ProductVersion:\s*(.+)/)) === null || _a === void 0 ? void 0 : _a[1]) !== null && _b !== void 0 ? _b : '';
+    const name = (_d = (_c = stdout.match(/ProductName:\s*(.+)/)) === null || _c === void 0 ? void 0 : _c[1]) !== null && _d !== void 0 ? _d : '';
+    return {
+        name,
+        version
+    };
+});
+const getLinuxInfo = () => __awaiter(void 0, void 0, void 0, function* () {
+    const { stdout } = yield exec.getExecOutput('lsb_release', ['-i', '-r', '-s'], {
+        silent: true
+    });
+    const [name, version] = stdout.trim().split('\n');
+    return {
+        name,
+        version
+    };
+});
+exports.platform = os_1.default.platform();
+exports.arch = os_1.default.arch();
+exports.isWindows = exports.platform === 'win32';
+exports.isMacOS = exports.platform === 'darwin';
+exports.isLinux = exports.platform === 'linux';
+function getDetails() {
+    return __awaiter(this, void 0, void 0, function* () {
+        return Object.assign(Object.assign({}, (yield (exports.isWindows
+            ? getWindowsInfo()
+            : exports.isMacOS
+                ? getMacOsInfo()
+                : getLinuxInfo()))), { platform: exports.platform,
+            arch: exports.arch,
+            isWindows: exports.isWindows,
+            isMacOS: exports.isMacOS,
+            isLinux: exports.isLinux });
+    });
+}
+exports.getDetails = getDetails;
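+// Example:
+//   const details = await platform.getDetails();
+//   // e.g. { name: 'Ubuntu', version: '22.04', platform: 'linux', arch: 'x64',
+//   //        isWindows: false, isMacOS: false, isLinux: true }
+// The name/version strings come from PowerShell, sw_vers, or lsb_release above, so the
+// exact values depend on the runner image.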
+//# sourceMappingURL=platform.js.map
+
+/***/ }),
+
+/***/ 1638:
+/***/ (function(__unused_webpack_module, exports, __nccwpck_require__) {
+
+
+var __awaiter = (this && this.__awaiter) || function (thisArg, _arguments, P, generator) {
+    function adopt(value) { return value instanceof P ? value : new P(function (resolve) { resolve(value); }); }
+    return new (P || (P = Promise))(function (resolve, reject) {
+        function fulfilled(value) { try { step(generator.next(value)); } catch (e) { reject(e); } }
+        function rejected(value) { try { step(generator["throw"](value)); } catch (e) { reject(e); } }
+        function step(result) { result.done ? resolve(result.value) : adopt(result.value).then(fulfilled, rejected); }
+        step((generator = generator.apply(thisArg, _arguments || [])).next());
+    });
+};
+Object.defineProperty(exports, "__esModule", ({ value: true }));
+exports.summary = exports.markdownSummary = exports.SUMMARY_DOCS_URL = exports.SUMMARY_ENV_VAR = void 0;
+const os_1 = __nccwpck_require__(857);
+const fs_1 = __nccwpck_require__(9896);
+const { access, appendFile, writeFile } = fs_1.promises;
+exports.SUMMARY_ENV_VAR = 'GITHUB_STEP_SUMMARY';
+exports.SUMMARY_DOCS_URL = 'https://docs.github.com/actions/using-workflows/workflow-commands-for-github-actions#adding-a-job-summary';
+class Summary {
+    constructor() {
+        this._buffer = '';
+    }
+    /**
+     * Finds the summary file path from the environment, rejects if env var is not found or file does not exist
+     * Also checks r/w permissions.
+     *
+     * @returns step summary file path
+     */
+    filePath() {
+        return __awaiter(this, void 0, void 0, function* () {
+            if (this._filePath) {
+                return this._filePath;
+            }
+            const pathFromEnv = process.env[exports.SUMMARY_ENV_VAR];
+            if (!pathFromEnv) {
+                throw new Error(`Unable to find environment variable for $${exports.SUMMARY_ENV_VAR}. Check if your runtime environment supports job summaries.`);
+            }
+            try {
+                yield access(pathFromEnv, fs_1.constants.R_OK | fs_1.constants.W_OK);
+            }
+            catch (_a) {
+                throw new Error(`Unable to access summary file: '${pathFromEnv}'. Check if the file has correct read/write permissions.`);
+            }
+            this._filePath = pathFromEnv;
+            return this._filePath;
+        });
+    }
+    /**
+     * Wraps content in an HTML tag, adding any HTML attributes
+     *
+     * @param {string} tag HTML tag to wrap
+     * @param {string | null} content content within the tag
+     * @param {[attribute: string]: string} attrs key-value list of HTML attributes to add
+     *
+     * @returns {string} content wrapped in HTML element
+     */
+    wrap(tag, content, attrs = {}) {
+        const htmlAttrs = Object.entries(attrs)
+            .map(([key, value]) => ` ${key}="${value}"`)
+            .join('');
+        if (!content) {
+            return `<${tag}${htmlAttrs}>`;
+        }
+        return `<${tag}${htmlAttrs}>${content}</${tag}>`;
+    }
+    /**
+     * Writes the text in the buffer to the summary file and empties the buffer. Appends by default.
+     *
+     * @param {SummaryWriteOptions} [options] (optional) options for write operation
+     *
+     * @returns {Promise<Summary>} summary instance
+     */
+    write(options) {
+        return __awaiter(this, void 0, void 0, function* () {
+            const overwrite = !!(options === null || options === void 0 ? void 0 : options.overwrite);
+            const filePath = yield this.filePath();
+            const writeFunc = overwrite ? writeFile : appendFile;
+            yield writeFunc(filePath, this._buffer, { encoding: 'utf8' });
+            return this.emptyBuffer();
+        });
+    }
+    /**
+     * Clears the summary buffer and wipes the summary file
+     *
+     * @returns {Summary} summary instance
+     */
+    clear() {
+        return __awaiter(this, void 0, void 0, function* () {
+            return this.emptyBuffer().write({ overwrite: true });
+        });
+    }
+    /**
+     * Returns the current summary buffer as a string
+     *
+     * @returns {string} string of summary buffer
+     */
+    stringify() {
+        return this._buffer;
+    }
+    /**
+     * If the summary buffer is empty
+     *
+     * @returns {boolean} true if the buffer is empty
+     */
+    isEmptyBuffer() {
+        return this._buffer.length === 0;
+    }
+    /**
+     * Resets the summary buffer without writing to summary file
+     *
+     * @returns {Summary} summary instance
+     */
+    emptyBuffer() {
+        this._buffer = '';
+        return this;
+    }
+    /**
+     * Adds raw text to the summary buffer
+     *
+     * @param {string} text content to add
+     * @param {boolean} [addEOL=false] (optional) append an EOL to the raw text (default: false)
+     *
+     * @returns {Summary} summary instance
+     */
+    addRaw(text, addEOL = false) {
+        this._buffer += text;
+        return addEOL ? this.addEOL() : this;
+    }
+    /**
+     * Adds the operating system-specific end-of-line marker to the buffer
+     *
+     * @returns {Summary} summary instance
+     */
+    addEOL() {
+        return this.addRaw(os_1.EOL);
+    }
+    /**
+     * Adds an HTML codeblock to the summary buffer
+     *
+     * @param {string} code content to render within fenced code block
+     * @param {string} lang (optional) language to syntax highlight code
+     *
+     * @returns {Summary} summary instance
+     */
+    addCodeBlock(code, lang) {
+        const attrs = Object.assign({}, (lang && { lang }));
+        const element = this.wrap('pre', this.wrap('code', code), attrs);
+        return this.addRaw(element).addEOL();
+    }
+    /**
+     * Adds an HTML list to the summary buffer
+     *
+     * @param {string[]} items list of items to render
+     * @param {boolean} [ordered=false] (optional) if the rendered list should be ordered or not (default: false)
+     *
+     * @returns {Summary} summary instance
+     */
+    addList(items, ordered = false) {
+        const tag = ordered ? 'ol' : 'ul';
+        const listItems = items.map(item => this.wrap('li', item)).join('');
+        const element = this.wrap(tag, listItems);
+        return this.addRaw(element).addEOL();
+    }
+    /**
+     * Adds an HTML table to the summary buffer
+     *
+     * @param {SummaryTableCell[]} rows table rows
+     *
+     * @returns {Summary} summary instance
+     */
+    addTable(rows) {
+        const tableBody = rows
+            .map(row => {
+            const cells = row
+                .map(cell => {
+                if (typeof cell === 'string') {
+                    return this.wrap('td', cell);
+                }
+                const { header, data, colspan, rowspan } = cell;
+                const tag = header ? 'th' : 'td';
+                const attrs = Object.assign(Object.assign({}, (colspan && { colspan })), (rowspan && { rowspan }));
+                return this.wrap(tag, data, attrs);
+            })
+                .join('');
+            return this.wrap('tr', cells);
+        })
+            .join('');
+        const element = this.wrap('table', tableBody);
+        return this.addRaw(element).addEOL();
+    }
+    /**
+     * Adds a collapsible HTML details element to the summary buffer
+     *
+     * @param {string} label text for the closed state
+     * @param {string} content collapsible content
+     *
+     * @returns {Summary} summary instance
+     */
+    addDetails(label, content) {
+        const element = this.wrap('details', this.wrap('summary', label) + content);
+        return this.addRaw(element).addEOL();
+    }
+    /**
+     * Adds an HTML image tag to the summary buffer
+     *
+     * @param {string} src path to the image to embed
+     * @param {string} alt text description of the image
+     * @param {SummaryImageOptions} options (optional) additional image attributes
+     *
+     * @returns {Summary} summary instance
+     */
+    addImage(src, alt, options) {
+        const { width, height } = options || {};
+        const attrs = Object.assign(Object.assign({}, (width && { width })), (height && { height }));
+        const element = this.wrap('img', null, Object.assign({ src, alt }, attrs));
+        return this.addRaw(element).addEOL();
+    }
+    /**
+     * Adds an HTML section heading element
+     *
+     * @param {string} text heading text
+     * @param {number | string} [level=1] (optional) the heading level, default: 1
+     *
+     * @returns {Summary} summary instance
+     */
+    addHeading(text, level) {
+        const tag = `h${level}`;
+        const allowedTag = ['h1', 'h2', 'h3', 'h4', 'h5', 'h6'].includes(tag)
+            ? tag
+            : 'h1';
+        const element = this.wrap(allowedTag, text);
+        return this.addRaw(element).addEOL();
+    }
+    /**
+     * Adds an HTML thematic break (<hr>) to the summary buffer
+     *
+     * @returns {Summary} summary instance
+     */
+    addSeparator() {
+        const element = this.wrap('hr', null);
+        return this.addRaw(element).addEOL();
+    }
+    /**
+     * Adds an HTML line break (<br>) to the summary buffer
+     *
+     * @returns {Summary} summary instance
+     */
+    addBreak() {
+        const element = this.wrap('br', null);
+        return this.addRaw(element).addEOL();
+    }
+    /**
+     * Adds an HTML blockquote to the summary buffer
+     *
+     * @param {string} text quote text
+     * @param {string} cite (optional) citation url
+     *
+     * @returns {Summary} summary instance
+     */
+    addQuote(text, cite) {
+        const attrs = Object.assign({}, (cite && { cite }));
+        const element = this.wrap('blockquote', text, attrs);
+        return this.addRaw(element).addEOL();
+    }
+    /**
+     * Adds an HTML anchor tag to the summary buffer
+     *
+     * @param {string} text link text/content
+     * @param {string} href hyperlink
+     *
+     * @returns {Summary} summary instance
+     */
+    addLink(text, href) {
+        const element = this.wrap('a', text, { href });
+        return this.addRaw(element).addEOL();
+    }
+}
+const _summary = new Summary();
+/**
+ * @deprecated use `core.summary`
+ */
+exports.markdownSummary = _summary;
+exports.summary = _summary;
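+// Buffered usage sketch (nothing touches GITHUB_STEP_SUMMARY until write() is awaited):
+//   await core.summary
+//     .addHeading('Test results')
+//     .addTable([
+//       [{ data: 'Suite', header: true }, { data: 'Status', header: true }],
+//       ['unit', 'passed']
+//     ])
+//     .addLink('Full logs', 'https://example.com/run')
+//     .write();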
+//# sourceMappingURL=summary.js.map
+
+/***/ }),
+
+/***/ 6283:
+/***/ ((__unused_webpack_module, exports) => {
+
+
+// We use any as a valid input type
+/* eslint-disable @typescript-eslint/no-explicit-any */
+Object.defineProperty(exports, "__esModule", ({ value: true }));
+exports.toCommandProperties = exports.toCommandValue = void 0;
+/**
+ * Sanitizes an input into a string so it can be passed into issueCommand safely
+ * @param input input to sanitize into a string
+ */
+function toCommandValue(input) {
+    if (input === null || input === undefined) {
+        return '';
+    }
+    else if (typeof input === 'string' || input instanceof String) {
+        return input;
+    }
+    return JSON.stringify(input);
+}
+exports.toCommandValue = toCommandValue;
+/**
+ *
+ * @param annotationProperties
+ * @returns The command properties to send with the actual annotation command
+ * See IssueCommandProperties: https://github.com/actions/runner/blob/main/src/Runner.Worker/ActionCommandManager.cs#L646
+ */
+function toCommandProperties(annotationProperties) {
+    if (!Object.keys(annotationProperties).length) {
+        return {};
+    }
+    return {
+        title: annotationProperties.title,
+        file: annotationProperties.file,
+        line: annotationProperties.startLine,
+        endLine: annotationProperties.endLine,
+        col: annotationProperties.startColumn,
+        endColumn: annotationProperties.endColumn
+    };
+}
+exports.toCommandProperties = toCommandProperties;
+//# sourceMappingURL=utils.js.map
+
+/***/ }),
+
+/***/ 8872:
+/***/ (function(__unused_webpack_module, exports, __nccwpck_require__) {
+
+
+var __createBinding = (this && this.__createBinding) || (Object.create ? (function(o, m, k, k2) {
+    if (k2 === undefined) k2 = k;
+    Object.defineProperty(o, k2, { enumerable: true, get: function() { return m[k]; } });
+}) : (function(o, m, k, k2) {
+    if (k2 === undefined) k2 = k;
+    o[k2] = m[k];
+}));
+var __setModuleDefault = (this && this.__setModuleDefault) || (Object.create ? (function(o, v) {
+    Object.defineProperty(o, "default", { enumerable: true, value: v });
+}) : function(o, v) {
+    o["default"] = v;
+});
+var __importStar = (this && this.__importStar) || function (mod) {
+    if (mod && mod.__esModule) return mod;
+    var result = {};
+    if (mod != null) for (var k in mod) if (k !== "default" && Object.hasOwnProperty.call(mod, k)) __createBinding(result, mod, k);
+    __setModuleDefault(result, mod);
+    return result;
+};
+var __awaiter = (this && this.__awaiter) || function (thisArg, _arguments, P, generator) {
+    function adopt(value) { return value instanceof P ? value : new P(function (resolve) { resolve(value); }); }
+    return new (P || (P = Promise))(function (resolve, reject) {
+        function fulfilled(value) { try { step(generator.next(value)); } catch (e) { reject(e); } }
+        function rejected(value) { try { step(generator["throw"](value)); } catch (e) { reject(e); } }
+        function step(result) { result.done ? resolve(result.value) : adopt(result.value).then(fulfilled, rejected); }
+        step((generator = generator.apply(thisArg, _arguments || [])).next());
+    });
+};
+Object.defineProperty(exports, "__esModule", ({ value: true }));
+exports.getExecOutput = exports.exec = void 0;
+const string_decoder_1 = __nccwpck_require__(3193);
+const tr = __importStar(__nccwpck_require__(3725));
+/**
+ * Exec a command.
+ * Output will be streamed to the live console.
+ * Returns promise with return code
+ *
+ * @param     commandLine        command to execute (can include additional args). Must be correctly escaped.
+ * @param     args               optional arguments for tool. Escaping is handled by the lib.
+ * @param     options            optional exec options.  See ExecOptions
+ * @returns   Promise<number>    exit code
+ */
+function exec(commandLine, args, options) {
+    return __awaiter(this, void 0, void 0, function* () {
+        const commandArgs = tr.argStringToArray(commandLine);
+        if (commandArgs.length === 0) {
+            throw new Error(`Parameter 'commandLine' cannot be null or empty.`);
+        }
+        // Path to tool to execute should be first arg
+        const toolPath = commandArgs[0];
+        args = commandArgs.slice(1).concat(args || []);
+        const runner = new tr.ToolRunner(toolPath, args, options);
+        return runner.exec();
+    });
+}
+exports.exec = exec;
+/**
+ * Exec a command and get the output.
+ * Output will be streamed to the live console.
+ * Returns promise with the exit code and collected stdout and stderr
+ *
+ * @param     commandLine           command to execute (can include additional args). Must be correctly escaped.
+ * @param     args                  optional arguments for tool. Escaping is handled by the lib.
+ * @param     options               optional exec options.  See ExecOptions
+ * @returns   Promise<ExecOutput>   exit code, stdout, and stderr
+ */
+function getExecOutput(commandLine, args, options) {
+    var _a, _b;
+    return __awaiter(this, void 0, void 0, function* () {
+        let stdout = '';
+        let stderr = '';
+        // Using a string decoder covers the case where a multi-byte character is split
+        const stdoutDecoder = new string_decoder_1.StringDecoder('utf8');
+        const stderrDecoder = new string_decoder_1.StringDecoder('utf8');
+        const originalStdoutListener = (_a = options === null || options === void 0 ? void 0 : options.listeners) === null || _a === void 0 ? void 0 : _a.stdout;
+        const originalStdErrListener = (_b = options === null || options === void 0 ? void 0 : options.listeners) === null || _b === void 0 ? void 0 : _b.stderr;
+        const stdErrListener = (data) => {
+            stderr += stderrDecoder.write(data);
+            if (originalStdErrListener) {
+                originalStdErrListener(data);
+            }
+        };
+        const stdOutListener = (data) => {
+            stdout += stdoutDecoder.write(data);
+            if (originalStdoutListener) {
+                originalStdoutListener(data);
+            }
+        };
+        const listeners = Object.assign(Object.assign({}, options === null || options === void 0 ? void 0 : options.listeners), { stdout: stdOutListener, stderr: stdErrListener });
+        const exitCode = yield exec(commandLine, args, Object.assign(Object.assign({}, options), { listeners }));
+        // flush any remaining characters
+        stdout += stdoutDecoder.end();
+        stderr += stderrDecoder.end();
+        return {
+            exitCode,
+            stdout,
+            stderr
+        };
+    });
+}
+exports.getExecOutput = getExecOutput;
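+// Example (capture output without echoing it to the job log):
+//   const { exitCode, stdout } = await getExecOutput('git', ['rev-parse', 'HEAD'], {
+//     silent: true,
+//     ignoreReturnCode: true
+//   });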
+//# sourceMappingURL=exec.js.map
+
+/***/ }),
+
+/***/ 3725:
+/***/ (function(__unused_webpack_module, exports, __nccwpck_require__) {
+
+
+var __createBinding = (this && this.__createBinding) || (Object.create ? (function(o, m, k, k2) {
+    if (k2 === undefined) k2 = k;
+    Object.defineProperty(o, k2, { enumerable: true, get: function() { return m[k]; } });
+}) : (function(o, m, k, k2) {
+    if (k2 === undefined) k2 = k;
+    o[k2] = m[k];
+}));
+var __setModuleDefault = (this && this.__setModuleDefault) || (Object.create ? (function(o, v) {
+    Object.defineProperty(o, "default", { enumerable: true, value: v });
+}) : function(o, v) {
+    o["default"] = v;
+});
+var __importStar = (this && this.__importStar) || function (mod) {
+    if (mod && mod.__esModule) return mod;
+    var result = {};
+    if (mod != null) for (var k in mod) if (k !== "default" && Object.hasOwnProperty.call(mod, k)) __createBinding(result, mod, k);
+    __setModuleDefault(result, mod);
+    return result;
+};
+var __awaiter = (this && this.__awaiter) || function (thisArg, _arguments, P, generator) {
+    function adopt(value) { return value instanceof P ? value : new P(function (resolve) { resolve(value); }); }
+    return new (P || (P = Promise))(function (resolve, reject) {
+        function fulfilled(value) { try { step(generator.next(value)); } catch (e) { reject(e); } }
+        function rejected(value) { try { step(generator["throw"](value)); } catch (e) { reject(e); } }
+        function step(result) { result.done ? resolve(result.value) : adopt(result.value).then(fulfilled, rejected); }
+        step((generator = generator.apply(thisArg, _arguments || [])).next());
+    });
+};
+Object.defineProperty(exports, "__esModule", ({ value: true }));
+exports.argStringToArray = exports.ToolRunner = void 0;
+const os = __importStar(__nccwpck_require__(857));
+const events = __importStar(__nccwpck_require__(4434));
+const child = __importStar(__nccwpck_require__(5317));
+const path = __importStar(__nccwpck_require__(6928));
+const io = __importStar(__nccwpck_require__(3357));
+const ioUtil = __importStar(__nccwpck_require__(2746));
+const timers_1 = __nccwpck_require__(3557);
+/* eslint-disable @typescript-eslint/unbound-method */
+const IS_WINDOWS = process.platform === 'win32';
+/*
+ * Class for running command line tools. Handles quoting and arg parsing in a platform agnostic way.
+ */
+class ToolRunner extends events.EventEmitter {
+    constructor(toolPath, args, options) {
+        super();
+        if (!toolPath) {
+            throw new Error("Parameter 'toolPath' cannot be null or empty.");
+        }
+        this.toolPath = toolPath;
+        this.args = args || [];
+        this.options = options || {};
+    }
+    _debug(message) {
+        if (this.options.listeners && this.options.listeners.debug) {
+            this.options.listeners.debug(message);
+        }
+    }
+    _getCommandString(options, noPrefix) {
+        const toolPath = this._getSpawnFileName();
+        const args = this._getSpawnArgs(options);
+        let cmd = noPrefix ? '' : '[command]'; // omit prefix when piped to a second tool
+        if (IS_WINDOWS) {
+            // Windows + cmd file
+            if (this._isCmdFile()) {
+                cmd += toolPath;
+                for (const a of args) {
+                    cmd += ` ${a}`;
+                }
+            }
+            // Windows + verbatim
+            else if (options.windowsVerbatimArguments) {
+                cmd += `"${toolPath}"`;
+                for (const a of args) {
+                    cmd += ` ${a}`;
+                }
+            }
+            // Windows (regular)
+            else {
+                cmd += this._windowsQuoteCmdArg(toolPath);
+                for (const a of args) {
+                    cmd += ` ${this._windowsQuoteCmdArg(a)}`;
+                }
+            }
+        }
+        else {
+            // OSX/Linux - this can likely be improved with some form of quoting.
+            // creating processes on Unix is fundamentally different from doing so on Windows.
+            // on Unix, execvp() takes an arg array.
+            cmd += toolPath;
+            for (const a of args) {
+                cmd += ` ${a}`;
+            }
+        }
+        return cmd;
+    }
+    _processLineBuffer(data, strBuffer, onLine) {
+        try {
+            let s = strBuffer + data.toString();
+            let n = s.indexOf(os.EOL);
+            while (n > -1) {
+                const line = s.substring(0, n);
+                onLine(line);
+                // the rest of the string ...
+                s = s.substring(n + os.EOL.length);
+                n = s.indexOf(os.EOL);
+            }
+            return s;
+        }
+        catch (err) {
+            // streaming lines to console is best effort.  Don't fail a build.
+            this._debug(`error processing line. Failed with error ${err}`);
+            return '';
+        }
+    }
+    _getSpawnFileName() {
+        if (IS_WINDOWS) {
+            if (this._isCmdFile()) {
+                return process.env['COMSPEC'] || 'cmd.exe';
+            }
+        }
+        return this.toolPath;
+    }
+    _getSpawnArgs(options) {
+        if (IS_WINDOWS) {
+            if (this._isCmdFile()) {
+                let argline = `/D /S /C "${this._windowsQuoteCmdArg(this.toolPath)}`;
+                for (const a of this.args) {
+                    argline += ' ';
+                    argline += options.windowsVerbatimArguments
+                        ? a
+                        : this._windowsQuoteCmdArg(a);
+                }
+                argline += '"';
+                return [argline];
+            }
+        }
+        return this.args;
+    }
+    _endsWith(str, end) {
+        return str.endsWith(end);
+    }
+    _isCmdFile() {
+        const upperToolPath = this.toolPath.toUpperCase();
+        return (this._endsWith(upperToolPath, '.CMD') ||
+            this._endsWith(upperToolPath, '.BAT'));
+    }
+    _windowsQuoteCmdArg(arg) {
+        // for .exe, apply the normal quoting rules that libuv applies
+        if (!this._isCmdFile()) {
+            return this._uvQuoteCmdArg(arg);
+        }
+        // otherwise apply quoting rules specific to the cmd.exe command line parser.
+        // the libuv rules are generic and are not designed specifically for cmd.exe
+        // command line parser.
+        //
+        // for a detailed description of the cmd.exe command line parser, refer to
+        // http://stackoverflow.com/questions/4094699/how-does-the-windows-command-interpreter-cmd-exe-parse-scripts/7970912#7970912
+        // need quotes for empty arg
+        if (!arg) {
+            return '""';
+        }
+        // determine whether the arg needs to be quoted
+        const cmdSpecialChars = [
+            ' ',
+            '\t',
+            '&',
+            '(',
+            ')',
+            '[',
+            ']',
+            '{',
+            '}',
+            '^',
+            '=',
+            ';',
+            '!',
+            "'",
+            '+',
+            ',',
+            '`',
+            '~',
+            '|',
+            '<',
+            '>',
+            '"'
+        ];
+        let needsQuotes = false;
+        for (const char of arg) {
+            if (cmdSpecialChars.some(x => x === char)) {
+                needsQuotes = true;
+                break;
+            }
+        }
+        // short-circuit if quotes not needed
+        if (!needsQuotes) {
+            return arg;
+        }
+        // the following quoting rules are very similar to the rules that libuv applies.
+        //
+        // 1) wrap the string in quotes
+        //
+        // 2) double-up quotes - i.e. " => ""
+        //
+        //    this is different from the libuv quoting rules. libuv replaces " with \", which unfortunately
+        //    doesn't work well with a cmd.exe command line.
+        //
+        //    note, replacing " with "" also works well if the arg is passed to a downstream .NET console app.
+        //    for example, the command line:
+        //          foo.exe "myarg:""my val"""
+        //    is parsed by a .NET console app into an arg array:
+        //          [ "myarg:\"my val\"" ]
+        //    which is the same end result when applying libuv quoting rules. although the actual
+        //    command line from libuv quoting rules would look like:
+        //          foo.exe "myarg:\"my val\""
+        //
+        // 3) double-up slashes that precede a quote,
+        //    e.g.  hello \world    => "hello \world"
+        //          hello\"world    => "hello\\""world"
+        //          hello\\"world   => "hello\\\\""world"
+        //          hello world\    => "hello world\\"
+        //
+        //    technically this is not required for a cmd.exe command line, or the batch argument parser.
+        //    the reasons for including this as a .cmd quoting rule are:
+        //
+        //    a) this is optimized for the scenario where the argument is passed from the .cmd file to an
+        //       external program. many programs (e.g. .NET console apps) rely on the slash-doubling rule.
+        //
+        //    b) it's what we've been doing previously (by deferring to node default behavior) and we
+        //       haven't heard any complaints about that aspect.
+        //
+        // note, a weakness of the quoting rules chosen here, is that % is not escaped. in fact, % cannot be
+        // escaped when used on the command line directly - even though within a .cmd file % can be escaped
+        // by using %%.
+        //
+        // the saving grace is, on the command line, %var% is left as-is if var is not defined. this contrasts
+        // the line parsing rules within a .cmd file, where if var is not defined it is replaced with nothing.
+        //
+        // one option that was explored was replacing % with ^% - i.e. %var% => ^%var^%. this hack would
+        // often work, since it is unlikely that var^ would exist, and the ^ character is removed when the
+        // variable is used. the problem, however, is that ^ is not removed when %* is used to pass the args
+        // to an external program.
+        //
+        // an unexplored potential solution for the % escaping problem, is to create a wrapper .cmd file.
+        // % can be escaped within a .cmd file.
+        let reverse = '"';
+        let quoteHit = true;
+        for (let i = arg.length; i > 0; i--) {
+            // walk the string in reverse
+            reverse += arg[i - 1];
+            if (quoteHit && arg[i - 1] === '\\') {
+                reverse += '\\'; // double the slash
+            }
+            else if (arg[i - 1] === '"') {
+                quoteHit = true;
+                reverse += '"'; // double the quote
+            }
+            else {
+                quoteHit = false;
+            }
+        }
+        reverse += '"';
+        return reverse
+            .split('')
+            .reverse()
+            .join('');
+    }
+    _uvQuoteCmdArg(arg) {
+        // Tool runner wraps child_process.spawn() and needs to apply the same quoting as
+        // Node in certain cases where the undocumented spawn option windowsVerbatimArguments
+        // is used.
+        //
+        // Since this function is a port of quote_cmd_arg from Node 4.x (technically, lib UV,
+        // see https://github.com/nodejs/node/blob/v4.x/deps/uv/src/win/process.c for details),
+        // pasting copyright notice from Node within this function:
+        //
+        //      Copyright Joyent, Inc. and other Node contributors. All rights reserved.
+        //
+        //      Permission is hereby granted, free of charge, to any person obtaining a copy
+        //      of this software and associated documentation files (the "Software"), to
+        //      deal in the Software without restriction, including without limitation the
+        //      rights to use, copy, modify, merge, publish, distribute, sublicense, and/or
+        //      sell copies of the Software, and to permit persons to whom the Software is
+        //      furnished to do so, subject to the following conditions:
+        //
+        //      The above copyright notice and this permission notice shall be included in
+        //      all copies or substantial portions of the Software.
+        //
+        //      THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
+        //      IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
+        //      FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
+        //      AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
+        //      LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING
+        //      FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS
+        //      IN THE SOFTWARE.
+        if (!arg) {
+            // Need double quotation for empty argument
+            return '""';
+        }
+        if (!arg.includes(' ') && !arg.includes('\t') && !arg.includes('"')) {
+            // No quotation needed
+            return arg;
+        }
+        if (!arg.includes('"') && !arg.includes('\\')) {
+            // No embedded double quotes or backslashes, so I can just wrap
+            // quote marks around the whole thing.
+            return `"${arg}"`;
+        }
+        // Expected input/output:
+        //   input : hello"world
+        //   output: "hello\"world"
+        //   input : hello""world
+        //   output: "hello\"\"world"
+        //   input : hello\world
+        //   output: hello\world
+        //   input : hello\\world
+        //   output: hello\\world
+        //   input : hello\"world
+        //   output: "hello\\\"world"
+        //   input : hello\\"world
+        //   output: "hello\\\\\"world"
+        //   input : hello world\
+        //   output: "hello world\\" - note the comment in libuv actually reads "hello world\"
+        //                             but it appears the comment is wrong, it should be "hello world\\"
+        let reverse = '"';
+        let quoteHit = true;
+        for (let i = arg.length; i > 0; i--) {
+            // walk the string in reverse
+            reverse += arg[i - 1];
+            if (quoteHit && arg[i - 1] === '\\') {
+                reverse += '\\';
+            }
+            else if (arg[i - 1] === '"') {
+                quoteHit = true;
+                reverse += '\\';
+            }
+            else {
+                quoteHit = false;
+            }
+        }
+        reverse += '"';
+        return reverse
+            .split('')
+            .reverse()
+            .join('');
+    }
+    _cloneExecOptions(options) {
+        options = options || {};
+        const result = {
+            cwd: options.cwd || process.cwd(),
+            env: options.env || process.env,
+            silent: options.silent || false,
+            windowsVerbatimArguments: options.windowsVerbatimArguments || false,
+            failOnStdErr: options.failOnStdErr || false,
+            ignoreReturnCode: options.ignoreReturnCode || false,
+            delay: options.delay || 10000
+        };
+        result.outStream = options.outStream || process.stdout;
+        result.errStream = options.errStream || process.stderr;
+        return result;
+    }
+    _getSpawnOptions(options, toolPath) {
+        options = options || {};
+        const result = {};
+        result.cwd = options.cwd;
+        result.env = options.env;
+        result['windowsVerbatimArguments'] =
+            options.windowsVerbatimArguments || this._isCmdFile();
+        if (options.windowsVerbatimArguments) {
+            result.argv0 = `"${toolPath}"`;
+        }
+        return result;
+    }
+    /**
+     * Exec a tool.
+     * Output will be streamed to the live console.
+     * Returns a promise with the return code (a usage sketch follows the class export below).
+     *
+     * @param     tool     path to tool to exec
+     * @param     options  optional exec options.  See ExecOptions
+     * @returns   Promise<number>
+     */
+    exec() {
+        return __awaiter(this, void 0, void 0, function* () {
+            // root the tool path if it is unrooted and contains relative pathing
+            if (!ioUtil.isRooted(this.toolPath) &&
+                (this.toolPath.includes('/') ||
+                    (IS_WINDOWS && this.toolPath.includes('\\')))) {
+                // prefer options.cwd if it is specified, however options.cwd may also need to be rooted
+                this.toolPath = path.resolve(process.cwd(), this.options.cwd || process.cwd(), this.toolPath);
+            }
+            // if the tool is only a file name, then resolve it from the PATH
+            // otherwise verify it exists (add extension on Windows if necessary)
+            this.toolPath = yield io.which(this.toolPath, true);
+            return new Promise((resolve, reject) => __awaiter(this, void 0, void 0, function* () {
+                this._debug(`exec tool: ${this.toolPath}`);
+                this._debug('arguments:');
+                for (const arg of this.args) {
+                    this._debug(`   ${arg}`);
+                }
+                const optionsNonNull = this._cloneExecOptions(this.options);
+                if (!optionsNonNull.silent && optionsNonNull.outStream) {
+                    optionsNonNull.outStream.write(this._getCommandString(optionsNonNull) + os.EOL);
+                }
+                const state = new ExecState(optionsNonNull, this.toolPath);
+                state.on('debug', (message) => {
+                    this._debug(message);
+                });
+                if (this.options.cwd && !(yield ioUtil.exists(this.options.cwd))) {
+                    return reject(new Error(`The cwd: ${this.options.cwd} does not exist!`));
+                }
+                const fileName = this._getSpawnFileName();
+                const cp = child.spawn(fileName, this._getSpawnArgs(optionsNonNull), this._getSpawnOptions(this.options, fileName));
+                let stdbuffer = '';
+                if (cp.stdout) {
+                    cp.stdout.on('data', (data) => {
+                        if (this.options.listeners && this.options.listeners.stdout) {
+                            this.options.listeners.stdout(data);
+                        }
+                        if (!optionsNonNull.silent && optionsNonNull.outStream) {
+                            optionsNonNull.outStream.write(data);
+                        }
+                        stdbuffer = this._processLineBuffer(data, stdbuffer, (line) => {
+                            if (this.options.listeners && this.options.listeners.stdline) {
+                                this.options.listeners.stdline(line);
+                            }
+                        });
+                    });
+                }
+                let errbuffer = '';
+                if (cp.stderr) {
+                    cp.stderr.on('data', (data) => {
+                        state.processStderr = true;
+                        if (this.options.listeners && this.options.listeners.stderr) {
+                            this.options.listeners.stderr(data);
+                        }
+                        if (!optionsNonNull.silent &&
+                            optionsNonNull.errStream &&
+                            optionsNonNull.outStream) {
+                            const s = optionsNonNull.failOnStdErr
+                                ? optionsNonNull.errStream
+                                : optionsNonNull.outStream;
+                            s.write(data);
+                        }
+                        errbuffer = this._processLineBuffer(data, errbuffer, (line) => {
+                            if (this.options.listeners && this.options.listeners.errline) {
+                                this.options.listeners.errline(line);
+                            }
+                        });
+                    });
+                }
+                cp.on('error', (err) => {
+                    state.processError = err.message;
+                    state.processExited = true;
+                    state.processClosed = true;
+                    state.CheckComplete();
+                });
+                cp.on('exit', (code) => {
+                    state.processExitCode = code;
+                    state.processExited = true;
+                    this._debug(`Exit code ${code} received from tool '${this.toolPath}'`);
+                    state.CheckComplete();
+                });
+                cp.on('close', (code) => {
+                    state.processExitCode = code;
+                    state.processExited = true;
+                    state.processClosed = true;
+                    this._debug(`STDIO streams have closed for tool '${this.toolPath}'`);
+                    state.CheckComplete();
+                });
+                state.on('done', (error, exitCode) => {
+                    if (stdbuffer.length > 0) {
+                        this.emit('stdline', stdbuffer);
+                    }
+                    if (errbuffer.length > 0) {
+                        this.emit('errline', errbuffer);
+                    }
+                    cp.removeAllListeners();
+                    if (error) {
+                        reject(error);
+                    }
+                    else {
+                        resolve(exitCode);
+                    }
+                });
+                if (this.options.input) {
+                    if (!cp.stdin) {
+                        throw new Error('child process missing stdin');
+                    }
+                    cp.stdin.end(this.options.input);
+                }
+            }));
+        });
+    }
+}
+exports.ToolRunner = ToolRunner;
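+// A minimal usage sketch for the ToolRunner above, assuming it is reached through the
+// @actions/exec wrapper bundled alongside it (run inside an async function; the option
+// names correspond to the fields handled by _cloneExecOptions):
+//
+//   const exec = require('@actions/exec');
+//   let output = '';
+//   const exitCode = await exec.exec('git', ['status', '--porcelain'], {
+//       cwd: process.cwd(),        // must exist, otherwise exec() rejects (see the cwd check above)
+//       ignoreReturnCode: true,    // resolve with the code instead of rejecting on a non-zero exit
+//       listeners: { stdout: data => { output += data.toString(); } }
+//   });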
+/**
+ * Convert an arg string to an array of args. Handles escaping
+ *
+ * @param    argString   string of arguments
+ * @returns  string[]    array of arguments
+ */
+function argStringToArray(argString) {
+    const args = [];
+    let inQuotes = false;
+    let escaped = false;
+    let arg = '';
+    function append(c) {
+        // we only escape double quotes.
+        if (escaped && c !== '"') {
+            arg += '\\';
+        }
+        arg += c;
+        escaped = false;
+    }
+    for (let i = 0; i < argString.length; i++) {
+        const c = argString.charAt(i);
+        if (c === '"') {
+            if (!escaped) {
+                inQuotes = !inQuotes;
+            }
+            else {
+                append(c);
+            }
+            continue;
+        }
+        if (c === '\\' && escaped) {
+            append(c);
+            continue;
+        }
+        if (c === '\\' && inQuotes) {
+            escaped = true;
+            continue;
+        }
+        if (c === ' ' && !inQuotes) {
+            if (arg.length > 0) {
+                args.push(arg);
+                arg = '';
+            }
+            continue;
+        }
+        append(c);
+    }
+    if (arg.length > 0) {
+        args.push(arg.trim());
+    }
+    return args;
+}
+exports.argStringToArray = argStringToArray;
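+// For reference, hedged examples of how argStringToArray above splits a command line
+// (double quotes group words; a backslash escapes a double quote inside quotes):
+//   argStringToArray('git commit -m "initial commit"')   // => ['git', 'commit', '-m', 'initial commit']
+//   argStringToArray('echo "a \\" b"')                    // => ['echo', 'a " b']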
+class ExecState extends events.EventEmitter {
+    constructor(options, toolPath) {
+        super();
+        this.processClosed = false; // tracks whether the process has exited and stdio is closed
+        this.processError = '';
+        this.processExitCode = 0;
+        this.processExited = false; // tracks whether the process has exited
+        this.processStderr = false; // tracks whether stderr was written to
+        this.delay = 10000; // 10 seconds
+        this.done = false;
+        this.timeout = null;
+        if (!toolPath) {
+            throw new Error('toolPath must not be empty');
+        }
+        this.options = options;
+        this.toolPath = toolPath;
+        if (options.delay) {
+            this.delay = options.delay;
+        }
+    }
+    CheckComplete() {
+        if (this.done) {
+            return;
+        }
+        if (this.processClosed) {
+            this._setResult();
+        }
+        else if (this.processExited) {
+            this.timeout = timers_1.setTimeout(ExecState.HandleTimeout, this.delay, this);
+        }
+    }
+    _debug(message) {
+        this.emit('debug', message);
+    }
+    _setResult() {
+        // determine whether there is an error
+        let error;
+        if (this.processExited) {
+            if (this.processError) {
+                error = new Error(`There was an error when attempting to execute the process '${this.toolPath}'. This may indicate the process failed to start. Error: ${this.processError}`);
+            }
+            else if (this.processExitCode !== 0 && !this.options.ignoreReturnCode) {
+                error = new Error(`The process '${this.toolPath}' failed with exit code ${this.processExitCode}`);
+            }
+            else if (this.processStderr && this.options.failOnStdErr) {
+                error = new Error(`The process '${this.toolPath}' failed because one or more lines were written to the STDERR stream`);
+            }
+        }
+        // clear the timeout
+        if (this.timeout) {
+            clearTimeout(this.timeout);
+            this.timeout = null;
+        }
+        this.done = true;
+        this.emit('done', error, this.processExitCode);
+    }
+    static HandleTimeout(state) {
+        if (state.done) {
+            return;
+        }
+        if (!state.processClosed && state.processExited) {
+            const message = `The STDIO streams did not close within ${state.delay / 1000} seconds of the exit event from process '${state.toolPath}'. This may indicate a child process inherited the STDIO streams and has not yet exited.`;
+            state._debug(message);
+        }
+        state._setResult();
+    }
+}
+//# sourceMappingURL=toolrunner.js.map
+
+/***/ }),
+
+/***/ 6011:
+/***/ (function(__unused_webpack_module, exports, __nccwpck_require__) {
+
+
+var __awaiter = (this && this.__awaiter) || function (thisArg, _arguments, P, generator) {
+    function adopt(value) { return value instanceof P ? value : new P(function (resolve) { resolve(value); }); }
+    return new (P || (P = Promise))(function (resolve, reject) {
+        function fulfilled(value) { try { step(generator.next(value)); } catch (e) { reject(e); } }
+        function rejected(value) { try { step(generator["throw"](value)); } catch (e) { reject(e); } }
+        function step(result) { result.done ? resolve(result.value) : adopt(result.value).then(fulfilled, rejected); }
+        step((generator = generator.apply(thisArg, _arguments || [])).next());
+    });
+};
+Object.defineProperty(exports, "__esModule", ({ value: true }));
+exports.create = void 0;
+const internal_globber_1 = __nccwpck_require__(1258);
+/**
+ * Constructs a globber
+ *
+ * @param patterns  Patterns separated by newlines
+ * @param options   Glob options
+ */
+function create(patterns, options) {
+    return __awaiter(this, void 0, void 0, function* () {
+        return yield internal_globber_1.DefaultGlobber.create(patterns, options);
+    });
+}
+exports.create = create;
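+// A minimal usage sketch, assuming this bundle is consumed as @actions/glob
+// (run inside an async function; the pattern strings are hypothetical):
+//   const glob = require('@actions/glob');
+//   const globber = await glob.create(['**/*.ts', '!**/node_modules/**'].join('\n'));
+//   const files = await globber.glob();                       // string[] of matched paths
+//   for await (const file of globber.globGenerator()) { ... } // or stream the results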
+//# sourceMappingURL=glob.js.map
+
+/***/ }),
+
+/***/ 6665:
+/***/ (function(__unused_webpack_module, exports, __nccwpck_require__) {
+
+
+var __createBinding = (this && this.__createBinding) || (Object.create ? (function(o, m, k, k2) {
+    if (k2 === undefined) k2 = k;
+    Object.defineProperty(o, k2, { enumerable: true, get: function() { return m[k]; } });
+}) : (function(o, m, k, k2) {
+    if (k2 === undefined) k2 = k;
+    o[k2] = m[k];
+}));
+var __setModuleDefault = (this && this.__setModuleDefault) || (Object.create ? (function(o, v) {
+    Object.defineProperty(o, "default", { enumerable: true, value: v });
+}) : function(o, v) {
+    o["default"] = v;
+});
+var __importStar = (this && this.__importStar) || function (mod) {
+    if (mod && mod.__esModule) return mod;
+    var result = {};
+    if (mod != null) for (var k in mod) if (k !== "default" && Object.hasOwnProperty.call(mod, k)) __createBinding(result, mod, k);
+    __setModuleDefault(result, mod);
+    return result;
+};
+Object.defineProperty(exports, "__esModule", ({ value: true }));
+exports.getOptions = void 0;
+const core = __importStar(__nccwpck_require__(9999));
+/**
+ * Returns a copy with defaults filled in.
+ */
+function getOptions(copy) {
+    const result = {
+        followSymbolicLinks: true,
+        implicitDescendants: true,
+        omitBrokenSymbolicLinks: true
+    };
+    if (copy) {
+        if (typeof copy.followSymbolicLinks === 'boolean') {
+            result.followSymbolicLinks = copy.followSymbolicLinks;
+            core.debug(`followSymbolicLinks '${result.followSymbolicLinks}'`);
+        }
+        if (typeof copy.implicitDescendants === 'boolean') {
+            result.implicitDescendants = copy.implicitDescendants;
+            core.debug(`implicitDescendants '${result.implicitDescendants}'`);
+        }
+        if (typeof copy.omitBrokenSymbolicLinks === 'boolean') {
+            result.omitBrokenSymbolicLinks = copy.omitBrokenSymbolicLinks;
+            core.debug(`omitBrokenSymbolicLinks '${result.omitBrokenSymbolicLinks}'`);
+        }
+    }
+    return result;
+}
+exports.getOptions = getOptions;
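+// Defaults applied by getOptions above, for reference:
+//   getOptions()                                // => { followSymbolicLinks: true, implicitDescendants: true, omitBrokenSymbolicLinks: true }
+//   getOptions({ followSymbolicLinks: false })  // => { followSymbolicLinks: false, implicitDescendants: true, omitBrokenSymbolicLinks: true }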
+//# sourceMappingURL=internal-glob-options-helper.js.map
+
+/***/ }),
+
+/***/ 1258:
+/***/ (function(__unused_webpack_module, exports, __nccwpck_require__) {
+
+
+var __createBinding = (this && this.__createBinding) || (Object.create ? (function(o, m, k, k2) {
+    if (k2 === undefined) k2 = k;
+    Object.defineProperty(o, k2, { enumerable: true, get: function() { return m[k]; } });
+}) : (function(o, m, k, k2) {
+    if (k2 === undefined) k2 = k;
+    o[k2] = m[k];
+}));
+var __setModuleDefault = (this && this.__setModuleDefault) || (Object.create ? (function(o, v) {
+    Object.defineProperty(o, "default", { enumerable: true, value: v });
+}) : function(o, v) {
+    o["default"] = v;
+});
+var __importStar = (this && this.__importStar) || function (mod) {
+    if (mod && mod.__esModule) return mod;
+    var result = {};
+    if (mod != null) for (var k in mod) if (k !== "default" && Object.hasOwnProperty.call(mod, k)) __createBinding(result, mod, k);
+    __setModuleDefault(result, mod);
+    return result;
+};
+var __awaiter = (this && this.__awaiter) || function (thisArg, _arguments, P, generator) {
+    function adopt(value) { return value instanceof P ? value : new P(function (resolve) { resolve(value); }); }
+    return new (P || (P = Promise))(function (resolve, reject) {
+        function fulfilled(value) { try { step(generator.next(value)); } catch (e) { reject(e); } }
+        function rejected(value) { try { step(generator["throw"](value)); } catch (e) { reject(e); } }
+        function step(result) { result.done ? resolve(result.value) : adopt(result.value).then(fulfilled, rejected); }
+        step((generator = generator.apply(thisArg, _arguments || [])).next());
+    });
+};
+var __asyncValues = (this && this.__asyncValues) || function (o) {
+    if (!Symbol.asyncIterator) throw new TypeError("Symbol.asyncIterator is not defined.");
+    var m = o[Symbol.asyncIterator], i;
+    return m ? m.call(o) : (o = typeof __values === "function" ? __values(o) : o[Symbol.iterator](), i = {}, verb("next"), verb("throw"), verb("return"), i[Symbol.asyncIterator] = function () { return this; }, i);
+    function verb(n) { i[n] = o[n] && function (v) { return new Promise(function (resolve, reject) { v = o[n](v), settle(resolve, reject, v.done, v.value); }); }; }
+    function settle(resolve, reject, d, v) { Promise.resolve(v).then(function(v) { resolve({ value: v, done: d }); }, reject); }
+};
+var __await = (this && this.__await) || function (v) { return this instanceof __await ? (this.v = v, this) : new __await(v); }
+var __asyncGenerator = (this && this.__asyncGenerator) || function (thisArg, _arguments, generator) {
+    if (!Symbol.asyncIterator) throw new TypeError("Symbol.asyncIterator is not defined.");
+    var g = generator.apply(thisArg, _arguments || []), i, q = [];
+    return i = {}, verb("next"), verb("throw"), verb("return"), i[Symbol.asyncIterator] = function () { return this; }, i;
+    function verb(n) { if (g[n]) i[n] = function (v) { return new Promise(function (a, b) { q.push([n, v, a, b]) > 1 || resume(n, v); }); }; }
+    function resume(n, v) { try { step(g[n](v)); } catch (e) { settle(q[0][3], e); } }
+    function step(r) { r.value instanceof __await ? Promise.resolve(r.value.v).then(fulfill, reject) : settle(q[0][2], r); }
+    function fulfill(value) { resume("next", value); }
+    function reject(value) { resume("throw", value); }
+    function settle(f, v) { if (f(v), q.shift(), q.length) resume(q[0][0], q[0][1]); }
+};
+Object.defineProperty(exports, "__esModule", ({ value: true }));
+exports.DefaultGlobber = void 0;
+const core = __importStar(__nccwpck_require__(9999));
+const fs = __importStar(__nccwpck_require__(9896));
+const globOptionsHelper = __importStar(__nccwpck_require__(6665));
+const path = __importStar(__nccwpck_require__(6928));
+const patternHelper = __importStar(__nccwpck_require__(3320));
+const internal_match_kind_1 = __nccwpck_require__(5843);
+const internal_pattern_1 = __nccwpck_require__(8887);
+const internal_search_state_1 = __nccwpck_require__(865);
+const IS_WINDOWS = process.platform === 'win32';
+class DefaultGlobber {
+    constructor(options) {
+        this.patterns = [];
+        this.searchPaths = [];
+        this.options = globOptionsHelper.getOptions(options);
+    }
+    getSearchPaths() {
+        // Return a copy
+        return this.searchPaths.slice();
+    }
+    glob() {
+        var e_1, _a;
+        return __awaiter(this, void 0, void 0, function* () {
+            const result = [];
+            try {
+                for (var _b = __asyncValues(this.globGenerator()), _c; _c = yield _b.next(), !_c.done;) {
+                    const itemPath = _c.value;
+                    result.push(itemPath);
+                }
+            }
+            catch (e_1_1) { e_1 = { error: e_1_1 }; }
+            finally {
+                try {
+                    if (_c && !_c.done && (_a = _b.return)) yield _a.call(_b);
+                }
+                finally { if (e_1) throw e_1.error; }
+            }
+            return result;
+        });
+    }
+    globGenerator() {
+        return __asyncGenerator(this, arguments, function* globGenerator_1() {
+            // Fill in default options
+            const options = globOptionsHelper.getOptions(this.options);
+            // Implicit descendants?
+            const patterns = [];
+            for (const pattern of this.patterns) {
+                patterns.push(pattern);
+                if (options.implicitDescendants &&
+                    (pattern.trailingSeparator ||
+                        pattern.segments[pattern.segments.length - 1] !== '**')) {
+                    patterns.push(new internal_pattern_1.Pattern(pattern.negate, true, pattern.segments.concat('**')));
+                }
+            }
+            // Push the search paths
+            const stack = [];
+            for (const searchPath of patternHelper.getSearchPaths(patterns)) {
+                core.debug(`Search path '${searchPath}'`);
+                // Exists?
+                try {
+                    // Intentionally using lstat. Detection for broken symlink
+                    // will be performed later (if following symlinks).
+                    yield __await(fs.promises.lstat(searchPath));
+                }
+                catch (err) {
+                    if (err.code === 'ENOENT') {
+                        continue;
+                    }
+                    throw err;
+                }
+                stack.unshift(new internal_search_state_1.SearchState(searchPath, 1));
+            }
+            // Search
+            const traversalChain = []; // used to detect cycles
+            while (stack.length) {
+                // Pop
+                const item = stack.pop();
+                // Match?
+                const match = patternHelper.match(patterns, item.path);
+                const partialMatch = !!match || patternHelper.partialMatch(patterns, item.path);
+                if (!match && !partialMatch) {
+                    continue;
+                }
+                // Stat
+                const stats = yield __await(DefaultGlobber.stat(item, options, traversalChain));
+                // Broken symlink, or symlink cycle detected, or no longer exists
+                if (!stats) {
+                    continue;
+                }
+                // Directory
+                if (stats.isDirectory()) {
+                    // Matched
+                    if (match & internal_match_kind_1.MatchKind.Directory) {
+                        yield yield __await(item.path);
+                    }
+                    // Descend?
+                    else if (!partialMatch) {
+                        continue;
+                    }
+                    // Push the child items in reverse
+                    const childLevel = item.level + 1;
+                    const childItems = (yield __await(fs.promises.readdir(item.path))).map(x => new internal_search_state_1.SearchState(path.join(item.path, x), childLevel));
+                    stack.push(...childItems.reverse());
+                }
+                // File
+                else if (match & internal_match_kind_1.MatchKind.File) {
+                    yield yield __await(item.path);
+                }
+            }
+        });
+    }
+    /**
+     * Constructs a DefaultGlobber
+     */
+    static create(patterns, options) {
+        return __awaiter(this, void 0, void 0, function* () {
+            const result = new DefaultGlobber(options);
+            if (IS_WINDOWS) {
+                patterns = patterns.replace(/\r\n/g, '\n');
+                patterns = patterns.replace(/\r/g, '\n');
+            }
+            const lines = patterns.split('\n').map(x => x.trim());
+            for (const line of lines) {
+                // Empty or comment
+                if (!line || line.startsWith('#')) {
+                    continue;
+                }
+                // Pattern
+                else {
+                    result.patterns.push(new internal_pattern_1.Pattern(line));
+                }
+            }
+            result.searchPaths.push(...patternHelper.getSearchPaths(result.patterns));
+            return result;
+        });
+    }
+    static stat(item, options, traversalChain) {
+        return __awaiter(this, void 0, void 0, function* () {
+            // Note:
+            // `stat` returns info about the target of a symlink (or symlink chain)
+            // `lstat` returns info about a symlink itself
+            let stats;
+            if (options.followSymbolicLinks) {
+                try {
+                    // Use `stat` (following symlinks)
+                    stats = yield fs.promises.stat(item.path);
+                }
+                catch (err) {
+                    if (err.code === 'ENOENT') {
+                        if (options.omitBrokenSymbolicLinks) {
+                            core.debug(`Broken symlink '${item.path}'`);
+                            return undefined;
+                        }
+                        throw new Error(`No information found for the path '${item.path}'. This may indicate a broken symbolic link.`);
+                    }
+                    throw err;
+                }
+            }
+            else {
+                // Use `lstat` (not following symlinks)
+                stats = yield fs.promises.lstat(item.path);
+            }
+            // Note, isDirectory() returns false for the lstat of a symlink
+            if (stats.isDirectory() && options.followSymbolicLinks) {
+                // Get the realpath
+                const realPath = yield fs.promises.realpath(item.path);
+                // Fixup the traversal chain to match the item level
+                while (traversalChain.length >= item.level) {
+                    traversalChain.pop();
+                }
+                // Test for a cycle
+                if (traversalChain.some((x) => x === realPath)) {
+                    core.debug(`Symlink cycle detected for path '${item.path}' and realpath '${realPath}'`);
+                    return undefined;
+                }
+                // Update the traversal chain
+                traversalChain.push(realPath);
+            }
+            return stats;
+        });
+    }
+}
+exports.DefaultGlobber = DefaultGlobber;
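+// Note on the stat() helper above: with followSymbolicLinks it calls fs.promises.stat (info about
+// the link target), otherwise fs.promises.lstat (info about the link itself). A hedged sketch of
+// the difference, assuming /tmp/link is a symlink pointing at the directory /tmp/dir:
+//   (await fs.promises.stat('/tmp/link')).isDirectory()   // => true, follows the link to /tmp/dir
+//   (await fs.promises.lstat('/tmp/link')).isDirectory()  // => false, describes the symlink itself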
+//# sourceMappingURL=internal-globber.js.map
+
+/***/ }),
+
+/***/ 5843:
+/***/ ((__unused_webpack_module, exports) => {
+
+
+Object.defineProperty(exports, "__esModule", ({ value: true }));
+exports.MatchKind = void 0;
+/**
+ * Indicates whether a pattern matches a path
+ */
+var MatchKind;
+(function (MatchKind) {
+    /** Not matched */
+    MatchKind[MatchKind["None"] = 0] = "None";
+    /** Matched if the path is a directory */
+    MatchKind[MatchKind["Directory"] = 1] = "Directory";
+    /** Matched if the path is a regular file */
+    MatchKind[MatchKind["File"] = 2] = "File";
+    /** Matched */
+    MatchKind[MatchKind["All"] = 3] = "All";
+})(MatchKind = exports.MatchKind || (exports.MatchKind = {}));
+//# sourceMappingURL=internal-match-kind.js.map
+
+/***/ }),
+
+/***/ 3051:
+/***/ (function(__unused_webpack_module, exports, __nccwpck_require__) {
+
+
+var __createBinding = (this && this.__createBinding) || (Object.create ? (function(o, m, k, k2) {
+    if (k2 === undefined) k2 = k;
+    Object.defineProperty(o, k2, { enumerable: true, get: function() { return m[k]; } });
+}) : (function(o, m, k, k2) {
+    if (k2 === undefined) k2 = k;
+    o[k2] = m[k];
+}));
+var __setModuleDefault = (this && this.__setModuleDefault) || (Object.create ? (function(o, v) {
+    Object.defineProperty(o, "default", { enumerable: true, value: v });
+}) : function(o, v) {
+    o["default"] = v;
+});
+var __importStar = (this && this.__importStar) || function (mod) {
+    if (mod && mod.__esModule) return mod;
+    var result = {};
+    if (mod != null) for (var k in mod) if (k !== "default" && Object.hasOwnProperty.call(mod, k)) __createBinding(result, mod, k);
+    __setModuleDefault(result, mod);
+    return result;
+};
+var __importDefault = (this && this.__importDefault) || function (mod) {
+    return (mod && mod.__esModule) ? mod : { "default": mod };
+};
+Object.defineProperty(exports, "__esModule", ({ value: true }));
+exports.safeTrimTrailingSeparator = exports.normalizeSeparators = exports.hasRoot = exports.hasAbsoluteRoot = exports.ensureAbsoluteRoot = exports.dirname = void 0;
+const path = __importStar(__nccwpck_require__(6928));
+const assert_1 = __importDefault(__nccwpck_require__(2613));
+const IS_WINDOWS = process.platform === 'win32';
+/**
+ * Similar to path.dirname except normalizes the path separators and slightly better handling for Windows UNC paths.
+ *
+ * For example, on Linux/macOS:
+ * - `/               => /`
+ * - `/hello          => /`
+ *
+ * For example, on Windows:
+ * - `C:\             => C:\`
+ * - `C:\hello        => C:\`
+ * - `C:              => C:`
+ * - `C:hello         => C:`
+ * - `\               => \`
+ * - `\hello          => \`
+ * - `\\hello         => \\hello`
+ * - `\\hello\world   => \\hello\world`
+ */
+function dirname(p) {
+    // Normalize slashes and trim unnecessary trailing slash
+    p = safeTrimTrailingSeparator(p);
+    // Windows UNC root, e.g. \\hello or \\hello\world
+    if (IS_WINDOWS && /^\\\\[^\\]+(\\[^\\]+)?$/.test(p)) {
+        return p;
+    }
+    // Get dirname
+    let result = path.dirname(p);
+    // Trim trailing slash for Windows UNC root, e.g. \\hello\world\
+    if (IS_WINDOWS && /^\\\\[^\\]+\\[^\\]+\\$/.test(result)) {
+        result = safeTrimTrailingSeparator(result);
+    }
+    return result;
+}
+exports.dirname = dirname;
+/**
+ * Roots the path if not already rooted. On Windows, relative roots like `\`
+ * or `C:` are expanded based on the current working directory.
+ */
+function ensureAbsoluteRoot(root, itemPath) {
+    assert_1.default(root, `ensureAbsoluteRoot parameter 'root' must not be empty`);
+    assert_1.default(itemPath, `ensureAbsoluteRoot parameter 'itemPath' must not be empty`);
+    // Already rooted
+    if (hasAbsoluteRoot(itemPath)) {
+        return itemPath;
+    }
+    // Windows
+    if (IS_WINDOWS) {
+        // Check for itemPath like C: or C:foo
+        if (itemPath.match(/^[A-Z]:[^\\/]|^[A-Z]:$/i)) {
+            let cwd = process.cwd();
+            assert_1.default(cwd.match(/^[A-Z]:\\/i), `Expected current directory to start with an absolute drive root. Actual '${cwd}'`);
+            // Drive letter matches cwd? Expand to cwd
+            if (itemPath[0].toUpperCase() === cwd[0].toUpperCase()) {
+                // Drive only, e.g. C:
+                if (itemPath.length === 2) {
+                    // Preserve specified drive letter case (upper or lower)
+                    return `${itemPath[0]}:\\${cwd.substr(3)}`;
+                }
+                // Drive + path, e.g. C:foo
+                else {
+                    if (!cwd.endsWith('\\')) {
+                        cwd += '\\';
+                    }
+                    // Preserve specified drive letter case (upper or lower)
+                    return `${itemPath[0]}:\\${cwd.substr(3)}${itemPath.substr(2)}`;
+                }
+            }
+            // Different drive
+            else {
+                return `${itemPath[0]}:\\${itemPath.substr(2)}`;
+            }
+        }
+        // Check for itemPath like \ or \foo
+        else if (normalizeSeparators(itemPath).match(/^\\$|^\\[^\\]/)) {
+            const cwd = process.cwd();
+            assert_1.default(cwd.match(/^[A-Z]:\\/i), `Expected current directory to start with an absolute drive root. Actual '${cwd}'`);
+            return `${cwd[0]}:\\${itemPath.substr(1)}`;
+        }
+    }
+    assert_1.default(hasAbsoluteRoot(root), `ensureAbsoluteRoot parameter 'root' must have an absolute root`);
+    // Otherwise ensure root ends with a separator
+    if (root.endsWith('/') || (IS_WINDOWS && root.endsWith('\\'))) {
+        // Intentionally empty
+    }
+    else {
+        // Append separator
+        root += path.sep;
+    }
+    return root + itemPath;
+}
+exports.ensureAbsoluteRoot = ensureAbsoluteRoot;
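+// Hedged examples of ensureAbsoluteRoot above (paths are illustrative):
+//   ensureAbsoluteRoot('/repo', 'src/app.ts')   // => '/repo/src/app.ts' (separator appended to the root)
+//   ensureAbsoluteRoot('C:\\repo', 'D:')        // on Windows, a relative drive root expands => 'D:\\'
+//                                               // (or to the cwd on D: when the drive letters match)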
+/**
+ * On Linux/macOS, true if path starts with `/`. On Windows, true for paths like:
+ * `\\hello\share` and `C:\hello` (and using alternate separator).
+ */
+function hasAbsoluteRoot(itemPath) {
+    assert_1.default(itemPath, `hasAbsoluteRoot parameter 'itemPath' must not be empty`);
+    // Normalize separators
+    itemPath = normalizeSeparators(itemPath);
+    // Windows
+    if (IS_WINDOWS) {
+        // E.g. \\hello\share or C:\hello
+        return itemPath.startsWith('\\\\') || /^[A-Z]:\\/i.test(itemPath);
+    }
+    // E.g. /hello
+    return itemPath.startsWith('/');
+}
+exports.hasAbsoluteRoot = hasAbsoluteRoot;
+/**
+ * On Linux/macOS, true if path starts with `/`. On Windows, true for paths like:
+ * `\`, `\hello`, `\\hello\share`, `C:`, and `C:\hello` (and using alternate separator).
+ */
+function hasRoot(itemPath) {
+    assert_1.default(itemPath, `hasRoot parameter 'itemPath' must not be empty`);
+    // Normalize separators
+    itemPath = normalizeSeparators(itemPath);
+    // Windows
+    if (IS_WINDOWS) {
+        // E.g. \ or \hello or \\hello
+        // E.g. C: or C:\hello
+        return itemPath.startsWith('\\') || /^[A-Z]:/i.test(itemPath);
+    }
+    // E.g. /hello
+    return itemPath.startsWith('/');
+}
+exports.hasRoot = hasRoot;
+/**
+ * Removes redundant slashes and converts `/` to `\` on Windows
+ */
+function normalizeSeparators(p) {
+    p = p || '';
+    // Windows
+    if (IS_WINDOWS) {
+        // Convert slashes on Windows
+        p = p.replace(/\//g, '\\');
+        // Remove redundant slashes
+        const isUnc = /^\\\\+[^\\]/.test(p); // e.g. \\hello
+        return (isUnc ? '\\' : '') + p.replace(/\\\\+/g, '\\'); // preserve leading \\ for UNC
+    }
+    // Remove redundant slashes
+    return p.replace(/\/\/+/g, '/');
+}
+exports.normalizeSeparators = normalizeSeparators;
+/**
+ * Normalizes the path separators and trims the trailing separator (when safe).
+ * For example, `/foo/ => /foo` but `/ => /`
+ */
+function safeTrimTrailingSeparator(p) {
+    // Short-circuit if empty
+    if (!p) {
+        return '';
+    }
+    // Normalize separators
+    p = normalizeSeparators(p);
+    // No trailing slash
+    if (!p.endsWith(path.sep)) {
+        return p;
+    }
+    // Check '/' on Linux/macOS and '\' on Windows
+    if (p === path.sep) {
+        return p;
+    }
+    // On Windows check if drive root. E.g. C:\
+    if (IS_WINDOWS && /^[A-Z]:\\$/i.test(p)) {
+        return p;
+    }
+    // Otherwise trim trailing slash
+    return p.substr(0, p.length - 1);
+}
+exports.safeTrimTrailingSeparator = safeTrimTrailingSeparator;
+//# sourceMappingURL=internal-path-helper.js.map
+
+/***/ }),
+
+/***/ 186:
+/***/ (function(__unused_webpack_module, exports, __nccwpck_require__) {
+
+
+var __createBinding = (this && this.__createBinding) || (Object.create ? (function(o, m, k, k2) {
+    if (k2 === undefined) k2 = k;
+    Object.defineProperty(o, k2, { enumerable: true, get: function() { return m[k]; } });
+}) : (function(o, m, k, k2) {
+    if (k2 === undefined) k2 = k;
+    o[k2] = m[k];
+}));
+var __setModuleDefault = (this && this.__setModuleDefault) || (Object.create ? (function(o, v) {
+    Object.defineProperty(o, "default", { enumerable: true, value: v });
+}) : function(o, v) {
+    o["default"] = v;
+});
+var __importStar = (this && this.__importStar) || function (mod) {
+    if (mod && mod.__esModule) return mod;
+    var result = {};
+    if (mod != null) for (var k in mod) if (k !== "default" && Object.hasOwnProperty.call(mod, k)) __createBinding(result, mod, k);
+    __setModuleDefault(result, mod);
+    return result;
+};
+var __importDefault = (this && this.__importDefault) || function (mod) {
+    return (mod && mod.__esModule) ? mod : { "default": mod };
+};
+Object.defineProperty(exports, "__esModule", ({ value: true }));
+exports.Path = void 0;
+const path = __importStar(__nccwpck_require__(6928));
+const pathHelper = __importStar(__nccwpck_require__(3051));
+const assert_1 = __importDefault(__nccwpck_require__(2613));
+const IS_WINDOWS = process.platform === 'win32';
+/**
+ * Helper class for parsing paths into segments
+ */
+class Path {
+    /**
+     * Constructs a Path
+     * @param itemPath Path or array of segments
+     */
+    constructor(itemPath) {
+        this.segments = [];
+        // String
+        if (typeof itemPath === 'string') {
+            assert_1.default(itemPath, `Parameter 'itemPath' must not be empty`);
+            // Normalize slashes and trim unnecessary trailing slash
+            itemPath = pathHelper.safeTrimTrailingSeparator(itemPath);
+            // Not rooted
+            if (!pathHelper.hasRoot(itemPath)) {
+                this.segments = itemPath.split(path.sep);
+            }
+            // Rooted
+            else {
+                // Add all segments, while not at the root
+                let remaining = itemPath;
+                let dir = pathHelper.dirname(remaining);
+                while (dir !== remaining) {
+                    // Add the segment
+                    const basename = path.basename(remaining);
+                    this.segments.unshift(basename);
+                    // Truncate the last segment
+                    remaining = dir;
+                    dir = pathHelper.dirname(remaining);
+                }
+                // Remainder is the root
+                this.segments.unshift(remaining);
+            }
+        }
+        // Array
+        else {
+            // Must not be empty
+            assert_1.default(itemPath.length > 0, `Parameter 'itemPath' must not be an empty array`);
+            // Each segment
+            for (let i = 0; i < itemPath.length; i++) {
+                let segment = itemPath[i];
+                // Must not be empty
+                assert_1.default(segment, `Parameter 'itemPath' must not contain any empty segments`);
+                // Normalize slashes
+                segment = pathHelper.normalizeSeparators(itemPath[i]);
+                // Root segment
+                if (i === 0 && pathHelper.hasRoot(segment)) {
+                    segment = pathHelper.safeTrimTrailingSeparator(segment);
+                    assert_1.default(segment === pathHelper.dirname(segment), `Parameter 'itemPath' root segment contains information for multiple segments`);
+                    this.segments.push(segment);
+                }
+                // All other segments
+                else {
+                    // Must not contain slash
+                    assert_1.default(!segment.includes(path.sep), `Parameter 'itemPath' contains unexpected path separators`);
+                    this.segments.push(segment);
+                }
+            }
+        }
+    }
+    /**
+     * Converts the path to its string representation
+     */
+    toString() {
+        // First segment
+        let result = this.segments[0];
+        // All others
+        let skipSlash = result.endsWith(path.sep) || (IS_WINDOWS && /^[A-Z]:$/i.test(result));
+        for (let i = 1; i < this.segments.length; i++) {
+            if (skipSlash) {
+                skipSlash = false;
+            }
+            else {
+                result += path.sep;
+            }
+            result += this.segments[i];
+        }
+        return result;
+    }
+}
+exports.Path = Path;
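+// Hedged examples of how the Path helper above splits a rooted path into segments
+// (Linux/macOS shown; on Windows the drive or UNC root becomes the first segment):
+//   new Path('/foo/bar/baz').segments          // => ['/', 'foo', 'bar', 'baz']
+//   new Path(['/', 'foo', 'bar']).toString()   // => '/foo/bar'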
+//# sourceMappingURL=internal-path.js.map
+
+/***/ }),
+
+/***/ 3320:
+/***/ (function(__unused_webpack_module, exports, __nccwpck_require__) {
+
+
+var __createBinding = (this && this.__createBinding) || (Object.create ? (function(o, m, k, k2) {
+    if (k2 === undefined) k2 = k;
+    Object.defineProperty(o, k2, { enumerable: true, get: function() { return m[k]; } });
+}) : (function(o, m, k, k2) {
+    if (k2 === undefined) k2 = k;
+    o[k2] = m[k];
+}));
+var __setModuleDefault = (this && this.__setModuleDefault) || (Object.create ? (function(o, v) {
+    Object.defineProperty(o, "default", { enumerable: true, value: v });
+}) : function(o, v) {
+    o["default"] = v;
+});
+var __importStar = (this && this.__importStar) || function (mod) {
+    if (mod && mod.__esModule) return mod;
+    var result = {};
+    if (mod != null) for (var k in mod) if (k !== "default" && Object.hasOwnProperty.call(mod, k)) __createBinding(result, mod, k);
+    __setModuleDefault(result, mod);
+    return result;
+};
+Object.defineProperty(exports, "__esModule", ({ value: true }));
+exports.partialMatch = exports.match = exports.getSearchPaths = void 0;
+const pathHelper = __importStar(__nccwpck_require__(3051));
+const internal_match_kind_1 = __nccwpck_require__(5843);
+const IS_WINDOWS = process.platform === 'win32';
+/**
+ * Given an array of patterns, returns an array of paths to search.
+ * Duplicates and paths under other included paths are filtered out.
+ */
+function getSearchPaths(patterns) {
+    // Ignore negate patterns
+    patterns = patterns.filter(x => !x.negate);
+    // Create a map of all search paths
+    const searchPathMap = {};
+    for (const pattern of patterns) {
+        const key = IS_WINDOWS
+            ? pattern.searchPath.toUpperCase()
+            : pattern.searchPath;
+        searchPathMap[key] = 'candidate';
+    }
+    const result = [];
+    for (const pattern of patterns) {
+        // Check if already included
+        const key = IS_WINDOWS
+            ? pattern.searchPath.toUpperCase()
+            : pattern.searchPath;
+        if (searchPathMap[key] === 'included') {
+            continue;
+        }
+        // Check for an ancestor search path
+        let foundAncestor = false;
+        let tempKey = key;
+        let parent = pathHelper.dirname(tempKey);
+        while (parent !== tempKey) {
+            if (searchPathMap[parent]) {
+                foundAncestor = true;
+                break;
+            }
+            tempKey = parent;
+            parent = pathHelper.dirname(tempKey);
+        }
+        // Include the search pattern in the result
+        if (!foundAncestor) {
+            result.push(pattern.searchPath);
+            searchPathMap[key] = 'included';
+        }
+    }
+    return result;
+}
+exports.getSearchPaths = getSearchPaths;
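+// A hedged illustration of the ancestor filtering done by getSearchPaths above: for patterns whose
+// search paths are '/repo/src', '/repo/src/lib' and '/repo/docs', only '/repo/src' and '/repo/docs'
+// are returned, because '/repo/src/lib' already falls under an included search path. Negate
+// patterns contribute no search paths at all.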
+/**
+ * Matches the patterns against the path
+ */
+function match(patterns, itemPath) {
+    let result = internal_match_kind_1.MatchKind.None;
+    for (const pattern of patterns) {
+        if (pattern.negate) {
+            result &= ~pattern.match(itemPath);
+        }
+        else {
+            result |= pattern.match(itemPath);
+        }
+    }
+    return result;
+}
+exports.match = match;
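+// Because MatchKind values are bit flags, match() above ORs flags in for positive patterns and
+// masks them out for negate patterns. A hedged illustration with patterns built from 'src/**'
+// and '!src/**/*.test.ts', assuming the cwd is /repo:
+//   match(patterns, '/repo/src/app.ts')        // => MatchKind.All  (set by 'src/**', nothing removed)
+//   match(patterns, '/repo/src/app.test.ts')   // => MatchKind.None (set by 'src/**', cleared by the negate pattern)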
+/**
+ * Checks whether to descend further into the directory
+ */
+function partialMatch(patterns, itemPath) {
+    return patterns.some(x => !x.negate && x.partialMatch(itemPath));
+}
+exports.partialMatch = partialMatch;
+//# sourceMappingURL=internal-pattern-helper.js.map
+
+/***/ }),
+
+/***/ 8887:
+/***/ (function(__unused_webpack_module, exports, __nccwpck_require__) {
+
+
+var __createBinding = (this && this.__createBinding) || (Object.create ? (function(o, m, k, k2) {
+    if (k2 === undefined) k2 = k;
+    Object.defineProperty(o, k2, { enumerable: true, get: function() { return m[k]; } });
+}) : (function(o, m, k, k2) {
+    if (k2 === undefined) k2 = k;
+    o[k2] = m[k];
+}));
+var __setModuleDefault = (this && this.__setModuleDefault) || (Object.create ? (function(o, v) {
+    Object.defineProperty(o, "default", { enumerable: true, value: v });
+}) : function(o, v) {
+    o["default"] = v;
+});
+var __importStar = (this && this.__importStar) || function (mod) {
+    if (mod && mod.__esModule) return mod;
+    var result = {};
+    if (mod != null) for (var k in mod) if (k !== "default" && Object.hasOwnProperty.call(mod, k)) __createBinding(result, mod, k);
+    __setModuleDefault(result, mod);
+    return result;
+};
+var __importDefault = (this && this.__importDefault) || function (mod) {
+    return (mod && mod.__esModule) ? mod : { "default": mod };
+};
+Object.defineProperty(exports, "__esModule", ({ value: true }));
+exports.Pattern = void 0;
+const os = __importStar(__nccwpck_require__(857));
+const path = __importStar(__nccwpck_require__(6928));
+const pathHelper = __importStar(__nccwpck_require__(3051));
+const assert_1 = __importDefault(__nccwpck_require__(2613));
+const minimatch_1 = __nccwpck_require__(6274);
+const internal_match_kind_1 = __nccwpck_require__(5843);
+const internal_path_1 = __nccwpck_require__(186);
+const IS_WINDOWS = process.platform === 'win32';
+class Pattern {
+    constructor(patternOrNegate, isImplicitPattern = false, segments, homedir) {
+        /**
+         * Indicates whether matches should be excluded from the result set
+         */
+        this.negate = false;
+        // Pattern overload
+        let pattern;
+        if (typeof patternOrNegate === 'string') {
+            pattern = patternOrNegate.trim();
+        }
+        // Segments overload
+        else {
+            // Convert to pattern
+            segments = segments || [];
+            assert_1.default(segments.length, `Parameter 'segments' must not be empty`);
+            const root = Pattern.getLiteral(segments[0]);
+            assert_1.default(root && pathHelper.hasAbsoluteRoot(root), `Parameter 'segments' first element must be a root path`);
+            pattern = new internal_path_1.Path(segments).toString().trim();
+            if (patternOrNegate) {
+                pattern = `!${pattern}`;
+            }
+        }
+        // Negate
+        while (pattern.startsWith('!')) {
+            this.negate = !this.negate;
+            pattern = pattern.substr(1).trim();
+        }
+        // Normalize slashes and ensure an absolute root
+        pattern = Pattern.fixupPattern(pattern, homedir);
+        // Segments
+        this.segments = new internal_path_1.Path(pattern).segments;
+        // Trailing slash indicates the pattern should only match directories, not regular files
+        this.trailingSeparator = pathHelper
+            .normalizeSeparators(pattern)
+            .endsWith(path.sep);
+        pattern = pathHelper.safeTrimTrailingSeparator(pattern);
+        // Search path (literal path prior to the first glob segment)
+        let foundGlob = false;
+        const searchSegments = this.segments
+            .map(x => Pattern.getLiteral(x))
+            .filter(x => !foundGlob && !(foundGlob = x === ''));
+        this.searchPath = new internal_path_1.Path(searchSegments).toString();
+        // Root RegExp (required when determining partial match)
+        this.rootRegExp = new RegExp(Pattern.regExpEscape(searchSegments[0]), IS_WINDOWS ? 'i' : '');
+        this.isImplicitPattern = isImplicitPattern;
+        // Create minimatch
+        const minimatchOptions = {
+            dot: true,
+            nobrace: true,
+            nocase: IS_WINDOWS,
+            nocomment: true,
+            noext: true,
+            nonegate: true
+        };
+        pattern = IS_WINDOWS ? pattern.replace(/\\/g, '/') : pattern;
+        this.minimatch = new minimatch_1.Minimatch(pattern, minimatchOptions);
+    }
+    /**
+     * Matches the pattern against the specified path
+     */
+    match(itemPath) {
+        // Last segment is globstar?
+        if (this.segments[this.segments.length - 1] === '**') {
+            // Normalize slashes
+            itemPath = pathHelper.normalizeSeparators(itemPath);
+            // Append a trailing slash. Otherwise Minimatch will not match the directory immediately
+            // preceding the globstar. For example, given the pattern `/foo/**`, Minimatch returns
+            // false for `/foo` but returns true for `/foo/`. Append a trailing slash to handle that quirk.
+            if (!itemPath.endsWith(path.sep) && this.isImplicitPattern === false) {
+                // Note, this is safe because the constructor ensures the pattern has an absolute root.
+                // For example, formats like C: and C:foo on Windows are resolved to an absolute root.
+                itemPath = `${itemPath}${path.sep}`;
+            }
+        }
+        else {
+            // Normalize slashes and trim unnecessary trailing slash
+            itemPath = pathHelper.safeTrimTrailingSeparator(itemPath);
+        }
+        // Match
+        if (this.minimatch.match(itemPath)) {
+            return this.trailingSeparator ? internal_match_kind_1.MatchKind.Directory : internal_match_kind_1.MatchKind.All;
+        }
+        return internal_match_kind_1.MatchKind.None;
+    }
+    /**
+     * Indicates whether the pattern may match descendants of the specified path
+     */
+    partialMatch(itemPath) {
+        // Normalize slashes and trim unnecessary trailing slash
+        itemPath = pathHelper.safeTrimTrailingSeparator(itemPath);
+        // matchOne does not handle root path correctly
+        if (pathHelper.dirname(itemPath) === itemPath) {
+            return this.rootRegExp.test(itemPath);
+        }
+        return this.minimatch.matchOne(itemPath.split(IS_WINDOWS ? /\\+/ : /\/+/), this.minimatch.set[0], true);
+    }
+    /**
+     * Escapes glob patterns within a path
+     */
+    static globEscape(s) {
+        return (IS_WINDOWS ? s : s.replace(/\\/g, '\\\\')) // escape '\' on Linux/macOS
+            .replace(/(\[)(?=[^/]+\])/g, '[[]') // escape '[' when ']' follows within the path segment
+            .replace(/\?/g, '[?]') // escape '?'
+            .replace(/\*/g, '[*]'); // escape '*'
+    }
+    /**
+     * Normalizes slashes and ensures absolute root
+     */
+    static fixupPattern(pattern, homedir) {
+        // Empty
+        assert_1.default(pattern, 'pattern cannot be empty');
+        // Must not contain `.` segment, unless first segment
+        // Must not contain `..` segment
+        const literalSegments = new internal_path_1.Path(pattern).segments.map(x => Pattern.getLiteral(x));
+        assert_1.default(literalSegments.every((x, i) => (x !== '.' || i === 0) && x !== '..'), `Invalid pattern '${pattern}'. Relative pathing '.' and '..' is not allowed.`);
+        // Must not contain globs in root, e.g. Windows UNC path \\foo\b*r
+        assert_1.default(!pathHelper.hasRoot(pattern) || literalSegments[0], `Invalid pattern '${pattern}'. Root segment must not contain globs.`);
+        // Normalize slashes
+        pattern = pathHelper.normalizeSeparators(pattern);
+        // Replace leading `.` segment
+        if (pattern === '.' || pattern.startsWith(`.${path.sep}`)) {
+            pattern = Pattern.globEscape(process.cwd()) + pattern.substr(1);
+        }
+        // Replace leading `~` segment
+        else if (pattern === '~' || pattern.startsWith(`~${path.sep}`)) {
+            homedir = homedir || os.homedir();
+            assert_1.default(homedir, 'Unable to determine HOME directory');
+            assert_1.default(pathHelper.hasAbsoluteRoot(homedir), `Expected HOME directory to be a rooted path. Actual '${homedir}'`);
+            pattern = Pattern.globEscape(homedir) + pattern.substr(1);
+        }
+        // Replace relative drive root, e.g. pattern is C: or C:foo
+        else if (IS_WINDOWS &&
+            (pattern.match(/^[A-Z]:$/i) || pattern.match(/^[A-Z]:[^\\]/i))) {
+            let root = pathHelper.ensureAbsoluteRoot('C:\\dummy-root', pattern.substr(0, 2));
+            if (pattern.length > 2 && !root.endsWith('\\')) {
+                root += '\\';
+            }
+            pattern = Pattern.globEscape(root) + pattern.substr(2);
+        }
+        // Replace relative root, e.g. pattern is \ or \foo
+        else if (IS_WINDOWS && (pattern === '\\' || pattern.match(/^\\[^\\]/))) {
+            let root = pathHelper.ensureAbsoluteRoot('C:\\dummy-root', '\\');
+            if (!root.endsWith('\\')) {
+                root += '\\';
+            }
+            pattern = Pattern.globEscape(root) + pattern.substr(1);
+        }
+        // Otherwise ensure absolute root
+        else {
+            pattern = pathHelper.ensureAbsoluteRoot(Pattern.globEscape(process.cwd()), pattern);
+        }
+        return pathHelper.normalizeSeparators(pattern);
+    }
+    /**
+     * Attempts to unescape a pattern segment to create a literal path segment.
+     * Otherwise returns empty string.
+     */
+    static getLiteral(segment) {
+        let literal = '';
+        for (let i = 0; i < segment.length; i++) {
+            const c = segment[i];
+            // Escape
+            if (c === '\\' && !IS_WINDOWS && i + 1 < segment.length) {
+                literal += segment[++i];
+                continue;
+            }
+            // Wildcard
+            else if (c === '*' || c === '?') {
+                return '';
+            }
+            // Character set
+            else if (c === '[' && i + 1 < segment.length) {
+                let set = '';
+                let closed = -1;
+                for (let i2 = i + 1; i2 < segment.length; i2++) {
+                    const c2 = segment[i2];
+                    // Escape
+                    if (c2 === '\\' && !IS_WINDOWS && i2 + 1 < segment.length) {
+                        set += segment[++i2];
+                        continue;
+                    }
+                    // Closed
+                    else if (c2 === ']') {
+                        closed = i2;
+                        break;
+                    }
+                    // Otherwise
+                    else {
+                        set += c2;
+                    }
+                }
+                // Closed?
+                if (closed >= 0) {
+                    // Cannot convert
+                    if (set.length > 1) {
+                        return '';
+                    }
+                    // Convert to literal
+                    if (set) {
+                        literal += set;
+                        i = closed;
+                        continue;
+                    }
+                }
+                // Otherwise fall thru
+            }
+            // Append
+            literal += c;
+        }
+        return literal;
+    }
+    /**
+     * Escapes regexp special characters
+     * https://javascript.info/regexp-escaping
+     */
+    static regExpEscape(s) {
+        return s.replace(/[[\\^$.|?*+()]/g, '\\$&');
+    }
+}
+exports.Pattern = Pattern;
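+// Hedged examples of how the Pattern constructor above interprets its input (assuming cwd /repo):
+//   new Pattern('!**/*.md')      // negate === true, so its matches are subtracted by the pattern helper
+//   new Pattern('src/**/')       // trailingSeparator === true, so match() returns MatchKind.Directory only
+//   new Pattern('~/cfg/*.yml')   // leading '~' is replaced with os.homedir() (glob-escaped)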
+//# sourceMappingURL=internal-pattern.js.map
+
+/***/ }),
+
+/***/ 865:
+/***/ ((__unused_webpack_module, exports) => {
+
+
+Object.defineProperty(exports, "__esModule", ({ value: true }));
+exports.SearchState = void 0;
+class SearchState {
+    constructor(path, level) {
+        this.path = path;
+        this.level = level;
+    }
+}
+exports.SearchState = SearchState;
+//# sourceMappingURL=internal-search-state.js.map
+
+/***/ }),
+
+/***/ 3673:
+/***/ (function(__unused_webpack_module, exports) {
+
+
+var __awaiter = (this && this.__awaiter) || function (thisArg, _arguments, P, generator) {
+    function adopt(value) { return value instanceof P ? value : new P(function (resolve) { resolve(value); }); }
+    return new (P || (P = Promise))(function (resolve, reject) {
+        function fulfilled(value) { try { step(generator.next(value)); } catch (e) { reject(e); } }
+        function rejected(value) { try { step(generator["throw"](value)); } catch (e) { reject(e); } }
+        function step(result) { result.done ? resolve(result.value) : adopt(result.value).then(fulfilled, rejected); }
+        step((generator = generator.apply(thisArg, _arguments || [])).next());
+    });
+};
+Object.defineProperty(exports, "__esModule", ({ value: true }));
+exports.PersonalAccessTokenCredentialHandler = exports.BearerCredentialHandler = exports.BasicCredentialHandler = void 0;
+class BasicCredentialHandler {
+    constructor(username, password) {
+        this.username = username;
+        this.password = password;
+    }
+    prepareRequest(options) {
+        if (!options.headers) {
+            throw Error('The request has no headers');
+        }
+        options.headers['Authorization'] = `Basic ${Buffer.from(`${this.username}:${this.password}`).toString('base64')}`;
+    }
+    // This handler cannot handle 401
+    canHandleAuthentication() {
+        return false;
+    }
+    handleAuthentication() {
+        return __awaiter(this, void 0, void 0, function* () {
+            throw new Error('not implemented');
+        });
+    }
+}
+exports.BasicCredentialHandler = BasicCredentialHandler;
+class BearerCredentialHandler {
+    constructor(token) {
+        this.token = token;
+    }
+    // currently implements pre-authorization
+    // TODO: support preAuth = false where it hooks on 401
+    prepareRequest(options) {
+        if (!options.headers) {
+            throw Error('The request has no headers');
+        }
+        options.headers['Authorization'] = `Bearer ${this.token}`;
+    }
+    // This handler cannot handle 401
+    canHandleAuthentication() {
+        return false;
+    }
+    handleAuthentication() {
+        return __awaiter(this, void 0, void 0, function* () {
+            throw new Error('not implemented');
+        });
+    }
+}
+exports.BearerCredentialHandler = BearerCredentialHandler;
+class PersonalAccessTokenCredentialHandler {
+    constructor(token) {
+        this.token = token;
+    }
+    // currently implements pre-authorization
+    // TODO: support preAuth = false where it hooks on 401
+    prepareRequest(options) {
+        if (!options.headers) {
+            throw Error('The request has no headers');
+        }
+        options.headers['Authorization'] = `Basic ${Buffer.from(`PAT:${this.token}`).toString('base64')}`;
+    }
+    // This handler cannot handle 401
+    canHandleAuthentication() {
+        return false;
+    }
+    handleAuthentication() {
+        return __awaiter(this, void 0, void 0, function* () {
+            throw new Error('not implemented');
+        });
+    }
+}
+exports.PersonalAccessTokenCredentialHandler = PersonalAccessTokenCredentialHandler;
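+// Usage sketch (illustrative only): these handlers are passed to an HttpClient, which calls
+// prepareRequest on each of them so every outgoing request carries the right header, e.g.:
+//
+//   const client = new HttpClient('my-user-agent', [new BearerCredentialHandler(token)]);
+//   // every request now sends `Authorization: Bearer <token>`
+//
+// BasicCredentialHandler sends `Basic base64(username:password)` and
+// PersonalAccessTokenCredentialHandler sends `Basic base64("PAT:" + token)`.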
+//# sourceMappingURL=auth.js.map
+
+/***/ }),
+
+/***/ 787:
+/***/ (function(__unused_webpack_module, exports, __nccwpck_require__) {
+
+
+/* eslint-disable @typescript-eslint/no-explicit-any */
+var __createBinding = (this && this.__createBinding) || (Object.create ? (function(o, m, k, k2) {
+    if (k2 === undefined) k2 = k;
+    var desc = Object.getOwnPropertyDescriptor(m, k);
+    if (!desc || ("get" in desc ? !m.__esModule : desc.writable || desc.configurable)) {
+      desc = { enumerable: true, get: function() { return m[k]; } };
+    }
+    Object.defineProperty(o, k2, desc);
+}) : (function(o, m, k, k2) {
+    if (k2 === undefined) k2 = k;
+    o[k2] = m[k];
+}));
+var __setModuleDefault = (this && this.__setModuleDefault) || (Object.create ? (function(o, v) {
+    Object.defineProperty(o, "default", { enumerable: true, value: v });
+}) : function(o, v) {
+    o["default"] = v;
+});
+var __importStar = (this && this.__importStar) || function (mod) {
+    if (mod && mod.__esModule) return mod;
+    var result = {};
+    if (mod != null) for (var k in mod) if (k !== "default" && Object.prototype.hasOwnProperty.call(mod, k)) __createBinding(result, mod, k);
+    __setModuleDefault(result, mod);
+    return result;
+};
+var __awaiter = (this && this.__awaiter) || function (thisArg, _arguments, P, generator) {
+    function adopt(value) { return value instanceof P ? value : new P(function (resolve) { resolve(value); }); }
+    return new (P || (P = Promise))(function (resolve, reject) {
+        function fulfilled(value) { try { step(generator.next(value)); } catch (e) { reject(e); } }
+        function rejected(value) { try { step(generator["throw"](value)); } catch (e) { reject(e); } }
+        function step(result) { result.done ? resolve(result.value) : adopt(result.value).then(fulfilled, rejected); }
+        step((generator = generator.apply(thisArg, _arguments || [])).next());
+    });
+};
+Object.defineProperty(exports, "__esModule", ({ value: true }));
+exports.HttpClient = exports.isHttps = exports.HttpClientResponse = exports.HttpClientError = exports.getProxyUrl = exports.MediaTypes = exports.Headers = exports.HttpCodes = void 0;
+const http = __importStar(__nccwpck_require__(8611));
+const https = __importStar(__nccwpck_require__(5692));
+const pm = __importStar(__nccwpck_require__(7407));
+const tunnel = __importStar(__nccwpck_require__(6124));
+const undici_1 = __nccwpck_require__(1909);
+var HttpCodes;
+(function (HttpCodes) {
+    HttpCodes[HttpCodes["OK"] = 200] = "OK";
+    HttpCodes[HttpCodes["MultipleChoices"] = 300] = "MultipleChoices";
+    HttpCodes[HttpCodes["MovedPermanently"] = 301] = "MovedPermanently";
+    HttpCodes[HttpCodes["ResourceMoved"] = 302] = "ResourceMoved";
+    HttpCodes[HttpCodes["SeeOther"] = 303] = "SeeOther";
+    HttpCodes[HttpCodes["NotModified"] = 304] = "NotModified";
+    HttpCodes[HttpCodes["UseProxy"] = 305] = "UseProxy";
+    HttpCodes[HttpCodes["SwitchProxy"] = 306] = "SwitchProxy";
+    HttpCodes[HttpCodes["TemporaryRedirect"] = 307] = "TemporaryRedirect";
+    HttpCodes[HttpCodes["PermanentRedirect"] = 308] = "PermanentRedirect";
+    HttpCodes[HttpCodes["BadRequest"] = 400] = "BadRequest";
+    HttpCodes[HttpCodes["Unauthorized"] = 401] = "Unauthorized";
+    HttpCodes[HttpCodes["PaymentRequired"] = 402] = "PaymentRequired";
+    HttpCodes[HttpCodes["Forbidden"] = 403] = "Forbidden";
+    HttpCodes[HttpCodes["NotFound"] = 404] = "NotFound";
+    HttpCodes[HttpCodes["MethodNotAllowed"] = 405] = "MethodNotAllowed";
+    HttpCodes[HttpCodes["NotAcceptable"] = 406] = "NotAcceptable";
+    HttpCodes[HttpCodes["ProxyAuthenticationRequired"] = 407] = "ProxyAuthenticationRequired";
+    HttpCodes[HttpCodes["RequestTimeout"] = 408] = "RequestTimeout";
+    HttpCodes[HttpCodes["Conflict"] = 409] = "Conflict";
+    HttpCodes[HttpCodes["Gone"] = 410] = "Gone";
+    HttpCodes[HttpCodes["TooManyRequests"] = 429] = "TooManyRequests";
+    HttpCodes[HttpCodes["InternalServerError"] = 500] = "InternalServerError";
+    HttpCodes[HttpCodes["NotImplemented"] = 501] = "NotImplemented";
+    HttpCodes[HttpCodes["BadGateway"] = 502] = "BadGateway";
+    HttpCodes[HttpCodes["ServiceUnavailable"] = 503] = "ServiceUnavailable";
+    HttpCodes[HttpCodes["GatewayTimeout"] = 504] = "GatewayTimeout";
+})(HttpCodes || (exports.HttpCodes = HttpCodes = {}));
+var Headers;
+(function (Headers) {
+    Headers["Accept"] = "accept";
+    Headers["ContentType"] = "content-type";
+})(Headers || (exports.Headers = Headers = {}));
+var MediaTypes;
+(function (MediaTypes) {
+    MediaTypes["ApplicationJson"] = "application/json";
+})(MediaTypes || (exports.MediaTypes = MediaTypes = {}));
+/**
+ * Returns the proxy URL, depending upon the supplied url and proxy environment variables.
+ * @param serverUrl  The server URL where the request will be sent. For example, https://api.github.com
+ */
+function getProxyUrl(serverUrl) {
+    const proxyUrl = pm.getProxyUrl(new URL(serverUrl));
+    return proxyUrl ? proxyUrl.href : '';
+}
+exports.getProxyUrl = getProxyUrl;
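+// Usage sketch (illustrative only; `proxy.local:8080` is a placeholder): with
+// `https_proxy=http://proxy.local:8080` set in the environment,
+// getProxyUrl('https://api.github.com') returns 'http://proxy.local:8080/';
+// it returns '' when no proxy variable applies or the host is bypassed via no_proxy.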
+const HttpRedirectCodes = [
+    HttpCodes.MovedPermanently,
+    HttpCodes.ResourceMoved,
+    HttpCodes.SeeOther,
+    HttpCodes.TemporaryRedirect,
+    HttpCodes.PermanentRedirect
+];
+const HttpResponseRetryCodes = [
+    HttpCodes.BadGateway,
+    HttpCodes.ServiceUnavailable,
+    HttpCodes.GatewayTimeout
+];
+const RetryableHttpVerbs = ['OPTIONS', 'GET', 'DELETE', 'HEAD'];
+const ExponentialBackoffCeiling = 10;
+const ExponentialBackoffTimeSlice = 5;
+class HttpClientError extends Error {
+    constructor(message, statusCode) {
+        super(message);
+        this.name = 'HttpClientError';
+        this.statusCode = statusCode;
+        Object.setPrototypeOf(this, HttpClientError.prototype);
+    }
+}
+exports.HttpClientError = HttpClientError;
+class HttpClientResponse {
+    constructor(message) {
+        this.message = message;
+    }
+    readBody() {
+        return __awaiter(this, void 0, void 0, function* () {
+            return new Promise((resolve) => __awaiter(this, void 0, void 0, function* () {
+                let output = Buffer.alloc(0);
+                this.message.on('data', (chunk) => {
+                    output = Buffer.concat([output, chunk]);
+                });
+                this.message.on('end', () => {
+                    resolve(output.toString());
+                });
+            }));
+        });
+    }
+    readBodyBuffer() {
+        return __awaiter(this, void 0, void 0, function* () {
+            return new Promise((resolve) => __awaiter(this, void 0, void 0, function* () {
+                const chunks = [];
+                this.message.on('data', (chunk) => {
+                    chunks.push(chunk);
+                });
+                this.message.on('end', () => {
+                    resolve(Buffer.concat(chunks));
+                });
+            }));
+        });
+    }
+}
+exports.HttpClientResponse = HttpClientResponse;
+function isHttps(requestUrl) {
+    const parsedUrl = new URL(requestUrl);
+    return parsedUrl.protocol === 'https:';
+}
+exports.isHttps = isHttps;
+class HttpClient {
+    constructor(userAgent, handlers, requestOptions) {
+        this._ignoreSslError = false;
+        this._allowRedirects = true;
+        this._allowRedirectDowngrade = false;
+        this._maxRedirects = 50;
+        this._allowRetries = false;
+        this._maxRetries = 1;
+        this._keepAlive = false;
+        this._disposed = false;
+        this.userAgent = userAgent;
+        this.handlers = handlers || [];
+        this.requestOptions = requestOptions;
+        if (requestOptions) {
+            if (requestOptions.ignoreSslError != null) {
+                this._ignoreSslError = requestOptions.ignoreSslError;
+            }
+            this._socketTimeout = requestOptions.socketTimeout;
+            if (requestOptions.allowRedirects != null) {
+                this._allowRedirects = requestOptions.allowRedirects;
+            }
+            if (requestOptions.allowRedirectDowngrade != null) {
+                this._allowRedirectDowngrade = requestOptions.allowRedirectDowngrade;
+            }
+            if (requestOptions.maxRedirects != null) {
+                this._maxRedirects = Math.max(requestOptions.maxRedirects, 0);
+            }
+            if (requestOptions.keepAlive != null) {
+                this._keepAlive = requestOptions.keepAlive;
+            }
+            if (requestOptions.allowRetries != null) {
+                this._allowRetries = requestOptions.allowRetries;
+            }
+            if (requestOptions.maxRetries != null) {
+                this._maxRetries = requestOptions.maxRetries;
+            }
+        }
+    }
+    options(requestUrl, additionalHeaders) {
+        return __awaiter(this, void 0, void 0, function* () {
+            return this.request('OPTIONS', requestUrl, null, additionalHeaders || {});
+        });
+    }
+    get(requestUrl, additionalHeaders) {
+        return __awaiter(this, void 0, void 0, function* () {
+            return this.request('GET', requestUrl, null, additionalHeaders || {});
+        });
+    }
+    del(requestUrl, additionalHeaders) {
+        return __awaiter(this, void 0, void 0, function* () {
+            return this.request('DELETE', requestUrl, null, additionalHeaders || {});
+        });
+    }
+    post(requestUrl, data, additionalHeaders) {
+        return __awaiter(this, void 0, void 0, function* () {
+            return this.request('POST', requestUrl, data, additionalHeaders || {});
+        });
+    }
+    patch(requestUrl, data, additionalHeaders) {
+        return __awaiter(this, void 0, void 0, function* () {
+            return this.request('PATCH', requestUrl, data, additionalHeaders || {});
+        });
+    }
+    put(requestUrl, data, additionalHeaders) {
+        return __awaiter(this, void 0, void 0, function* () {
+            return this.request('PUT', requestUrl, data, additionalHeaders || {});
+        });
+    }
+    head(requestUrl, additionalHeaders) {
+        return __awaiter(this, void 0, void 0, function* () {
+            return this.request('HEAD', requestUrl, null, additionalHeaders || {});
+        });
+    }
+    sendStream(verb, requestUrl, stream, additionalHeaders) {
+        return __awaiter(this, void 0, void 0, function* () {
+            return this.request(verb, requestUrl, stream, additionalHeaders);
+        });
+    }
+    /**
+     * Gets a typed object from an endpoint
+     * Be aware that not found returns null. Other errors (4xx, 5xx) reject the promise
+     */
+    getJson(requestUrl, additionalHeaders = {}) {
+        return __awaiter(this, void 0, void 0, function* () {
+            additionalHeaders[Headers.Accept] = this._getExistingOrDefaultHeader(additionalHeaders, Headers.Accept, MediaTypes.ApplicationJson);
+            const res = yield this.get(requestUrl, additionalHeaders);
+            return this._processResponse(res, this.requestOptions);
+        });
+    }
+    postJson(requestUrl, obj, additionalHeaders = {}) {
+        return __awaiter(this, void 0, void 0, function* () {
+            const data = JSON.stringify(obj, null, 2);
+            additionalHeaders[Headers.Accept] = this._getExistingOrDefaultHeader(additionalHeaders, Headers.Accept, MediaTypes.ApplicationJson);
+            additionalHeaders[Headers.ContentType] = this._getExistingOrDefaultHeader(additionalHeaders, Headers.ContentType, MediaTypes.ApplicationJson);
+            const res = yield this.post(requestUrl, data, additionalHeaders);
+            return this._processResponse(res, this.requestOptions);
+        });
+    }
+    putJson(requestUrl, obj, additionalHeaders = {}) {
+        return __awaiter(this, void 0, void 0, function* () {
+            const data = JSON.stringify(obj, null, 2);
+            additionalHeaders[Headers.Accept] = this._getExistingOrDefaultHeader(additionalHeaders, Headers.Accept, MediaTypes.ApplicationJson);
+            additionalHeaders[Headers.ContentType] = this._getExistingOrDefaultHeader(additionalHeaders, Headers.ContentType, MediaTypes.ApplicationJson);
+            const res = yield this.put(requestUrl, data, additionalHeaders);
+            return this._processResponse(res, this.requestOptions);
+        });
+    }
+    patchJson(requestUrl, obj, additionalHeaders = {}) {
+        return __awaiter(this, void 0, void 0, function* () {
+            const data = JSON.stringify(obj, null, 2);
+            additionalHeaders[Headers.Accept] = this._getExistingOrDefaultHeader(additionalHeaders, Headers.Accept, MediaTypes.ApplicationJson);
+            additionalHeaders[Headers.ContentType] = this._getExistingOrDefaultHeader(additionalHeaders, Headers.ContentType, MediaTypes.ApplicationJson);
+            const res = yield this.patch(requestUrl, data, additionalHeaders);
+            return this._processResponse(res, this.requestOptions);
+        });
+    }
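+    // Usage sketch (illustrative only; the URL below is a placeholder):
+    //
+    //   const client = new HttpClient('my-user-agent');
+    //   const res = await client.getJson('https://api.example.com/resource');
+    //   // res.statusCode, res.headers and res.result are populated; a 404 resolves with
+    //   // result === null, while other non-2xx codes reject with an HttpClientError.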
+    /**
+     * Makes a raw http request.
+     * All other methods such as get, post, patch, and request ultimately call this.
+     * Prefer get, del, post and patch
+     */
+    request(verb, requestUrl, data, headers) {
+        return __awaiter(this, void 0, void 0, function* () {
+            if (this._disposed) {
+                throw new Error('Client has already been disposed.');
+            }
+            const parsedUrl = new URL(requestUrl);
+            let info = this._prepareRequest(verb, parsedUrl, headers);
+            // Only perform retries on reads since writes may not be idempotent.
+            const maxTries = this._allowRetries && RetryableHttpVerbs.includes(verb)
+                ? this._maxRetries + 1
+                : 1;
+            let numTries = 0;
+            let response;
+            do {
+                response = yield this.requestRaw(info, data);
+                // Check if it's an authentication challenge
+                if (response &&
+                    response.message &&
+                    response.message.statusCode === HttpCodes.Unauthorized) {
+                    let authenticationHandler;
+                    for (const handler of this.handlers) {
+                        if (handler.canHandleAuthentication(response)) {
+                            authenticationHandler = handler;
+                            break;
+                        }
+                    }
+                    if (authenticationHandler) {
+                        return authenticationHandler.handleAuthentication(this, info, data);
+                    }
+                    else {
+                        // We have received an unauthorized response but have no handlers to handle it.
+                        // Let the response return to the caller.
+                        return response;
+                    }
+                }
+                let redirectsRemaining = this._maxRedirects;
+                while (response.message.statusCode &&
+                    HttpRedirectCodes.includes(response.message.statusCode) &&
+                    this._allowRedirects &&
+                    redirectsRemaining > 0) {
+                    const redirectUrl = response.message.headers['location'];
+                    if (!redirectUrl) {
+                        // if there's no location to redirect to, we won't
+                        break;
+                    }
+                    const parsedRedirectUrl = new URL(redirectUrl);
+                    if (parsedUrl.protocol === 'https:' &&
+                        parsedUrl.protocol !== parsedRedirectUrl.protocol &&
+                        !this._allowRedirectDowngrade) {
+                        throw new Error('Redirect from HTTPS to HTTP protocol. This downgrade is not allowed for security reasons. If you want to allow this behavior, set the allowRedirectDowngrade option to true.');
+                    }
+                    // we need to finish reading the response before reassigning it;
+                    // otherwise the open socket would leak.
+                    yield response.readBody();
+                    // strip authorization header if redirected to a different hostname
+                    if (parsedRedirectUrl.hostname !== parsedUrl.hostname) {
+                        for (const header in headers) {
+                            // header names are case insensitive
+                            if (header.toLowerCase() === 'authorization') {
+                                delete headers[header];
+                            }
+                        }
+                    }
+                    // let's make the request with the new redirectUrl
+                    info = this._prepareRequest(verb, parsedRedirectUrl, headers);
+                    response = yield this.requestRaw(info, data);
+                    redirectsRemaining--;
+                }
+                if (!response.message.statusCode ||
+                    !HttpResponseRetryCodes.includes(response.message.statusCode)) {
+                    // If not a retry code, return immediately instead of retrying
+                    return response;
+                }
+                numTries += 1;
+                if (numTries < maxTries) {
+                    yield response.readBody();
+                    yield this._performExponentialBackoff(numTries);
+                }
+            } while (numTries < maxTries);
+            return response;
+        });
+    }
+    /**
+     * Needs to be called if keepAlive is set to true in request options.
+     */
+    dispose() {
+        if (this._agent) {
+            this._agent.destroy();
+        }
+        this._disposed = true;
+    }
+    /**
+     * Raw request.
+     * @param info
+     * @param data
+     */
+    requestRaw(info, data) {
+        return __awaiter(this, void 0, void 0, function* () {
+            return new Promise((resolve, reject) => {
+                function callbackForResult(err, res) {
+                    if (err) {
+                        reject(err);
+                    }
+                    else if (!res) {
+                        // If `err` is not passed, then `res` must be passed.
+                        reject(new Error('Unknown error'));
+                    }
+                    else {
+                        resolve(res);
+                    }
+                }
+                this.requestRawWithCallback(info, data, callbackForResult);
+            });
+        });
+    }
+    /**
+     * Raw request with callback.
+     * @param info
+     * @param data
+     * @param onResult
+     */
+    requestRawWithCallback(info, data, onResult) {
+        if (typeof data === 'string') {
+            if (!info.options.headers) {
+                info.options.headers = {};
+            }
+            info.options.headers['Content-Length'] = Buffer.byteLength(data, 'utf8');
+        }
+        let callbackCalled = false;
+        function handleResult(err, res) {
+            if (!callbackCalled) {
+                callbackCalled = true;
+                onResult(err, res);
+            }
+        }
+        const req = info.httpModule.request(info.options, (msg) => {
+            const res = new HttpClientResponse(msg);
+            handleResult(undefined, res);
+        });
+        let socket;
+        req.on('socket', sock => {
+            socket = sock;
+        });
+        // If we ever get disconnected, we want the socket to timeout eventually
+        req.setTimeout(this._socketTimeout || 3 * 60000, () => {
+            if (socket) {
+                socket.end();
+            }
+            handleResult(new Error(`Request timeout: ${info.options.path}`));
+        });
+        req.on('error', function (err) {
+            // err has statusCode property
+            // res should have headers
+            handleResult(err);
+        });
+        if (data && typeof data === 'string') {
+            req.write(data, 'utf8');
+        }
+        if (data && typeof data !== 'string') {
+            data.on('close', function () {
+                req.end();
+            });
+            data.pipe(req);
+        }
+        else {
+            req.end();
+        }
+    }
+    /**
+     * Gets an http agent. This function is useful when you need an http agent that handles
+     * routing through a proxy server - depending upon the url and proxy environment variables.
+     * @param serverUrl  The server URL where the request will be sent. For example, https://api.github.com
+     */
+    getAgent(serverUrl) {
+        const parsedUrl = new URL(serverUrl);
+        return this._getAgent(parsedUrl);
+    }
+    getAgentDispatcher(serverUrl) {
+        const parsedUrl = new URL(serverUrl);
+        const proxyUrl = pm.getProxyUrl(parsedUrl);
+        const useProxy = proxyUrl && proxyUrl.hostname;
+        if (!useProxy) {
+            return;
+        }
+        return this._getProxyAgentDispatcher(parsedUrl, proxyUrl);
+    }
+    _prepareRequest(method, requestUrl, headers) {
+        const info = {};
+        info.parsedUrl = requestUrl;
+        const usingSsl = info.parsedUrl.protocol === 'https:';
+        info.httpModule = usingSsl ? https : http;
+        const defaultPort = usingSsl ? 443 : 80;
+        info.options = {};
+        info.options.host = info.parsedUrl.hostname;
+        info.options.port = info.parsedUrl.port
+            ? parseInt(info.parsedUrl.port)
+            : defaultPort;
+        info.options.path =
+            (info.parsedUrl.pathname || '') + (info.parsedUrl.search || '');
+        info.options.method = method;
+        info.options.headers = this._mergeHeaders(headers);
+        if (this.userAgent != null) {
+            info.options.headers['user-agent'] = this.userAgent;
+        }
+        info.options.agent = this._getAgent(info.parsedUrl);
+        // gives handlers an opportunity to participate
+        if (this.handlers) {
+            for (const handler of this.handlers) {
+                handler.prepareRequest(info.options);
+            }
+        }
+        return info;
+    }
+    _mergeHeaders(headers) {
+        if (this.requestOptions && this.requestOptions.headers) {
+            return Object.assign({}, lowercaseKeys(this.requestOptions.headers), lowercaseKeys(headers || {}));
+        }
+        return lowercaseKeys(headers || {});
+    }
+    _getExistingOrDefaultHeader(additionalHeaders, header, _default) {
+        let clientHeader;
+        if (this.requestOptions && this.requestOptions.headers) {
+            clientHeader = lowercaseKeys(this.requestOptions.headers)[header];
+        }
+        return additionalHeaders[header] || clientHeader || _default;
+    }
+    _getAgent(parsedUrl) {
+        let agent;
+        const proxyUrl = pm.getProxyUrl(parsedUrl);
+        const useProxy = proxyUrl && proxyUrl.hostname;
+        if (this._keepAlive && useProxy) {
+            agent = this._proxyAgent;
+        }
+        if (!useProxy) {
+            agent = this._agent;
+        }
+        // if agent is already assigned use that agent.
+        if (agent) {
+            return agent;
+        }
+        const usingSsl = parsedUrl.protocol === 'https:';
+        let maxSockets = 100;
+        if (this.requestOptions) {
+            maxSockets = this.requestOptions.maxSockets || http.globalAgent.maxSockets;
+        }
+        // This is `useProxy` again, but we need to check `proxyUrl` directly for TypeScript's flow analysis.
+        if (proxyUrl && proxyUrl.hostname) {
+            const agentOptions = {
+                maxSockets,
+                keepAlive: this._keepAlive,
+                proxy: Object.assign(Object.assign({}, ((proxyUrl.username || proxyUrl.password) && {
+                    proxyAuth: `${proxyUrl.username}:${proxyUrl.password}`
+                })), { host: proxyUrl.hostname, port: proxyUrl.port })
+            };
+            let tunnelAgent;
+            const overHttps = proxyUrl.protocol === 'https:';
+            if (usingSsl) {
+                tunnelAgent = overHttps ? tunnel.httpsOverHttps : tunnel.httpsOverHttp;
+            }
+            else {
+                tunnelAgent = overHttps ? tunnel.httpOverHttps : tunnel.httpOverHttp;
+            }
+            agent = tunnelAgent(agentOptions);
+            this._proxyAgent = agent;
+        }
+        // if tunneling agent isn't assigned create a new agent
+        if (!agent) {
+            const options = { keepAlive: this._keepAlive, maxSockets };
+            agent = usingSsl ? new https.Agent(options) : new http.Agent(options);
+            this._agent = agent;
+        }
+        if (usingSsl && this._ignoreSslError) {
+            // we don't want to set NODE_TLS_REJECT_UNAUTHORIZED=0 since that will affect requests for the entire process
+            // http.RequestOptions doesn't expose a way to modify RequestOptions.agent.options
+            // we have to cast it to any and change it directly
+            agent.options = Object.assign(agent.options || {}, {
+                rejectUnauthorized: false
+            });
+        }
+        return agent;
+    }
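+    // Note (illustrative summary): the tunnel agent above is selected from the
+    // (target protocol, proxy protocol) pair: httpsOverHttps, httpsOverHttp,
+    // httpOverHttps or httpOverHttp, so the tunnelling mode matches both ends.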
+    _getProxyAgentDispatcher(parsedUrl, proxyUrl) {
+        let proxyAgent;
+        if (this._keepAlive) {
+            proxyAgent = this._proxyAgentDispatcher;
+        }
+        // if agent is already assigned use that agent.
+        if (proxyAgent) {
+            return proxyAgent;
+        }
+        const usingSsl = parsedUrl.protocol === 'https:';
+        proxyAgent = new undici_1.ProxyAgent(Object.assign({ uri: proxyUrl.href, pipelining: !this._keepAlive ? 0 : 1 }, ((proxyUrl.username || proxyUrl.password) && {
+            token: `Basic ${Buffer.from(`${proxyUrl.username}:${proxyUrl.password}`).toString('base64')}`
+        })));
+        this._proxyAgentDispatcher = proxyAgent;
+        if (usingSsl && this._ignoreSslError) {
+            // we don't want to set NODE_TLS_REJECT_UNAUTHORIZED=0 since that will affect requests for the entire process
+            // http.RequestOptions doesn't expose a way to modify RequestOptions.agent.options
+            // we have to cast it to any and change it directly
+            proxyAgent.options = Object.assign(proxyAgent.options.requestTls || {}, {
+                rejectUnauthorized: false
+            });
+        }
+        return proxyAgent;
+    }
+    _performExponentialBackoff(retryNumber) {
+        return __awaiter(this, void 0, void 0, function* () {
+            retryNumber = Math.min(ExponentialBackoffCeiling, retryNumber);
+            const ms = ExponentialBackoffTimeSlice * Math.pow(2, retryNumber);
+            return new Promise(resolve => setTimeout(() => resolve(), ms));
+        });
+    }
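+    // Note (illustrative): with ExponentialBackoffTimeSlice = 5 and a ceiling of 10,
+    // the retry delay is 5 * 2^retryNumber ms, i.e. 10 ms after the first retry and
+    // at most 5120 ms once the ceiling is reached.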
+    _processResponse(res, options) {
+        return __awaiter(this, void 0, void 0, function* () {
+            return new Promise((resolve, reject) => __awaiter(this, void 0, void 0, function* () {
+                const statusCode = res.message.statusCode || 0;
+                const response = {
+                    statusCode,
+                    result: null,
+                    headers: {}
+                };
+                // not found leads to null obj returned
+                if (statusCode === HttpCodes.NotFound) {
+                    resolve(response);
+                }
+                // get the result from the body
+                function dateTimeDeserializer(key, value) {
+                    if (typeof value === 'string') {
+                        const a = new Date(value);
+                        if (!isNaN(a.valueOf())) {
+                            return a;
+                        }
+                    }
+                    return value;
+                }
+                let obj;
+                let contents;
+                try {
+                    contents = yield res.readBody();
+                    if (contents && contents.length > 0) {
+                        if (options && options.deserializeDates) {
+                            obj = JSON.parse(contents, dateTimeDeserializer);
+                        }
+                        else {
+                            obj = JSON.parse(contents);
+                        }
+                        response.result = obj;
+                    }
+                    response.headers = res.message.headers;
+                }
+                catch (err) {
+                    // Invalid resource (contents not json);  leaving result obj null
+                }
+                // note that 3xx redirects are handled by the http layer.
+                if (statusCode > 299) {
+                    let msg;
+                    // if exception/error in body, attempt to get better error
+                    if (obj && obj.message) {
+                        msg = obj.message;
+                    }
+                    else if (contents && contents.length > 0) {
+                        // it may be the case that the exception is in the body message as string
+                        msg = contents;
+                    }
+                    else {
+                        msg = `Failed request: (${statusCode})`;
+                    }
+                    const err = new HttpClientError(msg, statusCode);
+                    err.result = response.result;
+                    reject(err);
+                }
+                else {
+                    resolve(response);
+                }
+            }));
+        });
+    }
+}
+exports.HttpClient = HttpClient;
+const lowercaseKeys = (obj) => Object.keys(obj).reduce((c, k) => ((c[k.toLowerCase()] = obj[k]), c), {});
+//# sourceMappingURL=index.js.map
+
+/***/ }),
+
+/***/ 7407:
+/***/ ((__unused_webpack_module, exports) => {
+
+
+Object.defineProperty(exports, "__esModule", ({ value: true }));
+exports.checkBypass = exports.getProxyUrl = void 0;
+function getProxyUrl(reqUrl) {
+    const usingSsl = reqUrl.protocol === 'https:';
+    if (checkBypass(reqUrl)) {
+        return undefined;
+    }
+    const proxyVar = (() => {
+        if (usingSsl) {
+            return process.env['https_proxy'] || process.env['HTTPS_PROXY'];
+        }
+        else {
+            return process.env['http_proxy'] || process.env['HTTP_PROXY'];
+        }
+    })();
+    if (proxyVar) {
+        try {
+            return new DecodedURL(proxyVar);
+        }
+        catch (_a) {
+            if (!proxyVar.startsWith('http://') && !proxyVar.startsWith('https://'))
+                return new DecodedURL(`http://${proxyVar}`);
+        }
+    }
+    else {
+        return undefined;
+    }
+}
+exports.getProxyUrl = getProxyUrl;
+function checkBypass(reqUrl) {
+    if (!reqUrl.hostname) {
+        return false;
+    }
+    const reqHost = reqUrl.hostname;
+    if (isLoopbackAddress(reqHost)) {
+        return true;
+    }
+    const noProxy = process.env['no_proxy'] || process.env['NO_PROXY'] || '';
+    if (!noProxy) {
+        return false;
+    }
+    // Determine the request port
+    let reqPort;
+    if (reqUrl.port) {
+        reqPort = Number(reqUrl.port);
+    }
+    else if (reqUrl.protocol === 'http:') {
+        reqPort = 80;
+    }
+    else if (reqUrl.protocol === 'https:') {
+        reqPort = 443;
+    }
+    // Format the request hostname and hostname with port
+    const upperReqHosts = [reqUrl.hostname.toUpperCase()];
+    if (typeof reqPort === 'number') {
+        upperReqHosts.push(`${upperReqHosts[0]}:${reqPort}`);
+    }
+    // Compare request host against noproxy
+    for (const upperNoProxyItem of noProxy
+        .split(',')
+        .map(x => x.trim().toUpperCase())
+        .filter(x => x)) {
+        if (upperNoProxyItem === '*' ||
+            upperReqHosts.some(x => x === upperNoProxyItem ||
+                x.endsWith(`.${upperNoProxyItem}`) ||
+                (upperNoProxyItem.startsWith('.') &&
+                    x.endsWith(`${upperNoProxyItem}`)))) {
+            return true;
+        }
+    }
+    return false;
+}
+exports.checkBypass = checkBypass;
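+// Usage sketch (illustrative only; hostnames are placeholders): with
+// NO_PROXY='example.com,.internal', checkBypass(new URL('https://api.example.com'))
+// and checkBypass(new URL('https://host.internal')) return true, while
+// checkBypass(new URL('https://notexample.com')) returns false; loopback hosts
+// (localhost, 127.*, [::1]) always bypass the proxy.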
+function isLoopbackAddress(host) {
+    const hostLower = host.toLowerCase();
+    return (hostLower === 'localhost' ||
+        hostLower.startsWith('127.') ||
+        hostLower.startsWith('[::1]') ||
+        hostLower.startsWith('[0:0:0:0:0:0:0:1]'));
+}
+class DecodedURL extends URL {
+    constructor(url, base) {
+        super(url, base);
+        this._decodedUsername = decodeURIComponent(super.username);
+        this._decodedPassword = decodeURIComponent(super.password);
+    }
+    get username() {
+        return this._decodedUsername;
+    }
+    get password() {
+        return this._decodedPassword;
+    }
+}
+//# sourceMappingURL=proxy.js.map
+
+/***/ }),
+
+/***/ 2746:
+/***/ (function(__unused_webpack_module, exports, __nccwpck_require__) {
+
+
+var __createBinding = (this && this.__createBinding) || (Object.create ? (function(o, m, k, k2) {
+    if (k2 === undefined) k2 = k;
+    Object.defineProperty(o, k2, { enumerable: true, get: function() { return m[k]; } });
+}) : (function(o, m, k, k2) {
+    if (k2 === undefined) k2 = k;
+    o[k2] = m[k];
+}));
+var __setModuleDefault = (this && this.__setModuleDefault) || (Object.create ? (function(o, v) {
+    Object.defineProperty(o, "default", { enumerable: true, value: v });
+}) : function(o, v) {
+    o["default"] = v;
+});
+var __importStar = (this && this.__importStar) || function (mod) {
+    if (mod && mod.__esModule) return mod;
+    var result = {};
+    if (mod != null) for (var k in mod) if (k !== "default" && Object.hasOwnProperty.call(mod, k)) __createBinding(result, mod, k);
+    __setModuleDefault(result, mod);
+    return result;
+};
+var __awaiter = (this && this.__awaiter) || function (thisArg, _arguments, P, generator) {
+    function adopt(value) { return value instanceof P ? value : new P(function (resolve) { resolve(value); }); }
+    return new (P || (P = Promise))(function (resolve, reject) {
+        function fulfilled(value) { try { step(generator.next(value)); } catch (e) { reject(e); } }
+        function rejected(value) { try { step(generator["throw"](value)); } catch (e) { reject(e); } }
+        function step(result) { result.done ? resolve(result.value) : adopt(result.value).then(fulfilled, rejected); }
+        step((generator = generator.apply(thisArg, _arguments || [])).next());
+    });
+};
+var _a;
+Object.defineProperty(exports, "__esModule", ({ value: true }));
+exports.getCmdPath = exports.tryGetExecutablePath = exports.isRooted = exports.isDirectory = exports.exists = exports.READONLY = exports.UV_FS_O_EXLOCK = exports.IS_WINDOWS = exports.unlink = exports.symlink = exports.stat = exports.rmdir = exports.rm = exports.rename = exports.readlink = exports.readdir = exports.open = exports.mkdir = exports.lstat = exports.copyFile = exports.chmod = void 0;
+const fs = __importStar(__nccwpck_require__(9896));
+const path = __importStar(__nccwpck_require__(6928));
+_a = fs.promises
+// export const {open} = 'fs'
+, exports.chmod = _a.chmod, exports.copyFile = _a.copyFile, exports.lstat = _a.lstat, exports.mkdir = _a.mkdir, exports.open = _a.open, exports.readdir = _a.readdir, exports.readlink = _a.readlink, exports.rename = _a.rename, exports.rm = _a.rm, exports.rmdir = _a.rmdir, exports.stat = _a.stat, exports.symlink = _a.symlink, exports.unlink = _a.unlink;
+// export const {open} = 'fs'
+exports.IS_WINDOWS = process.platform === 'win32';
+// See https://github.com/nodejs/node/blob/d0153aee367422d0858105abec186da4dff0a0c5/deps/uv/include/uv/win.h#L691
+exports.UV_FS_O_EXLOCK = 0x10000000;
+exports.READONLY = fs.constants.O_RDONLY;
+function exists(fsPath) {
+    return __awaiter(this, void 0, void 0, function* () {
+        try {
+            yield exports.stat(fsPath);
+        }
+        catch (err) {
+            if (err.code === 'ENOENT') {
+                return false;
+            }
+            throw err;
+        }
+        return true;
+    });
+}
+exports.exists = exists;
+function isDirectory(fsPath, useStat = false) {
+    return __awaiter(this, void 0, void 0, function* () {
+        const stats = useStat ? yield exports.stat(fsPath) : yield exports.lstat(fsPath);
+        return stats.isDirectory();
+    });
+}
+exports.isDirectory = isDirectory;
+/**
+ * On OSX/Linux, true if path starts with '/'. On Windows, true for paths like:
+ * \, \hello, \\hello\share, C:, and C:\hello (and corresponding alternate separator cases).
+ */
+function isRooted(p) {
+    p = normalizeSeparators(p);
+    if (!p) {
+        throw new Error('isRooted() parameter "p" cannot be empty');
+    }
+    if (exports.IS_WINDOWS) {
+        return (p.startsWith('\\') || /^[A-Z]:/i.test(p) // e.g. \ or \hello or \\hello
+        ); // e.g. C: or C:\hello
+    }
+    return p.startsWith('/');
+}
+exports.isRooted = isRooted;
+/**
+ * Best effort attempt to determine whether a file exists and is executable.
+ * @param filePath    file path to check
+ * @param extensions  additional file extensions to try
+ * @return if file exists and is executable, returns the file path. otherwise empty string.
+ */
+function tryGetExecutablePath(filePath, extensions) {
+    return __awaiter(this, void 0, void 0, function* () {
+        let stats = undefined;
+        try {
+            // test file exists
+            stats = yield exports.stat(filePath);
+        }
+        catch (err) {
+            if (err.code !== 'ENOENT') {
+                // eslint-disable-next-line no-console
+                console.log(`Unexpected error attempting to determine if executable file exists '${filePath}': ${err}`);
+            }
+        }
+        if (stats && stats.isFile()) {
+            if (exports.IS_WINDOWS) {
+                // on Windows, test for valid extension
+                const upperExt = path.extname(filePath).toUpperCase();
+                if (extensions.some(validExt => validExt.toUpperCase() === upperExt)) {
+                    return filePath;
+                }
+            }
+            else {
+                if (isUnixExecutable(stats)) {
+                    return filePath;
+                }
+            }
+        }
+        // try each extension
+        const originalFilePath = filePath;
+        for (const extension of extensions) {
+            filePath = originalFilePath + extension;
+            stats = undefined;
+            try {
+                stats = yield exports.stat(filePath);
+            }
+            catch (err) {
+                if (err.code !== 'ENOENT') {
+                    // eslint-disable-next-line no-console
+                    console.log(`Unexpected error attempting to determine if executable file exists '${filePath}': ${err}`);
+                }
+            }
+            if (stats && stats.isFile()) {
+                if (exports.IS_WINDOWS) {
+                    // preserve the case of the actual file (since an extension was appended)
+                    try {
+                        const directory = path.dirname(filePath);
+                        const upperName = path.basename(filePath).toUpperCase();
+                        for (const actualName of yield exports.readdir(directory)) {
+                            if (upperName === actualName.toUpperCase()) {
+                                filePath = path.join(directory, actualName);
+                                break;
+                            }
+                        }
+                    }
+                    catch (err) {
+                        // eslint-disable-next-line no-console
+                        console.log(`Unexpected error attempting to determine the actual case of the file '${filePath}': ${err}`);
+                    }
+                    return filePath;
+                }
+                else {
+                    if (isUnixExecutable(stats)) {
+                        return filePath;
+                    }
+                }
+            }
+        }
+        return '';
+    });
+}
+exports.tryGetExecutablePath = tryGetExecutablePath;
+function normalizeSeparators(p) {
+    p = p || '';
+    if (exports.IS_WINDOWS) {
+        // convert slashes on Windows
+        p = p.replace(/\//g, '\\');
+        // remove redundant slashes
+        return p.replace(/\\\\+/g, '\\');
+    }
+    // remove redundant slashes
+    return p.replace(/\/\/+/g, '/');
+}
+// on Mac/Linux, test the execute bit
+//     R   W  X  R  W X R W X
+//   256 128 64 32 16 8 4 2 1
+function isUnixExecutable(stats) {
+    return ((stats.mode & 1) > 0 ||
+        ((stats.mode & 8) > 0 && stats.gid === process.getgid()) ||
+        ((stats.mode & 64) > 0 && stats.uid === process.getuid()));
+}
+// Get the path of cmd.exe in windows
+function getCmdPath() {
+    var _a;
+    return (_a = process.env['COMSPEC']) !== null && _a !== void 0 ? _a : `cmd.exe`;
+}
+exports.getCmdPath = getCmdPath;
+//# sourceMappingURL=io-util.js.map
+
+/***/ }),
+
+/***/ 3357:
+/***/ (function(__unused_webpack_module, exports, __nccwpck_require__) {
+
+
+var __createBinding = (this && this.__createBinding) || (Object.create ? (function(o, m, k, k2) {
+    if (k2 === undefined) k2 = k;
+    Object.defineProperty(o, k2, { enumerable: true, get: function() { return m[k]; } });
+}) : (function(o, m, k, k2) {
+    if (k2 === undefined) k2 = k;
+    o[k2] = m[k];
+}));
+var __setModuleDefault = (this && this.__setModuleDefault) || (Object.create ? (function(o, v) {
+    Object.defineProperty(o, "default", { enumerable: true, value: v });
+}) : function(o, v) {
+    o["default"] = v;
+});
+var __importStar = (this && this.__importStar) || function (mod) {
+    if (mod && mod.__esModule) return mod;
+    var result = {};
+    if (mod != null) for (var k in mod) if (k !== "default" && Object.hasOwnProperty.call(mod, k)) __createBinding(result, mod, k);
+    __setModuleDefault(result, mod);
+    return result;
+};
+var __awaiter = (this && this.__awaiter) || function (thisArg, _arguments, P, generator) {
+    function adopt(value) { return value instanceof P ? value : new P(function (resolve) { resolve(value); }); }
+    return new (P || (P = Promise))(function (resolve, reject) {
+        function fulfilled(value) { try { step(generator.next(value)); } catch (e) { reject(e); } }
+        function rejected(value) { try { step(generator["throw"](value)); } catch (e) { reject(e); } }
+        function step(result) { result.done ? resolve(result.value) : adopt(result.value).then(fulfilled, rejected); }
+        step((generator = generator.apply(thisArg, _arguments || [])).next());
+    });
+};
+Object.defineProperty(exports, "__esModule", ({ value: true }));
+exports.findInPath = exports.which = exports.mkdirP = exports.rmRF = exports.mv = exports.cp = void 0;
+const assert_1 = __nccwpck_require__(2613);
+const path = __importStar(__nccwpck_require__(6928));
+const ioUtil = __importStar(__nccwpck_require__(2746));
+/**
+ * Copies a file or folder.
+ * Based off of shelljs - https://github.com/shelljs/shelljs/blob/9237f66c52e5daa40458f94f9565e18e8132f5a6/src/cp.js
+ *
+ * @param     source    source path
+ * @param     dest      destination path
+ * @param     options   optional. See CopyOptions.
+ */
+function cp(source, dest, options = {}) {
+    return __awaiter(this, void 0, void 0, function* () {
+        const { force, recursive, copySourceDirectory } = readCopyOptions(options);
+        const destStat = (yield ioUtil.exists(dest)) ? yield ioUtil.stat(dest) : null;
+        // Dest is an existing file, but not forcing
+        if (destStat && destStat.isFile() && !force) {
+            return;
+        }
+        // If dest is an existing directory, should copy inside.
+        const newDest = destStat && destStat.isDirectory() && copySourceDirectory
+            ? path.join(dest, path.basename(source))
+            : dest;
+        if (!(yield ioUtil.exists(source))) {
+            throw new Error(`no such file or directory: ${source}`);
+        }
+        const sourceStat = yield ioUtil.stat(source);
+        if (sourceStat.isDirectory()) {
+            if (!recursive) {
+                throw new Error(`Failed to copy. ${source} is a directory, but tried to copy without recursive flag.`);
+            }
+            else {
+                yield cpDirRecursive(source, newDest, 0, force);
+            }
+        }
+        else {
+            if (path.relative(source, newDest) === '') {
+                // a file cannot be copied to itself
+                throw new Error(`'${newDest}' and '${source}' are the same file`);
+            }
+            yield copyFile(source, newDest, force);
+        }
+    });
+}
+exports.cp = cp;
+/**
+ * Moves a path.
+ *
+ * @param     source    source path
+ * @param     dest      destination path
+ * @param     options   optional. See MoveOptions.
+ */
+function mv(source, dest, options = {}) {
+    return __awaiter(this, void 0, void 0, function* () {
+        if (yield ioUtil.exists(dest)) {
+            let destExists = true;
+            if (yield ioUtil.isDirectory(dest)) {
+                // If dest is directory copy src into dest
+                dest = path.join(dest, path.basename(source));
+                destExists = yield ioUtil.exists(dest);
+            }
+            if (destExists) {
+                if (options.force == null || options.force) {
+                    yield rmRF(dest);
+                }
+                else {
+                    throw new Error('Destination already exists');
+                }
+            }
+        }
+        yield mkdirP(path.dirname(dest));
+        yield ioUtil.rename(source, dest);
+    });
+}
+exports.mv = mv;
+/**
+ * Remove a path recursively with force
+ *
+ * @param inputPath path to remove
+ */
+function rmRF(inputPath) {
+    return __awaiter(this, void 0, void 0, function* () {
+        if (ioUtil.IS_WINDOWS) {
+            // Check for invalid characters
+            // https://docs.microsoft.com/en-us/windows/win32/fileio/naming-a-file
+            if (/[*"<>|]/.test(inputPath)) {
+                throw new Error('File path must not contain `*`, `"`, `<`, `>` or `|` on Windows');
+            }
+        }
+        try {
+            // note if path does not exist, error is silent
+            yield ioUtil.rm(inputPath, {
+                force: true,
+                maxRetries: 3,
+                recursive: true,
+                retryDelay: 300
+            });
+        }
+        catch (err) {
+            throw new Error(`File was unable to be removed ${err}`);
+        }
+    });
+}
+exports.rmRF = rmRF;
+/**
+ * Make a directory.  Creates the full path with folders in between
+ * Will throw if it fails
+ *
+ * @param   fsPath        path to create
+ * @returns Promise<void>
+ */
+function mkdirP(fsPath) {
+    return __awaiter(this, void 0, void 0, function* () {
+        assert_1.ok(fsPath, 'a path argument must be provided');
+        yield ioUtil.mkdir(fsPath, { recursive: true });
+    });
+}
+exports.mkdirP = mkdirP;
+/**
+ * Returns the path a tool would resolve to had it actually been invoked. Resolves via the PATH environment variable.
+ * If you check and the tool does not exist, it will throw.
+ *
+ * @param     tool              name of the tool
+ * @param     check             whether to check if tool exists
+ * @returns   Promise<string>   path to tool
+ */
+function which(tool, check) {
+    return __awaiter(this, void 0, void 0, function* () {
+        if (!tool) {
+            throw new Error("parameter 'tool' is required");
+        }
+        // recursive when check=true
+        if (check) {
+            const result = yield which(tool, false);
+            if (!result) {
+                if (ioUtil.IS_WINDOWS) {
+                    throw new Error(`Unable to locate executable file: ${tool}. Please verify either the file path exists or the file can be found within a directory specified by the PATH environment variable. Also verify the file has a valid extension for an executable file.`);
+                }
+                else {
+                    throw new Error(`Unable to locate executable file: ${tool}. Please verify either the file path exists or the file can be found within a directory specified by the PATH environment variable. Also check the file mode to verify the file is executable.`);
+                }
+            }
+            return result;
+        }
+        const matches = yield findInPath(tool);
+        if (matches && matches.length > 0) {
+            return matches[0];
+        }
+        return '';
+    });
+}
+exports.which = which;
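+// Usage sketch (illustrative only; paths are placeholders):
+//
+//   const gitPath = await which('git', true);               // throws if git is not on PATH
+//   await mkdirP('/tmp/build/output');                       // recursive mkdir
+//   await cp('src', '/tmp/build/output', { recursive: true });
+//
+// Inside this bundled module these are simply the cp/mv/rmRF/mkdirP/which/findInPath
+// functions defined in this file.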
+/**
+ * Returns a list of all occurrences of the given tool on the system path.
+ *
+ * @returns   Promise<string[]>  the paths of the tool
+ */
+function findInPath(tool) {
+    return __awaiter(this, void 0, void 0, function* () {
+        if (!tool) {
+            throw new Error("parameter 'tool' is required");
+        }
+        // build the list of extensions to try
+        const extensions = [];
+        if (ioUtil.IS_WINDOWS && process.env['PATHEXT']) {
+            for (const extension of process.env['PATHEXT'].split(path.delimiter)) {
+                if (extension) {
+                    extensions.push(extension);
+                }
+            }
+        }
+        // if it's rooted, return it if it exists; otherwise return empty.
+        if (ioUtil.isRooted(tool)) {
+            const filePath = yield ioUtil.tryGetExecutablePath(tool, extensions);
+            if (filePath) {
+                return [filePath];
+            }
+            return [];
+        }
+        // if any path separators, return empty
+        if (tool.includes(path.sep)) {
+            return [];
+        }
+        // build the list of directories
+        //
+        // Note, technically "where" checks the current directory on Windows. From a toolkit perspective,
+        // it feels like we should not do this. Checking the current directory seems like more of a use
+        // case of a shell, and the which() function exposed by the toolkit should strive for consistency
+        // across platforms.
+        const directories = [];
+        if (process.env.PATH) {
+            for (const p of process.env.PATH.split(path.delimiter)) {
+                if (p) {
+                    directories.push(p);
+                }
+            }
+        }
+        // find all matches
+        const matches = [];
+        for (const directory of directories) {
+            const filePath = yield ioUtil.tryGetExecutablePath(path.join(directory, tool), extensions);
+            if (filePath) {
+                matches.push(filePath);
+            }
+        }
+        return matches;
+    });
+}
+exports.findInPath = findInPath;
+function readCopyOptions(options) {
+    const force = options.force == null ? true : options.force;
+    const recursive = Boolean(options.recursive);
+    const copySourceDirectory = options.copySourceDirectory == null
+        ? true
+        : Boolean(options.copySourceDirectory);
+    return { force, recursive, copySourceDirectory };
+}
+function cpDirRecursive(sourceDir, destDir, currentDepth, force) {
+    return __awaiter(this, void 0, void 0, function* () {
+        // Ensure there is not a runaway recursive copy
+        if (currentDepth >= 255)
+            return;
+        currentDepth++;
+        yield mkdirP(destDir);
+        const files = yield ioUtil.readdir(sourceDir);
+        for (const fileName of files) {
+            const srcFile = `${sourceDir}/${fileName}`;
+            const destFile = `${destDir}/${fileName}`;
+            const srcFileStat = yield ioUtil.lstat(srcFile);
+            if (srcFileStat.isDirectory()) {
+                // Recurse
+                yield cpDirRecursive(srcFile, destFile, currentDepth, force);
+            }
+            else {
+                yield copyFile(srcFile, destFile, force);
+            }
+        }
+        // Change the mode for the newly created directory
+        yield ioUtil.chmod(destDir, (yield ioUtil.stat(sourceDir)).mode);
+    });
+}
+// Buffered file copy
+function copyFile(srcFile, destFile, force) {
+    return __awaiter(this, void 0, void 0, function* () {
+        if ((yield ioUtil.lstat(srcFile)).isSymbolicLink()) {
+            // unlink/re-link it
+            try {
+                yield ioUtil.lstat(destFile);
+                yield ioUtil.unlink(destFile);
+            }
+            catch (e) {
+                // Try to override file permission
+                if (e.code === 'EPERM') {
+                    yield ioUtil.chmod(destFile, '0666');
+                    yield ioUtil.unlink(destFile);
+                }
+                // other errors = it doesn't exist, no work to do
+            }
+            // Copy over symlink
+            const symlinkFull = yield ioUtil.readlink(srcFile);
+            yield ioUtil.symlink(symlinkFull, destFile, ioUtil.IS_WINDOWS ? 'junction' : null);
+        }
+        else if (!(yield ioUtil.exists(destFile)) || force) {
+            yield ioUtil.copyFile(srcFile, destFile);
+        }
+    });
+}
+//# sourceMappingURL=io.js.map
+
+/***/ }),
+
+/***/ 349:
+/***/ ((__unused_webpack_module, exports) => {
+
+
+
+Object.defineProperty(exports, "__esModule", ({ value: true }));
+
+// Copyright (c) Microsoft Corporation.
+// Licensed under the MIT license.
+/// <reference path="../shims-public.d.ts" />
+const listenersMap = new WeakMap();
+const abortedMap = new WeakMap();
+/**
+ * An aborter instance implements the AbortSignal interface and can abort HTTP requests.
+ *
+ * - Call AbortSignal.none to create a new AbortSignal instance that cannot be cancelled.
+ * Use `AbortSignal.none` when you are required to pass a cancellation token but the operation
+ * cannot or will not ever be cancelled.
+ *
+ * @example
+ * Abort without timeout
+ * ```ts
+ * await doAsyncWork(AbortSignal.none);
+ * ```
+ */
+class AbortSignal {
+    constructor() {
+        /**
+         * onabort event listener.
+         */
+        this.onabort = null;
+        listenersMap.set(this, []);
+        abortedMap.set(this, false);
+    }
+    /**
+     * Status of whether aborted or not.
+     *
+     * @readonly
+     */
+    get aborted() {
+        if (!abortedMap.has(this)) {
+            throw new TypeError("Expected `this` to be an instance of AbortSignal.");
+        }
+        return abortedMap.get(this);
+    }
+    /**
+     * Creates a new AbortSignal instance that will never be aborted.
+     *
+     * @readonly
+     */
+    static get none() {
+        return new AbortSignal();
+    }
+    /**
+     * Adds a new "abort" event listener; only the "abort" event is supported.
+     *
+     * @param _type - Only support "abort" event
+     * @param listener - The listener to be added
+     */
+    addEventListener(
+    // tslint:disable-next-line:variable-name
+    _type, listener) {
+        if (!listenersMap.has(this)) {
+            throw new TypeError("Expected `this` to be an instance of AbortSignal.");
+        }
+        const listeners = listenersMap.get(this);
+        listeners.push(listener);
+    }
+    /**
+     * Removes an "abort" event listener; only the "abort" event is supported.
+     *
+     * @param _type - Only support "abort" event
+     * @param listener - The listener to be removed
+     */
+    removeEventListener(
+    // tslint:disable-next-line:variable-name
+    _type, listener) {
+        if (!listenersMap.has(this)) {
+            throw new TypeError("Expected `this` to be an instance of AbortSignal.");
+        }
+        const listeners = listenersMap.get(this);
+        const index = listeners.indexOf(listener);
+        if (index > -1) {
+            listeners.splice(index, 1);
+        }
+    }
+    /**
+     * Dispatches a synthetic event to the AbortSignal.
+     */
+    dispatchEvent(_event) {
+        throw new Error("This is a stub dispatchEvent implementation that should not be used.  It only exists for type-checking purposes.");
+    }
+}
+/**
+ * Helper to trigger an abort event immediately; the onabort callback and all abort event listeners will be invoked.
+ * Will try to trigger the abort event for all linked AbortSignal nodes.
+ *
+ * - If there is a timeout, the timer will be cancelled.
+ * - If aborted is true, nothing will happen.
+ *
+ * @internal
+ */
+// eslint-disable-next-line @azure/azure-sdk/ts-use-interface-parameters
+function abortSignal(signal) {
+    if (signal.aborted) {
+        return;
+    }
+    if (signal.onabort) {
+        signal.onabort.call(signal);
+    }
+    const listeners = listenersMap.get(signal);
+    if (listeners) {
+        // Create a copy of listeners so mutations to the array
+        // (e.g. via removeListener calls) don't affect the listeners
+        // we invoke.
+        listeners.slice().forEach((listener) => {
+            listener.call(signal, { type: "abort" });
+        });
+    }
+    abortedMap.set(signal, true);
+}
+
+// Copyright (c) Microsoft Corporation.
+/**
+ * This error is thrown when an asynchronous operation has been aborted.
+ * Check for this error by testing that the `name` property of the
+ * error matches `"AbortError"`.
+ *
+ * @example
+ * ```ts
+ * const controller = new AbortController();
+ * controller.abort();
+ * try {
+ *   await doAsyncWork(controller.signal);
+ * } catch (e) {
+ *   if (e.name === 'AbortError') {
+ *     // handle abort error here.
+ *   }
+ * }
+ * ```
+ */
+class AbortError extends Error {
+    constructor(message) {
+        super(message);
+        this.name = "AbortError";
+    }
+}
+/**
+ * An AbortController provides an AbortSignal and the associated controls to signal
+ * that an asynchronous operation should be aborted.
+ *
+ * @example
+ * Abort an operation when another event fires
+ * ```ts
+ * const controller = new AbortController();
+ * const signal = controller.signal;
+ * doAsyncWork(signal);
+ * button.addEventListener('click', () => controller.abort());
+ * ```
+ *
+ * @example
+ * Share an aborter across multiple operations for 30 seconds
+ * ```ts
+ * // Upload the same data to 2 different data centers at the same time,
+ * // abort the other when either of them finishes
+ * const controller = new AbortController(AbortController.timeout(30 * 1000));
+ * doAsyncWork(controller.signal).then(() => controller.abort());
+ * doAsyncWork(controller.signal).then(() => controller.abort());
+ * ```
+ *
+ * @example
+ * Cascaded aborting
+ * ```ts
+ * // All operations can't take more than 30 seconds
+ * const aborter = AbortController.timeout(30 * 1000);
+ *
+ * // The following 2 operations can't take more than 25 seconds either
+ * await doAsyncWork(new AbortController([aborter, AbortController.timeout(25 * 1000)]).signal);
+ * await doAsyncWork(new AbortController([aborter, AbortController.timeout(25 * 1000)]).signal);
+ * ```
+ */
+class AbortController {
+    // eslint-disable-next-line @typescript-eslint/explicit-module-boundary-types
+    constructor(parentSignals) {
+        this._signal = new AbortSignal();
+        if (!parentSignals) {
+            return;
+        }
+        // coerce parentSignals into an array
+        if (!Array.isArray(parentSignals)) {
+            // eslint-disable-next-line prefer-rest-params
+            parentSignals = arguments;
+        }
+        for (const parentSignal of parentSignals) {
+            // if the parent signal has already had abort() called,
+            // then call abort on this signal as well.
+            if (parentSignal.aborted) {
+                this.abort();
+            }
+            else {
+                // when the parent signal aborts, this signal should as well.
+                parentSignal.addEventListener("abort", () => {
+                    this.abort();
+                });
+            }
+        }
+    }
+    /**
+     * The AbortSignal associated with this controller that will signal aborted
+     * when the abort method is called on this controller.
+     *
+     * @readonly
+     */
+    get signal() {
+        return this._signal;
+    }
+    /**
+     * Signal that any operations that were passed this controller's associated abort signal
+     * should cancel any remaining work and throw an `AbortError`.
+     */
+    abort() {
+        abortSignal(this._signal);
+    }
+    /**
+     * Creates a new AbortSignal instance that will abort after the provided ms.
+     * @param ms - Elapsed time in milliseconds to trigger an abort.
+     */
+    static timeout(ms) {
+        const signal = new AbortSignal();
+        const timer = setTimeout(abortSignal, ms, signal);
+        // Prevent the active Timer from keeping the Node.js event loop active.
+        if (typeof timer.unref === "function") {
+            timer.unref();
+        }
+        return signal;
+    }
+}
+
+exports.AbortController = AbortController;
+exports.AbortError = AbortError;
+exports.AbortSignal = AbortSignal;
+//# sourceMappingURL=index.js.map
+
+
+/***/ }),
+
+/***/ 9784:
+/***/ ((__unused_webpack_module, exports, __nccwpck_require__) => {
+
+
+
+Object.defineProperty(exports, "__esModule", ({ value: true }));
+
+var coreRestPipeline = __nccwpck_require__(5565);
+var tslib = __nccwpck_require__(1577);
+var coreAuth = __nccwpck_require__(9967);
+var coreUtil = __nccwpck_require__(402);
+var coreHttpCompat = __nccwpck_require__(7184);
+var coreClient = __nccwpck_require__(8953);
+var coreXml = __nccwpck_require__(9343);
+var logger$1 = __nccwpck_require__(5851);
+var abortController = __nccwpck_require__(764);
+var crypto = __nccwpck_require__(6982);
+var coreTracing = __nccwpck_require__(9340);
+var stream = __nccwpck_require__(2203);
+var coreLro = __nccwpck_require__(2670);
+var events = __nccwpck_require__(4434);
+var fs = __nccwpck_require__(9896);
+var util = __nccwpck_require__(9023);
+var buffer = __nccwpck_require__(181);
+
+function _interopNamespaceDefault(e) {
+    var n = Object.create(null);
+    if (e) {
+        Object.keys(e).forEach(function (k) {
+            if (k !== 'default') {
+                var d = Object.getOwnPropertyDescriptor(e, k);
+                Object.defineProperty(n, k, d.get ? d : {
+                    enumerable: true,
+                    get: function () { return e[k]; }
+                });
+            }
+        });
+    }
+    n.default = e;
+    return Object.freeze(n);
+}
+
+var coreHttpCompat__namespace = /*#__PURE__*/_interopNamespaceDefault(coreHttpCompat);
+var coreClient__namespace = /*#__PURE__*/_interopNamespaceDefault(coreClient);
+var fs__namespace = /*#__PURE__*/_interopNamespaceDefault(fs);
+var util__namespace = /*#__PURE__*/_interopNamespaceDefault(util);
+
+// Copyright (c) Microsoft Corporation.
+// Licensed under the MIT License.
+/**
+ * The `@azure/logger` configuration for this package.
+ */
+const logger = logger$1.createClientLogger("storage-blob");
+
+// Copyright (c) Microsoft Corporation.
+// Licensed under the MIT License.
+/**
+ * The base class from which all request policies derive.
+ */
+class BaseRequestPolicy {
+    /**
+     * Creates an instance of BaseRequestPolicy.
+     */
+    constructor(
+    /**
+     * The next policy in the pipeline. Each policy is responsible for executing the next one if the request is to continue through the pipeline.
+     */
+    _nextPolicy, 
+    /**
+     * The options that can be passed to a given request policy.
+     */
+    _options) {
+        this._nextPolicy = _nextPolicy;
+        this._options = _options;
+    }
+    /**
+     * Get whether or not a log with the provided log level should be logged.
+     * @param logLevel - The log level of the log that will be logged.
+     * @returns Whether or not a log with the provided log level should be logged.
+     */
+    shouldLog(logLevel) {
+        return this._options.shouldLog(logLevel);
+    }
+    /**
+     * Attempt to log the provided message to the provided logger. If no logger was provided or if
+     * the log level does not meet the logger's threshold, then nothing will be logged.
+     * @param logLevel - The log level of this log.
+     * @param message - The message of this log.
+     */
+    log(logLevel, message) {
+        this._options.log(logLevel, message);
+    }
+}
+
+// Copyright (c) Microsoft Corporation.
+// Licensed under the MIT License.
+const SDK_VERSION = "12.26.0";
+const SERVICE_VERSION = "2025-01-05";
+const BLOCK_BLOB_MAX_UPLOAD_BLOB_BYTES = 256 * 1024 * 1024; // 256MB
+const BLOCK_BLOB_MAX_STAGE_BLOCK_BYTES = 4000 * 1024 * 1024; // 4000MB
+const BLOCK_BLOB_MAX_BLOCKS = 50000;
+const DEFAULT_BLOCK_BUFFER_SIZE_BYTES = 8 * 1024 * 1024; // 8MB
+const DEFAULT_BLOB_DOWNLOAD_BLOCK_BYTES = 4 * 1024 * 1024; // 4MB
+const DEFAULT_MAX_DOWNLOAD_RETRY_REQUESTS = 5;
+const REQUEST_TIMEOUT = 100 * 1000; // In ms
+/**
+ * The OAuth scope to use with Azure Storage.
+ */
+const StorageOAuthScopes = "https://storage.azure.com/.default";
+const URLConstants = {
+    Parameters: {
+        FORCE_BROWSER_NO_CACHE: "_",
+        SIGNATURE: "sig",
+        SNAPSHOT: "snapshot",
+        VERSIONID: "versionid",
+        TIMEOUT: "timeout",
+    },
+};
+const HTTPURLConnection = {
+    HTTP_ACCEPTED: 202,
+    HTTP_CONFLICT: 409,
+    HTTP_NOT_FOUND: 404,
+    HTTP_PRECON_FAILED: 412,
+    HTTP_RANGE_NOT_SATISFIABLE: 416,
+};
+const HeaderConstants = {
+    AUTHORIZATION: "Authorization",
+    AUTHORIZATION_SCHEME: "Bearer",
+    CONTENT_ENCODING: "Content-Encoding",
+    CONTENT_ID: "Content-ID",
+    CONTENT_LANGUAGE: "Content-Language",
+    CONTENT_LENGTH: "Content-Length",
+    CONTENT_MD5: "Content-Md5",
+    CONTENT_TRANSFER_ENCODING: "Content-Transfer-Encoding",
+    CONTENT_TYPE: "Content-Type",
+    COOKIE: "Cookie",
+    DATE: "date",
+    IF_MATCH: "if-match",
+    IF_MODIFIED_SINCE: "if-modified-since",
+    IF_NONE_MATCH: "if-none-match",
+    IF_UNMODIFIED_SINCE: "if-unmodified-since",
+    PREFIX_FOR_STORAGE: "x-ms-",
+    RANGE: "Range",
+    USER_AGENT: "User-Agent",
+    X_MS_CLIENT_REQUEST_ID: "x-ms-client-request-id",
+    X_MS_COPY_SOURCE: "x-ms-copy-source",
+    X_MS_DATE: "x-ms-date",
+    X_MS_ERROR_CODE: "x-ms-error-code",
+    X_MS_VERSION: "x-ms-version",
+    X_MS_CopySourceErrorCode: "x-ms-copy-source-error-code",
+};
+const ETagNone = "";
+const ETagAny = "*";
+const SIZE_1_MB = 1 * 1024 * 1024;
+const BATCH_MAX_REQUEST = 256;
+const BATCH_MAX_PAYLOAD_IN_BYTES = 4 * SIZE_1_MB;
+const HTTP_LINE_ENDING = "\r\n";
+const HTTP_VERSION_1_1 = "HTTP/1.1";
+const EncryptionAlgorithmAES25 = "AES256";
+const DevelopmentConnectionString = `DefaultEndpointsProtocol=http;AccountName=devstoreaccount1;AccountKey=Eby8vdM02xNOcqFlqUwJPLlmEtlCDXJ1OUzFT50uSRZ6IFsuFq2UVErCz4I6tq/K1SZFPTOtr/KBHBeksoGMGw==;BlobEndpoint=http://127.0.0.1:10000/devstoreaccount1;`;
+const StorageBlobLoggingAllowedHeaderNames = [
+    "Access-Control-Allow-Origin",
+    "Cache-Control",
+    "Content-Length",
+    "Content-Type",
+    "Date",
+    "Request-Id",
+    "traceparent",
+    "Transfer-Encoding",
+    "User-Agent",
+    "x-ms-client-request-id",
+    "x-ms-date",
+    "x-ms-error-code",
+    "x-ms-request-id",
+    "x-ms-return-client-request-id",
+    "x-ms-version",
+    "Accept-Ranges",
+    "Content-Disposition",
+    "Content-Encoding",
+    "Content-Language",
+    "Content-MD5",
+    "Content-Range",
+    "ETag",
+    "Last-Modified",
+    "Server",
+    "Vary",
+    "x-ms-content-crc64",
+    "x-ms-copy-action",
+    "x-ms-copy-completion-time",
+    "x-ms-copy-id",
+    "x-ms-copy-progress",
+    "x-ms-copy-status",
+    "x-ms-has-immutability-policy",
+    "x-ms-has-legal-hold",
+    "x-ms-lease-state",
+    "x-ms-lease-status",
+    "x-ms-range",
+    "x-ms-request-server-encrypted",
+    "x-ms-server-encrypted",
+    "x-ms-snapshot",
+    "x-ms-source-range",
+    "If-Match",
+    "If-Modified-Since",
+    "If-None-Match",
+    "If-Unmodified-Since",
+    "x-ms-access-tier",
+    "x-ms-access-tier-change-time",
+    "x-ms-access-tier-inferred",
+    "x-ms-account-kind",
+    "x-ms-archive-status",
+    "x-ms-blob-append-offset",
+    "x-ms-blob-cache-control",
+    "x-ms-blob-committed-block-count",
+    "x-ms-blob-condition-appendpos",
+    "x-ms-blob-condition-maxsize",
+    "x-ms-blob-content-disposition",
+    "x-ms-blob-content-encoding",
+    "x-ms-blob-content-language",
+    "x-ms-blob-content-length",
+    "x-ms-blob-content-md5",
+    "x-ms-blob-content-type",
+    "x-ms-blob-public-access",
+    "x-ms-blob-sequence-number",
+    "x-ms-blob-type",
+    "x-ms-copy-destination-snapshot",
+    "x-ms-creation-time",
+    "x-ms-default-encryption-scope",
+    "x-ms-delete-snapshots",
+    "x-ms-delete-type-permanent",
+    "x-ms-deny-encryption-scope-override",
+    "x-ms-encryption-algorithm",
+    "x-ms-if-sequence-number-eq",
+    "x-ms-if-sequence-number-le",
+    "x-ms-if-sequence-number-lt",
+    "x-ms-incremental-copy",
+    "x-ms-lease-action",
+    "x-ms-lease-break-period",
+    "x-ms-lease-duration",
+    "x-ms-lease-id",
+    "x-ms-lease-time",
+    "x-ms-page-write",
+    "x-ms-proposed-lease-id",
+    "x-ms-range-get-content-md5",
+    "x-ms-rehydrate-priority",
+    "x-ms-sequence-number-action",
+    "x-ms-sku-name",
+    "x-ms-source-content-md5",
+    "x-ms-source-if-match",
+    "x-ms-source-if-modified-since",
+    "x-ms-source-if-none-match",
+    "x-ms-source-if-unmodified-since",
+    "x-ms-tag-count",
+    "x-ms-encryption-key-sha256",
+    "x-ms-copy-source-error-code",
+    "x-ms-copy-source-status-code",
+    "x-ms-if-tags",
+    "x-ms-source-if-tags",
+];
+const StorageBlobLoggingAllowedQueryParameters = [
+    "comp",
+    "maxresults",
+    "rscc",
+    "rscd",
+    "rsce",
+    "rscl",
+    "rsct",
+    "se",
+    "si",
+    "sip",
+    "sp",
+    "spr",
+    "sr",
+    "srt",
+    "ss",
+    "st",
+    "sv",
+    "include",
+    "marker",
+    "prefix",
+    "copyid",
+    "restype",
+    "blockid",
+    "blocklisttype",
+    "delimiter",
+    "prevsnapshot",
+    "ske",
+    "skoid",
+    "sks",
+    "skt",
+    "sktid",
+    "skv",
+    "snapshot",
+];
+const BlobUsesCustomerSpecifiedEncryptionMsg = "BlobUsesCustomerSpecifiedEncryption";
+const BlobDoesNotUseCustomerSpecifiedEncryption = "BlobDoesNotUseCustomerSpecifiedEncryption";
+/// List of ports used for path style addressing.
+/// Path style addressing means that the storage account is put in the URI's path segment instead of in the host.
+const PathStylePorts = [
+    "10000",
+    "10001",
+    "10002",
+    "10003",
+    "10004",
+    "10100",
+    "10101",
+    "10102",
+    "10103",
+    "10104",
+    "11000",
+    "11001",
+    "11002",
+    "11003",
+    "11004",
+    "11100",
+    "11101",
+    "11102",
+    "11103",
+    "11104",
+];
+
+// Copyright (c) Microsoft Corporation.
+// Licensed under the MIT License.
+/**
+ * Reserved URL characters must be properly escaped for Storage services like Blob or File.
+ *
+ * ## URL encode and escape strategy for JS SDKs
+ *
+ * When customers pass a URL string into an XxxClient class constructor, the URL string may or may not already be URL encoded.
+ * But before sending to the Azure Storage server, the URL must be encoded. However, it's hard for an SDK to guess whether the URL
+ * string has been encoded or not. We have 2 potential strategies, and chose strategy two for the XxxClient constructors.
+ *
+ * ### Strategy One: Assume the customer URL string is not encoded, and always encode URL string in SDK.
+ *
+ * This is what legacy V2 SDK does, simple and works for most of the cases.
+ * - When customer URL string is "http://account.blob.core.windows.net/con/b:",
+ *   SDK will encode it to "http://account.blob.core.windows.net/con/b%3A" and send to server. A blob named "b:" will be created.
+ * - When customer URL string is "http://account.blob.core.windows.net/con/b%3A",
+ *   SDK will encode it to "http://account.blob.core.windows.net/con/b%253A" and send to server. A blob named "b%3A" will be created.
+ *
+ * But this strategy makes it impossible to create a blob with "?" in its name, because when the customer URL string is
+ * "http://account.blob.core.windows.net/con/blob?name", the "?name" will be treated as a URL parameter instead of the blob name.
+ * If the customer URL string is "http://account.blob.core.windows.net/con/blob%3Fname", a blob named "blob%3Fname" will be created.
+ * The V2 SDK doesn't have this issue because it doesn't allow customers to pass in a full URL; it accepts a separate blob name and calls encodeURIComponent on it.
+ * We cannot accept an SDK that cannot create a blob name containing "?". So we implement strategy two:
+ *
+ * ### Strategy Two: SDK doesn't assume the URL has been encoded or not. It will just escape the special characters.
+ *
+ * This is what the V10 Blob Go SDK does. It accepts a URL type in Go and calls url.EscapedPath() to escape the special characters that are not yet escaped.
+ * - When customer URL string is "http://account.blob.core.windows.net/con/b:",
+ *   SDK will escape ":" like "http://account.blob.core.windows.net/con/b%3A" and send to server. A blob named "b:" will be created.
+ * - When customer URL string is "http://account.blob.core.windows.net/con/b%3A",
+ *   There are no special characters, so send "http://account.blob.core.windows.net/con/b%3A" to the server. A blob named "b:" will be created.
+ * - When customer URL string is "http://account.blob.core.windows.net/con/b%253A",
+ *   There are no special characters, so send "http://account.blob.core.windows.net/con/b%253A" to the server. A blob named "b%3A" will be created.
+ *
+ * This strategy gives us the flexibility to create blob names with any special characters. But "%" will be treated as a special character: if the URL string
+ * is not encoded, there shouldn't be a "%" in the URL string, otherwise the URL is not a valid URL.
+ * If a customer needs to create a blob with "%" in its name, use "%25" instead of "%", just like the 3rd sample above.
+ * And following URL strings are invalid:
+ * - "http://account.blob.core.windows.net/con/b%"
+ * - "http://account.blob.core.windows.net/con/b%2"
+ * - "http://account.blob.core.windows.net/con/b%G"
+ *
+ * Another special character is "?"; use "%3F" to represent a blob name with "?" in a URL string.
+ *
+ * ### Strategy for containerName, blobName or other specific XXXName parameters in methods such as `containerClient.getBlobClient(blobName)`
+ *
+ * We will apply strategy one and call encodeURIComponent for these parameters, like blobName, because what customers pass in is a plain name instead of a URL.
+ *
+ * @see https://docs.microsoft.com/en-us/rest/api/storageservices/naming-and-referencing-containers--blobs--and-metadata
+ * @see https://docs.microsoft.com/en-us/rest/api/storageservices/naming-and-referencing-shares--directories--files--and-metadata
+ *
+ * @param url -
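+ *
+ * A minimal illustration of strategy two as implemented by this helper (the account and
+ * container names are placeholders):
+ * ```ts
+ * escapeURLPath("http://account.blob.core.windows.net/con/b:");
+ * // => "http://account.blob.core.windows.net/con/b%3A"
+ * escapeURLPath("http://account.blob.core.windows.net/con/b%3A");
+ * // => "http://account.blob.core.windows.net/con/b%3A" (already-escaped input is left as-is)
+ * ```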
+ */
+function escapeURLPath(url) {
+    const urlParsed = new URL(url);
+    let path = urlParsed.pathname;
+    path = path || "/";
+    path = escape(path);
+    urlParsed.pathname = path;
+    return urlParsed.toString();
+}
+function getProxyUriFromDevConnString(connectionString) {
+    // Development Connection String
+    // https://docs.microsoft.com/en-us/azure/storage/common/storage-configure-connection-string#connect-to-the-emulator-account-using-the-well-known-account-name-and-key
+    let proxyUri = "";
+    if (connectionString.search("DevelopmentStorageProxyUri=") !== -1) {
+        // CONNECTION_STRING=UseDevelopmentStorage=true;DevelopmentStorageProxyUri=http://myProxyUri
+        const matchCredentials = connectionString.split(";");
+        for (const element of matchCredentials) {
+            if (element.trim().startsWith("DevelopmentStorageProxyUri=")) {
+                proxyUri = element.trim().match("DevelopmentStorageProxyUri=(.*)")[1];
+            }
+        }
+    }
+    return proxyUri;
+}
+function getValueInConnString(connectionString, argument) {
+    const elements = connectionString.split(";");
+    for (const element of elements) {
+        if (element.trim().startsWith(argument)) {
+            return element.trim().match(argument + "=(.*)")[1];
+        }
+    }
+    return "";
+}
+/**
+ * Extracts the parts of an Azure Storage account connection string.
+ *
+ * @param connectionString - Connection string.
+ * @returns String key value pairs of the storage account's url and credentials.
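+ *
+ * A minimal sketch of the two supported shapes (the account name, key and SAS below are
+ * placeholders, not real credentials; accountKey is returned as a Buffer):
+ * ```ts
+ * extractConnectionStringParts(
+ *   "DefaultEndpointsProtocol=https;AccountName=myaccount;AccountKey=<base64-key>;EndpointSuffix=core.windows.net",
+ * );
+ * // => { kind: "AccountConnString", url: "https://myaccount.blob.core.windows.net", accountName: "myaccount", ... }
+ *
+ * extractConnectionStringParts(
+ *   "BlobEndpoint=https://myaccount.blob.core.windows.net;SharedAccessSignature=sv=...",
+ * );
+ * // => { kind: "SASConnString", url: "https://myaccount.blob.core.windows.net", accountName: "myaccount", accountSas: "sv=..." }
+ * ```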
+ */
+function extractConnectionStringParts(connectionString) {
+    let proxyUri = "";
+    if (connectionString.startsWith("UseDevelopmentStorage=true")) {
+        // Development connection string
+        proxyUri = getProxyUriFromDevConnString(connectionString);
+        connectionString = DevelopmentConnectionString;
+    }
+    // Matching BlobEndpoint in the Account connection string
+    let blobEndpoint = getValueInConnString(connectionString, "BlobEndpoint");
+    // Slicing off '/' at the end if exists
+    // (The methods that use `extractConnectionStringParts` expect the url to not have `/` at the end)
+    blobEndpoint = blobEndpoint.endsWith("/") ? blobEndpoint.slice(0, -1) : blobEndpoint;
+    if (connectionString.search("DefaultEndpointsProtocol=") !== -1 &&
+        connectionString.search("AccountKey=") !== -1) {
+        // Account connection string
+        let defaultEndpointsProtocol = "";
+        let accountName = "";
+        let accountKey = Buffer.from("accountKey", "base64");
+        let endpointSuffix = "";
+        // Get account name and key
+        accountName = getValueInConnString(connectionString, "AccountName");
+        accountKey = Buffer.from(getValueInConnString(connectionString, "AccountKey"), "base64");
+        if (!blobEndpoint) {
+            // BlobEndpoint is not present in the Account connection string
+            // Can be obtained from `${defaultEndpointsProtocol}://${accountName}.blob.${endpointSuffix}`
+            defaultEndpointsProtocol = getValueInConnString(connectionString, "DefaultEndpointsProtocol");
+            const protocol = defaultEndpointsProtocol.toLowerCase();
+            if (protocol !== "https" && protocol !== "http") {
+                throw new Error("Invalid DefaultEndpointsProtocol in the provided Connection String. Expecting 'https' or 'http'");
+            }
+            endpointSuffix = getValueInConnString(connectionString, "EndpointSuffix");
+            if (!endpointSuffix) {
+                throw new Error("Invalid EndpointSuffix in the provided Connection String");
+            }
+            blobEndpoint = `${defaultEndpointsProtocol}://${accountName}.blob.${endpointSuffix}`;
+        }
+        if (!accountName) {
+            throw new Error("Invalid AccountName in the provided Connection String");
+        }
+        else if (accountKey.length === 0) {
+            throw new Error("Invalid AccountKey in the provided Connection String");
+        }
+        return {
+            kind: "AccountConnString",
+            url: blobEndpoint,
+            accountName,
+            accountKey,
+            proxyUri,
+        };
+    }
+    else {
+        // SAS connection string
+        let accountSas = getValueInConnString(connectionString, "SharedAccessSignature");
+        let accountName = getValueInConnString(connectionString, "AccountName");
+        // if accountName is empty, try to read it from BlobEndpoint
+        if (!accountName) {
+            accountName = getAccountNameFromUrl(blobEndpoint);
+        }
+        if (!blobEndpoint) {
+            throw new Error("Invalid BlobEndpoint in the provided SAS Connection String");
+        }
+        else if (!accountSas) {
+            throw new Error("Invalid SharedAccessSignature in the provided SAS Connection String");
+        }
+        // client constructors assume accountSas does *not* start with ?
+        if (accountSas.startsWith("?")) {
+            accountSas = accountSas.substring(1);
+        }
+        return { kind: "SASConnString", url: blobEndpoint, accountName, accountSas };
+    }
+}
+/**
+ * Internal escape method implementing Strategy Two mentioned in the escapeURLPath() description.
+ *
+ * @param text -
+ */
+function escape(text) {
+    return encodeURIComponent(text)
+        .replace(/%2F/g, "/") // Don't escape for "/"
+        .replace(/'/g, "%27") // Escape for "'"
+        .replace(/\+/g, "%20")
+        .replace(/%25/g, "%"); // Revert encoded "%"
+}
+/**
+ * Append a string to the URL path. Avoids introducing a duplicated "/" when the
+ * URL path already ends with a "/".
+ *
+ * @param url - Source URL string
+ * @param name - String to be appended to URL
+ * @returns An updated URL string
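+ *
+ * For example (illustrative; account and container names are placeholders):
+ * ```ts
+ * appendToURLPath("https://myaccount.blob.core.windows.net/mycontainer", "myblob");
+ * // => "https://myaccount.blob.core.windows.net/mycontainer/myblob"
+ * appendToURLPath("https://myaccount.blob.core.windows.net/mycontainer/", "myblob");
+ * // => "https://myaccount.blob.core.windows.net/mycontainer/myblob" (no duplicated "/")
+ * ```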
+ */
+function appendToURLPath(url, name) {
+    const urlParsed = new URL(url);
+    let path = urlParsed.pathname;
+    path = path ? (path.endsWith("/") ? `${path}${name}` : `${path}/${name}`) : name;
+    urlParsed.pathname = path;
+    return urlParsed.toString();
+}
+/**
+ * Set a URL parameter name and value. If the name already exists in the URL parameters, the old value
+ * will be replaced by the new value. If no value is provided, the parameter will be deleted.
+ *
+ * @param url - Source URL string
+ * @param name - Parameter name
+ * @param value - Parameter value
+ * @returns An updated URL string
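+ *
+ * For example (illustrative):
+ * ```ts
+ * setURLParameter("https://myaccount.blob.core.windows.net/?timeout=20", "timeout", "30");
+ * // => "https://myaccount.blob.core.windows.net/?timeout=30"
+ * setURLParameter("https://myaccount.blob.core.windows.net/?timeout=20", "timeout");
+ * // => "https://myaccount.blob.core.windows.net/" (parameter removed when no value is given)
+ * ```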
+ */
+function setURLParameter(url, name, value) {
+    const urlParsed = new URL(url);
+    const encodedName = encodeURIComponent(name);
+    const encodedValue = value ? encodeURIComponent(value) : undefined;
+    // mutating searchParams will change the encoding, so we have to do this ourselves
+    const searchString = urlParsed.search === "" ? "?" : urlParsed.search;
+    const searchPieces = [];
+    for (const pair of searchString.slice(1).split("&")) {
+        if (pair) {
+            const [key] = pair.split("=", 2);
+            if (key !== encodedName) {
+                searchPieces.push(pair);
+            }
+        }
+    }
+    if (encodedValue) {
+        searchPieces.push(`${encodedName}=${encodedValue}`);
+    }
+    urlParsed.search = searchPieces.length ? `?${searchPieces.join("&")}` : "";
+    return urlParsed.toString();
+}
+/**
+ * Get URL parameter by name.
+ *
+ * @param url -
+ * @param name -
+ */
+function getURLParameter(url, name) {
+    var _a;
+    const urlParsed = new URL(url);
+    return (_a = urlParsed.searchParams.get(name)) !== null && _a !== void 0 ? _a : undefined;
+}
+/**
+ * Set URL host.
+ *
+ * @param url - Source URL string
+ * @param host - New host string
+ * @returns An updated URL string
+ */
+function setURLHost(url, host) {
+    const urlParsed = new URL(url);
+    urlParsed.hostname = host;
+    return urlParsed.toString();
+}
+/**
+ * Get the URL path from a URL string.
+ *
+ * @param url - Source URL string
+ */
+function getURLPath(url) {
+    try {
+        const urlParsed = new URL(url);
+        return urlParsed.pathname;
+    }
+    catch (e) {
+        return undefined;
+    }
+}
+/**
+ * Get the URL scheme from a URL string.
+ *
+ * @param url - Source URL string
+ */
+function getURLScheme(url) {
+    try {
+        const urlParsed = new URL(url);
+        return urlParsed.protocol.endsWith(":") ? urlParsed.protocol.slice(0, -1) : urlParsed.protocol;
+    }
+    catch (e) {
+        return undefined;
+    }
+}
+/**
+ * Get the URL path and query from a URL string.
+ *
+ * @param url - Source URL string
+ */
+function getURLPathAndQuery(url) {
+    const urlParsed = new URL(url);
+    const pathString = urlParsed.pathname;
+    if (!pathString) {
+        throw new RangeError("Invalid url without valid path.");
+    }
+    let queryString = urlParsed.search || "";
+    queryString = queryString.trim();
+    if (queryString !== "") {
+        queryString = queryString.startsWith("?") ? queryString : `?${queryString}`; // Ensure query string start with '?'
+    }
+    return `${pathString}${queryString}`;
+}
+/**
+ * Get URL query key/value pairs from a URL string.
+ *
+ * @param url -
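+ *
+ * For example (illustrative):
+ * ```ts
+ * getURLQueries("https://myaccount.blob.core.windows.net/?comp=list&maxresults=100");
+ * // => { comp: "list", maxresults: "100" }
+ * ```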
+ */
+function getURLQueries(url) {
+    let queryString = new URL(url).search;
+    if (!queryString) {
+        return {};
+    }
+    queryString = queryString.trim();
+    queryString = queryString.startsWith("?") ? queryString.substring(1) : queryString;
+    let querySubStrings = queryString.split("&");
+    querySubStrings = querySubStrings.filter((value) => {
+        const indexOfEqual = value.indexOf("=");
+        const lastIndexOfEqual = value.lastIndexOf("=");
+        return (indexOfEqual > 0 && indexOfEqual === lastIndexOfEqual && lastIndexOfEqual < value.length - 1);
+    });
+    const queries = {};
+    for (const querySubString of querySubStrings) {
+        const splitResults = querySubString.split("=");
+        const key = splitResults[0];
+        const value = splitResults[1];
+        queries[key] = value;
+    }
+    return queries;
+}
+/**
+ * Append a string to URL query.
+ *
+ * @param url - Source URL string.
+ * @param queryParts - String to be appended to the URL query.
+ * @returns An updated URL string.
+ */
+function appendToURLQuery(url, queryParts) {
+    const urlParsed = new URL(url);
+    let query = urlParsed.search;
+    if (query) {
+        query += "&" + queryParts;
+    }
+    else {
+        query = queryParts;
+    }
+    urlParsed.search = query;
+    return urlParsed.toString();
+}
+/**
+ * Rounds a date off to seconds.
+ *
+ * @param date -
+ * @param withMilliseconds - If true, YYYY-MM-DDThh:mm:ss.fffffffZ will be returned;
+ *                                          If false, YYYY-MM-DDThh:mm:ssZ will be returned.
+ * @returns Date string in ISO8601 format, with or without the 7-digit fractional-seconds component
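+ *
+ * For example (illustrative):
+ * ```ts
+ * truncatedISO8061Date(new Date("2018-10-29T06:34:36.139Z"));        // "2018-10-29T06:34:36.1390000Z"
+ * truncatedISO8061Date(new Date("2018-10-29T06:34:36.139Z"), false); // "2018-10-29T06:34:36Z"
+ * ```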
+ */
+function truncatedISO8061Date(date, withMilliseconds = true) {
+    // Date.toISOString() will return like "2018-10-29T06:34:36.139Z"
+    const dateString = date.toISOString();
+    return withMilliseconds
+        ? dateString.substring(0, dateString.length - 1) + "0000" + "Z"
+        : dateString.substring(0, dateString.length - 5) + "Z";
+}
+/**
+ * Base64 encode.
+ *
+ * @param content -
+ */
+function base64encode(content) {
+    return !coreUtil.isNode ? btoa(content) : Buffer.from(content).toString("base64");
+}
+/**
+ * Generate a 64-byte base64 block ID string.
+ *
+ * @param blockIDPrefix -
+ * @param blockIndex -
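+ *
+ * For example (illustrative; the result is the base64 encoding of the zero-padded
+ * 48-character source string):
+ * ```ts
+ * generateBlockID("block-", 0);
+ * // => base64encode("block-" + "0".padStart(42, "0")), a 64-character block ID
+ * ```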
+ */
+function generateBlockID(blockIDPrefix, blockIndex) {
+    // To generate a 64-byte base64 string, the source string should be 48 bytes
+    const maxSourceStringLength = 48;
+    // A blob can have a maximum of 100,000 uncommitted blocks at any given time
+    const maxBlockIndexLength = 6;
+    const maxAllowedBlockIDPrefixLength = maxSourceStringLength - maxBlockIndexLength;
+    if (blockIDPrefix.length > maxAllowedBlockIDPrefixLength) {
+        blockIDPrefix = blockIDPrefix.slice(0, maxAllowedBlockIDPrefixLength);
+    }
+    const res = blockIDPrefix +
+        padStart(blockIndex.toString(), maxSourceStringLength - blockIDPrefix.length, "0");
+    return base64encode(res);
+}
+/**
+ * Delay specified time interval.
+ *
+ * @param timeInMs -
+ * @param aborter -
+ * @param abortError -
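+ *
+ * A minimal usage sketch (`signal` is assumed to be an AbortSignal-like object exposing
+ * addEventListener/removeEventListener):
+ * ```ts
+ * await delay(1000);                                          // wait 1 second
+ * await delay(5000, signal, new Error("operation aborted"));  // rejects early if signal fires "abort"
+ * ```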
+ */
+async function delay(timeInMs, aborter, abortError) {
+    return new Promise((resolve, reject) => {
+        /* eslint-disable-next-line prefer-const */
+        let timeout;
+        const abortHandler = () => {
+            if (timeout !== undefined) {
+                clearTimeout(timeout);
+            }
+            reject(abortError);
+        };
+        const resolveHandler = () => {
+            if (aborter !== undefined) {
+                aborter.removeEventListener("abort", abortHandler);
+            }
+            resolve();
+        };
+        timeout = setTimeout(resolveHandler, timeInMs);
+        if (aborter !== undefined) {
+            aborter.addEventListener("abort", abortHandler);
+        }
+    });
+}
+/**
+ * String.prototype.padStart()
+ *
+ * @param currentString -
+ * @param targetLength -
+ * @param padString -
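+ *
+ * For example (illustrative):
+ * ```ts
+ * padStart("7", 3, "0"); // => "007"
+ * padStart("42", 5);     // => "   42" (defaults to padding with spaces)
+ * ```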
+ */
+function padStart(currentString, targetLength, padString = " ") {
+    // @ts-expect-error: TS doesn't know this code needs to run downlevel sometimes
+    if (String.prototype.padStart) {
+        return currentString.padStart(targetLength, padString);
+    }
+    padString = padString || " ";
+    if (currentString.length > targetLength) {
+        return currentString;
+    }
+    else {
+        targetLength = targetLength - currentString.length;
+        if (targetLength > padString.length) {
+            padString += padString.repeat(targetLength / padString.length);
+        }
+        return padString.slice(0, targetLength) + currentString;
+    }
+}
+/**
+ * Whether two strings are equal when compared case-insensitively.
+ *
+ * @param str1 -
+ * @param str2 -
+ */
+function iEqual(str1, str2) {
+    return str1.toLocaleLowerCase() === str2.toLocaleLowerCase();
+}
+/**
+ * Extracts the account name from the URL.
+ * @param url - URL to extract the account name from
+ * @returns The account name
+ */
+function getAccountNameFromUrl(url) {
+    const parsedUrl = new URL(url);
+    let accountName;
+    try {
+        if (parsedUrl.hostname.split(".")[1] === "blob") {
+            // `${defaultEndpointsProtocol}://${accountName}.blob.${endpointSuffix}`;
+            accountName = parsedUrl.hostname.split(".")[0];
+        }
+        else if (isIpEndpointStyle(parsedUrl)) {
+            // IPv4/IPv6 address hosts... Example - http://192.0.0.10:10001/devstoreaccount1/
+            // Single word domain without a [dot] in the endpoint... Example - http://localhost:10001/devstoreaccount1/
+            // .getPath() -> /devstoreaccount1/
+            accountName = parsedUrl.pathname.split("/")[1];
+        }
+        else {
+            // Custom domain case: "https://customdomain.com/containername/blob".
+            accountName = "";
+        }
+        return accountName;
+    }
+    catch (error) {
+        throw new Error("Unable to extract accountName with provided information.");
+    }
+}
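+// Illustrative examples of the host shapes handled by isIpEndpointStyle (hosts are placeholders):
+//   isIpEndpointStyle(new URL("http://192.0.0.10:10001/devstoreaccount1")) // => true (IPv4 host)
+//   isIpEndpointStyle(new URL("http://localhost:10000"))                   // => true (localhost with port)
+//   isIpEndpointStyle(new URL("https://myaccount.blob.core.windows.net"))  // => false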
+function isIpEndpointStyle(parsedUrl) {
+    const host = parsedUrl.host;
+    // Case 1: Ipv6, use a broad regex to find out candidates whose host contains two ':'.
+    // Case 2: localhost(:port) or host.docker.internal, use broad regex to match port part.
+    // Case 3: Ipv4, use a broad regex which just checks if the host contains an IPv4 address.
+    // For valid hosts please refer to https://man7.org/linux/man-pages/man7/hostname.7.html.
+    return (/^.*:.*:.*$|^(localhost|host.docker.internal)(:[0-9]+)?$|^(\d|[1-9]\d|1\d\d|2[0-4]\d|25[0-5])(\.(\d|[1-9]\d|1\d\d|2[0-4]\d|25[0-5])){3}(:[0-9]+)?$/.test(host) ||
+        (Boolean(parsedUrl.port) && PathStylePorts.includes(parsedUrl.port)));
+}
+/**
+ * Convert Tags to encoded string.
+ *
+ * @param tags -
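+ *
+ * For example (illustrative; tag names and values are placeholders):
+ * ```ts
+ * toBlobTagsString({ project: "demo", "release date": "2024-01-01" });
+ * // => "project=demo&release%20date=2024-01-01"
+ * ```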
+ */
+function toBlobTagsString(tags) {
+    if (tags === undefined) {
+        return undefined;
+    }
+    const tagPairs = [];
+    for (const key in tags) {
+        if (Object.prototype.hasOwnProperty.call(tags, key)) {
+            const value = tags[key];
+            tagPairs.push(`${encodeURIComponent(key)}=${encodeURIComponent(value)}`);
+        }
+    }
+    return tagPairs.join("&");
+}
+/**
+ * Convert Tags type to BlobTags.
+ *
+ * @param tags -
+ */
+function toBlobTags(tags) {
+    if (tags === undefined) {
+        return undefined;
+    }
+    const res = {
+        blobTagSet: [],
+    };
+    for (const key in tags) {
+        if (Object.prototype.hasOwnProperty.call(tags, key)) {
+            const value = tags[key];
+            res.blobTagSet.push({
+                key,
+                value,
+            });
+        }
+    }
+    return res;
+}
+/**
+ * Convert BlobTags to Tags type.
+ *
+ * @param tags -
+ */
+function toTags(tags) {
+    if (tags === undefined) {
+        return undefined;
+    }
+    const res = {};
+    for (const blobTag of tags.blobTagSet) {
+        res[blobTag.key] = blobTag.value;
+    }
+    return res;
+}
+/**
+ * Convert BlobQueryTextConfiguration to QuerySerialization type.
+ *
+ * @param textConfiguration -
+ */
+function toQuerySerialization(textConfiguration) {
+    if (textConfiguration === undefined) {
+        return undefined;
+    }
+    switch (textConfiguration.kind) {
+        case "csv":
+            return {
+                format: {
+                    type: "delimited",
+                    delimitedTextConfiguration: {
+                        columnSeparator: textConfiguration.columnSeparator || ",",
+                        fieldQuote: textConfiguration.fieldQuote || "",
+                        recordSeparator: textConfiguration.recordSeparator,
+                        escapeChar: textConfiguration.escapeCharacter || "",
+                        headersPresent: textConfiguration.hasHeaders || false,
+                    },
+                },
+            };
+        case "json":
+            return {
+                format: {
+                    type: "json",
+                    jsonTextConfiguration: {
+                        recordSeparator: textConfiguration.recordSeparator,
+                    },
+                },
+            };
+        case "arrow":
+            return {
+                format: {
+                    type: "arrow",
+                    arrowConfiguration: {
+                        schema: textConfiguration.schema,
+                    },
+                },
+            };
+        case "parquet":
+            return {
+                format: {
+                    type: "parquet",
+                },
+            };
+        default:
+            throw Error("Invalid BlobQueryTextConfiguration.");
+    }
+}
+function parseObjectReplicationRecord(objectReplicationRecord) {
+    if (!objectReplicationRecord) {
+        return undefined;
+    }
+    if ("policy-id" in objectReplicationRecord) {
+        // If the dictionary contains a key with policy id, we are not required to do any parsing since
+        // the policy id should already be stored in the ObjectReplicationDestinationPolicyId.
+        return undefined;
+    }
+    const orProperties = [];
+    for (const key in objectReplicationRecord) {
+        const ids = key.split("_");
+        const policyPrefix = "or-";
+        if (ids[0].startsWith(policyPrefix)) {
+            ids[0] = ids[0].substring(policyPrefix.length);
+        }
+        const rule = {
+            ruleId: ids[1],
+            replicationStatus: objectReplicationRecord[key],
+        };
+        const policyIndex = orProperties.findIndex((policy) => policy.policyId === ids[0]);
+        if (policyIndex > -1) {
+            orProperties[policyIndex].rules.push(rule);
+        }
+        else {
+            orProperties.push({
+                policyId: ids[0],
+                rules: [rule],
+            });
+        }
+    }
+    return orProperties;
+}
+function httpAuthorizationToString(httpAuthorization) {
+    return httpAuthorization ? httpAuthorization.scheme + " " + httpAuthorization.value : undefined;
+}
+function BlobNameToString(name) {
+    if (name.encoded) {
+        return decodeURIComponent(name.content);
+    }
+    else {
+        return name.content;
+    }
+}
+function ConvertInternalResponseOfListBlobFlat(internalResponse) {
+    return Object.assign(Object.assign({}, internalResponse), { segment: {
+            blobItems: internalResponse.segment.blobItems.map((blobItemInteral) => {
+                const blobItem = Object.assign(Object.assign({}, blobItemInteral), { name: BlobNameToString(blobItemInteral.name) });
+                return blobItem;
+            }),
+        } });
+}
+function ConvertInternalResponseOfListBlobHierarchy(internalResponse) {
+    var _a;
+    return Object.assign(Object.assign({}, internalResponse), { segment: {
+            blobPrefixes: (_a = internalResponse.segment.blobPrefixes) === null || _a === void 0 ? void 0 : _a.map((blobPrefixInternal) => {
+                const blobPrefix = Object.assign(Object.assign({}, blobPrefixInternal), { name: BlobNameToString(blobPrefixInternal.name) });
+                return blobPrefix;
+            }),
+            blobItems: internalResponse.segment.blobItems.map((blobItemInteral) => {
+                const blobItem = Object.assign(Object.assign({}, blobItemInteral), { name: BlobNameToString(blobItemInteral.name) });
+                return blobItem;
+            }),
+        } });
+}
+function* ExtractPageRangeInfoItems(getPageRangesSegment) {
+    let pageRange = [];
+    let clearRange = [];
+    if (getPageRangesSegment.pageRange)
+        pageRange = getPageRangesSegment.pageRange;
+    if (getPageRangesSegment.clearRange)
+        clearRange = getPageRangesSegment.clearRange;
+    let pageRangeIndex = 0;
+    let clearRangeIndex = 0;
+    while (pageRangeIndex < pageRange.length && clearRangeIndex < clearRange.length) {
+        if (pageRange[pageRangeIndex].start < clearRange[clearRangeIndex].start) {
+            yield {
+                start: pageRange[pageRangeIndex].start,
+                end: pageRange[pageRangeIndex].end,
+                isClear: false,
+            };
+            ++pageRangeIndex;
+        }
+        else {
+            yield {
+                start: clearRange[clearRangeIndex].start,
+                end: clearRange[clearRangeIndex].end,
+                isClear: true,
+            };
+            ++clearRangeIndex;
+        }
+    }
+    for (; pageRangeIndex < pageRange.length; ++pageRangeIndex) {
+        yield {
+            start: pageRange[pageRangeIndex].start,
+            end: pageRange[pageRangeIndex].end,
+            isClear: false,
+        };
+    }
+    for (; clearRangeIndex < clearRange.length; ++clearRangeIndex) {
+        yield {
+            start: clearRange[clearRangeIndex].start,
+            end: clearRange[clearRangeIndex].end,
+            isClear: true,
+        };
+    }
+}
+/**
+ * Escape the blobName but keep path separator ('/').
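+ *
+ * For example (illustrative):
+ * ```ts
+ * EscapePath("dir 1/dir 2/file:name"); // => "dir%201/dir%202/file%3Aname"
+ * ```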
+ */
+function EscapePath(blobName) {
+    const split = blobName.split("/");
+    for (let i = 0; i < split.length; i++) {
+        split[i] = encodeURIComponent(split[i]);
+    }
+    return split.join("/");
+}
+/**
+ * A typesafe helper for ensuring that a given response object has
+ * the original _response attached.
+ * @param response - A response object from calling a client operation
+ * @returns The same object, but with known _response property
+ */
+function assertResponse(response) {
+    if (`_response` in response) {
+        return response;
+    }
+    throw new TypeError(`Unexpected response object ${response}`);
+}
+
+// Copyright (c) Microsoft Corporation.
+// Licensed under the MIT License.
+/**
+ * RetryPolicy types.
+ */
+exports.StorageRetryPolicyType = void 0;
+(function (StorageRetryPolicyType) {
+    /**
+     * Exponential retry. Retry time delay grows exponentially.
+     */
+    StorageRetryPolicyType[StorageRetryPolicyType["EXPONENTIAL"] = 0] = "EXPONENTIAL";
+    /**
+     * Linear retry. Retry time delay grows linearly.
+     */
+    StorageRetryPolicyType[StorageRetryPolicyType["FIXED"] = 1] = "FIXED";
+})(exports.StorageRetryPolicyType || (exports.StorageRetryPolicyType = {}));
+// Default values of StorageRetryOptions
+const DEFAULT_RETRY_OPTIONS$1 = {
+    maxRetryDelayInMs: 120 * 1000,
+    maxTries: 4,
+    retryDelayInMs: 4 * 1000,
+    retryPolicyType: exports.StorageRetryPolicyType.EXPONENTIAL,
+    secondaryHost: "",
+    tryTimeoutInMs: undefined, // Use server side default timeout strategy
+};
+const RETRY_ABORT_ERROR$1 = new abortController.AbortError("The operation was aborted.");
+/**
+ * Retry policy with exponential retry and linear retry implemented.
+ */
+class StorageRetryPolicy extends BaseRequestPolicy {
+    /**
+     * Creates an instance of RetryPolicy.
+     *
+     * @param nextPolicy -
+     * @param options -
+     * @param retryOptions -
+     */
+    constructor(nextPolicy, options, retryOptions = DEFAULT_RETRY_OPTIONS$1) {
+        super(nextPolicy, options);
+        // Initialize retry options
+        this.retryOptions = {
+            retryPolicyType: retryOptions.retryPolicyType
+                ? retryOptions.retryPolicyType
+                : DEFAULT_RETRY_OPTIONS$1.retryPolicyType,
+            maxTries: retryOptions.maxTries && retryOptions.maxTries >= 1
+                ? Math.floor(retryOptions.maxTries)
+                : DEFAULT_RETRY_OPTIONS$1.maxTries,
+            tryTimeoutInMs: retryOptions.tryTimeoutInMs && retryOptions.tryTimeoutInMs >= 0
+                ? retryOptions.tryTimeoutInMs
+                : DEFAULT_RETRY_OPTIONS$1.tryTimeoutInMs,
+            retryDelayInMs: retryOptions.retryDelayInMs && retryOptions.retryDelayInMs >= 0
+                ? Math.min(retryOptions.retryDelayInMs, retryOptions.maxRetryDelayInMs
+                    ? retryOptions.maxRetryDelayInMs
+                    : DEFAULT_RETRY_OPTIONS$1.maxRetryDelayInMs)
+                : DEFAULT_RETRY_OPTIONS$1.retryDelayInMs,
+            maxRetryDelayInMs: retryOptions.maxRetryDelayInMs && retryOptions.maxRetryDelayInMs >= 0
+                ? retryOptions.maxRetryDelayInMs
+                : DEFAULT_RETRY_OPTIONS$1.maxRetryDelayInMs,
+            secondaryHost: retryOptions.secondaryHost
+                ? retryOptions.secondaryHost
+                : DEFAULT_RETRY_OPTIONS$1.secondaryHost,
+        };
+    }
+    /**
+     * Sends request.
+     *
+     * @param request -
+     */
+    async sendRequest(request) {
+        return this.attemptSendRequest(request, false, 1);
+    }
+    /**
+     * Decide and perform next retry. Won't mutate request parameter.
+     *
+     * @param request -
+     * @param secondaryHas404 -  If attempt was against the secondary & it returned a StatusNotFound (404), then
+     *                                   the resource was not found. This may be due to replication delay. So, in this
+     *                                   case, we'll never try the secondary again for this operation.
+     * @param attempt -           How many attempts have been made, starting from 1, including the attempt
+     *                                   that will be performed by this method call.
+     */
+    async attemptSendRequest(request, secondaryHas404, attempt) {
+        const newRequest = request.clone();
+        const isPrimaryRetry = secondaryHas404 ||
+            !this.retryOptions.secondaryHost ||
+            !(request.method === "GET" || request.method === "HEAD" || request.method === "OPTIONS") ||
+            attempt % 2 === 1;
+        if (!isPrimaryRetry) {
+            newRequest.url = setURLHost(newRequest.url, this.retryOptions.secondaryHost);
+        }
+        // Set the server-side timeout query parameter "timeout=[seconds]"
+        if (this.retryOptions.tryTimeoutInMs) {
+            newRequest.url = setURLParameter(newRequest.url, URLConstants.Parameters.TIMEOUT, Math.floor(this.retryOptions.tryTimeoutInMs / 1000).toString());
+        }
+        let response;
+        try {
+            logger.info(`RetryPolicy: =====> Try=${attempt} ${isPrimaryRetry ? "Primary" : "Secondary"}`);
+            response = await this._nextPolicy.sendRequest(newRequest);
+            if (!this.shouldRetry(isPrimaryRetry, attempt, response)) {
+                return response;
+            }
+            secondaryHas404 = secondaryHas404 || (!isPrimaryRetry && response.status === 404);
+        }
+        catch (err) {
+            logger.error(`RetryPolicy: Caught error, message: ${err.message}, code: ${err.code}`);
+            if (!this.shouldRetry(isPrimaryRetry, attempt, response, err)) {
+                throw err;
+            }
+        }
+        await this.delay(isPrimaryRetry, attempt, request.abortSignal);
+        return this.attemptSendRequest(request, secondaryHas404, ++attempt);
+    }
+    /**
+     * Decide whether to retry according to last HTTP response and retry counters.
+     *
+     * @param isPrimaryRetry -
+     * @param attempt -
+     * @param response -
+     * @param err -
+     */
+    shouldRetry(isPrimaryRetry, attempt, response, err) {
+        if (attempt >= this.retryOptions.maxTries) {
+            logger.info(`RetryPolicy: Attempt(s) ${attempt} >= maxTries ${this.retryOptions
+                .maxTries}, no further try.`);
+            return false;
+        }
+        // Handle network failures; you may need to customize the list when you implement
+        // your own HTTP client
+        const retriableErrors = [
+            "ETIMEDOUT",
+            "ESOCKETTIMEDOUT",
+            "ECONNREFUSED",
+            "ECONNRESET",
+            "ENOENT",
+            "ENOTFOUND",
+            "TIMEOUT",
+            "EPIPE",
+            "REQUEST_SEND_ERROR", // For default xhr based http client provided in ms-rest-js
+        ];
+        if (err) {
+            for (const retriableError of retriableErrors) {
+                if (err.name.toUpperCase().includes(retriableError) ||
+                    err.message.toUpperCase().includes(retriableError) ||
+                    (err.code && err.code.toString().toUpperCase() === retriableError)) {
+                    logger.info(`RetryPolicy: Network error ${retriableError} found, will retry.`);
+                    return true;
+                }
+            }
+        }
+        // If attempt was against the secondary & it returned a StatusNotFound (404), then
+        // the resource was not found. This may be due to replication delay. So, in this
+        // case, we'll never try the secondary again for this operation.
+        if (response || err) {
+            const statusCode = response ? response.status : err ? err.statusCode : 0;
+            if (!isPrimaryRetry && statusCode === 404) {
+                logger.info(`RetryPolicy: Secondary access with 404, will retry.`);
+                return true;
+            }
+            // Server internal error or server timeout
+            if (statusCode === 503 || statusCode === 500) {
+                logger.info(`RetryPolicy: Will retry for status code ${statusCode}.`);
+                return true;
+            }
+        }
+        // [Copy source error code] Feature is pending on service side, skip retry on copy source error for now.
+        // if (response) {
+        //   // Retry select Copy Source Error Codes.
+        //   if (response?.status >= 400) {
+        //     const copySourceError = response.headers.get(HeaderConstants.X_MS_CopySourceErrorCode);
+        //     if (copySourceError !== undefined) {
+        //       switch (copySourceError) {
+        //         case "InternalError":
+        //         case "OperationTimedOut":
+        //         case "ServerBusy":
+        //           return true;
+        //       }
+        //     }
+        //   }
+        // }
+        if ((err === null || err === void 0 ? void 0 : err.code) === "PARSE_ERROR" && (err === null || err === void 0 ? void 0 : err.message.startsWith(`Error "Error: Unclosed root tag`))) {
+            logger.info("RetryPolicy: Incomplete XML response likely due to service timeout, will retry.");
+            return true;
+        }
+        return false;
+    }
+    /**
+     * Delay a calculated time between retries.
+     *
+     * @param isPrimaryRetry -
+     * @param attempt -
+     * @param abortSignal -
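+     *
+     * For example, with the default retryDelayInMs of 4 * 1000 and maxRetryDelayInMs of 120 * 1000,
+     * an EXPONENTIAL primary retry waits min((2^(attempt - 1) - 1) * 4000, 120000) ms, i.e. roughly
+     * 0 ms, 4 s, 12 s and 28 s for attempts 1 through 4, while a secondary retry waits a random
+     * delay of up to 1 second.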
+     */
+    async delay(isPrimaryRetry, attempt, abortSignal) {
+        let delayTimeInMs = 0;
+        if (isPrimaryRetry) {
+            switch (this.retryOptions.retryPolicyType) {
+                case exports.StorageRetryPolicyType.EXPONENTIAL:
+                    delayTimeInMs = Math.min((Math.pow(2, attempt - 1) - 1) * this.retryOptions.retryDelayInMs, this.retryOptions.maxRetryDelayInMs);
+                    break;
+                case exports.StorageRetryPolicyType.FIXED:
+                    delayTimeInMs = this.retryOptions.retryDelayInMs;
+                    break;
+            }
+        }
+        else {
+            delayTimeInMs = Math.random() * 1000;
+        }
+        logger.info(`RetryPolicy: Delay for ${delayTimeInMs}ms`);
+        return delay(delayTimeInMs, abortSignal, RETRY_ABORT_ERROR$1);
+    }
+}
+
+// Copyright (c) Microsoft Corporation.
+// Licensed under the MIT License.
+/**
+ * StorageRetryPolicyFactory is a factory class that helps generate {@link StorageRetryPolicy} objects.
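+ *
+ * A minimal configuration sketch using the option names accepted by {@link StorageRetryPolicy}
+ * (the values below are arbitrary examples; StorageRetryPolicyType is the enum exported by this module):
+ * ```ts
+ * const retryPolicyFactory = new StorageRetryPolicyFactory({
+ *   retryPolicyType: StorageRetryPolicyType.EXPONENTIAL,
+ *   maxTries: 5,
+ *   retryDelayInMs: 2 * 1000,
+ *   maxRetryDelayInMs: 60 * 1000,
+ * });
+ * ```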
+ */
+class StorageRetryPolicyFactory {
+    /**
+     * Creates an instance of StorageRetryPolicyFactory.
+     * @param retryOptions -
+     */
+    constructor(retryOptions) {
+        this.retryOptions = retryOptions;
+    }
+    /**
+     * Creates a StorageRetryPolicy object.
+     *
+     * @param nextPolicy -
+     * @param options -
+     */
+    create(nextPolicy, options) {
+        return new StorageRetryPolicy(nextPolicy, options, this.retryOptions);
+    }
+}
+
+// Copyright (c) Microsoft Corporation.
+// Licensed under the MIT License.
+/**
+ * Credential policy used to sign HTTP(S) requests before sending. This is an
+ * abstract class.
+ */
+class CredentialPolicy extends BaseRequestPolicy {
+    /**
+     * Sends out request.
+     *
+     * @param request -
+     */
+    sendRequest(request) {
+        return this._nextPolicy.sendRequest(this.signRequest(request));
+    }
+    /**
+     * Child classes must implement this method with request signing. This method
+     * will be executed in {@link sendRequest}.
+     *
+     * @param request -
+     */
+    signRequest(request) {
+        // Child classes must override this method with request signing. This method
+        // will be executed in sendRequest().
+        return request;
+    }
+}
+
+// Copyright (c) Microsoft Corporation.
+// Licensed under the MIT License.
+/*
+ * We need to imitate .NET culture-aware sorting, which is used by the storage service.
+ * The tables below contain sort keys for the en-US culture.
+ */
+const table_lv0 = new Uint32Array([
+    0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0,
+    0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x71c, 0x0, 0x71f, 0x721,
+    0x723, 0x725, 0x0, 0x0, 0x0, 0x72d, 0x803, 0x0, 0x0, 0x733, 0x0, 0xd03, 0xd1a, 0xd1c, 0xd1e,
+    0xd20, 0xd22, 0xd24, 0xd26, 0xd28, 0xd2a, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0xe02, 0xe09, 0xe0a,
+    0xe1a, 0xe21, 0xe23, 0xe25, 0xe2c, 0xe32, 0xe35, 0xe36, 0xe48, 0xe51, 0xe70, 0xe7c, 0xe7e, 0xe89,
+    0xe8a, 0xe91, 0xe99, 0xe9f, 0xea2, 0xea4, 0xea6, 0xea7, 0xea9, 0x0, 0x0, 0x0, 0x743, 0x744, 0x748,
+    0xe02, 0xe09, 0xe0a, 0xe1a, 0xe21, 0xe23, 0xe25, 0xe2c, 0xe32, 0xe35, 0xe36, 0xe48, 0xe51, 0xe70,
+    0xe7c, 0xe7e, 0xe89, 0xe8a, 0xe91, 0xe99, 0xe9f, 0xea2, 0xea4, 0xea6, 0xea7, 0xea9, 0x0, 0x74c,
+    0x0, 0x750, 0x0,
+]);
+const table_lv2 = new Uint32Array([
+    0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0,
+    0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0,
+    0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0,
+    0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x12, 0x12, 0x12, 0x12, 0x12, 0x12, 0x12, 0x12, 0x12,
+    0x12, 0x12, 0x12, 0x12, 0x12, 0x12, 0x12, 0x12, 0x12, 0x12, 0x12, 0x12, 0x12, 0x12, 0x12, 0x12,
+    0x12, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0,
+    0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0,
+]);
+const table_lv4 = new Uint32Array([
+    0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0,
+    0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0,
+    0x0, 0x8012, 0x0, 0x0, 0x0, 0x0, 0x0, 0x8212, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0,
+    0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0,
+    0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0,
+    0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0,
+    0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0,
+]);
+function compareHeader(lhs, rhs) {
+    if (isLessThan(lhs, rhs))
+        return -1;
+    return 1;
+}
+function isLessThan(lhs, rhs) {
+    const tables = [table_lv0, table_lv2, table_lv4];
+    let curr_level = 0;
+    let i = 0;
+    let j = 0;
+    while (curr_level < tables.length) {
+        if (curr_level === tables.length - 1 && i !== j) {
+            return i > j;
+        }
+        const weight1 = i < lhs.length ? tables[curr_level][lhs[i].charCodeAt(0)] : 0x1;
+        const weight2 = j < rhs.length ? tables[curr_level][rhs[j].charCodeAt(0)] : 0x1;
+        if (weight1 === 0x1 && weight2 === 0x1) {
+            i = 0;
+            j = 0;
+            ++curr_level;
+        }
+        else if (weight1 === weight2) {
+            ++i;
+            ++j;
+        }
+        else if (weight1 === 0) {
+            ++i;
+        }
+        else if (weight2 === 0) {
+            ++j;
+        }
+        else {
+            return weight1 < weight2;
+        }
+    }
+    return false;
+}
+
+// Copyright (c) Microsoft Corporation.
+// Licensed under the MIT License.
+/**
+ * StorageSharedKeyCredentialPolicy is a policy used to sign HTTP requests with a shared key.
+ */
+class StorageSharedKeyCredentialPolicy extends CredentialPolicy {
+    /**
+     * Creates an instance of StorageSharedKeyCredentialPolicy.
+     * @param nextPolicy -
+     * @param options -
+     * @param factory -
+     */
+    constructor(nextPolicy, options, factory) {
+        super(nextPolicy, options);
+        this.factory = factory;
+    }
+    /**
+     * Signs request.
+     *
+     * @param request -
+     */
+    signRequest(request) {
+        request.headers.set(HeaderConstants.X_MS_DATE, new Date().toUTCString());
+        if (request.body &&
+            (typeof request.body === "string" || Buffer.isBuffer(request.body)) &&
+            request.body.length > 0) {
+            request.headers.set(HeaderConstants.CONTENT_LENGTH, Buffer.byteLength(request.body));
+        }
+        const stringToSign = [
+            request.method.toUpperCase(),
+            this.getHeaderValueToSign(request, HeaderConstants.CONTENT_LANGUAGE),
+            this.getHeaderValueToSign(request, HeaderConstants.CONTENT_ENCODING),
+            this.getHeaderValueToSign(request, HeaderConstants.CONTENT_LENGTH),
+            this.getHeaderValueToSign(request, HeaderConstants.CONTENT_MD5),
+            this.getHeaderValueToSign(request, HeaderConstants.CONTENT_TYPE),
+            this.getHeaderValueToSign(request, HeaderConstants.DATE),
+            this.getHeaderValueToSign(request, HeaderConstants.IF_MODIFIED_SINCE),
+            this.getHeaderValueToSign(request, HeaderConstants.IF_MATCH),
+            this.getHeaderValueToSign(request, HeaderConstants.IF_NONE_MATCH),
+            this.getHeaderValueToSign(request, HeaderConstants.IF_UNMODIFIED_SINCE),
+            this.getHeaderValueToSign(request, HeaderConstants.RANGE),
+        ].join("\n") +
+            "\n" +
+            this.getCanonicalizedHeadersString(request) +
+            this.getCanonicalizedResourceString(request);
+        const signature = this.factory.computeHMACSHA256(stringToSign);
+        request.headers.set(HeaderConstants.AUTHORIZATION, `SharedKey ${this.factory.accountName}:${signature}`);
+        // console.log(`[URL]:${request.url}`);
+        // console.log(`[HEADERS]:${request.headers.toString()}`);
+        // console.log(`[STRING TO SIGN]:${JSON.stringify(stringToSign)}`);
+        // console.log(`[KEY]: ${request.headers.get(HeaderConstants.AUTHORIZATION)}`);
+        return request;
+    }
+    /**
+     * Retrieve header value according to shared key sign rules.
+     * @see https://docs.microsoft.com/en-us/rest/api/storageservices/authenticate-with-shared-key
+     *
+     * @param request -
+     * @param headerName -
+     */
+    getHeaderValueToSign(request, headerName) {
+        const value = request.headers.get(headerName);
+        if (!value) {
+            return "";
+        }
+        // When using version 2015-02-21 or later, if Content-Length is zero, then
+        // set the Content-Length part of the StringToSign to an empty string.
+        // https://docs.microsoft.com/en-us/rest/api/storageservices/authenticate-with-shared-key
+        if (headerName === HeaderConstants.CONTENT_LENGTH && value === "0") {
+            return "";
+        }
+        return value;
+    }
+    /**
+     * To construct the CanonicalizedHeaders portion of the signature string, follow these steps:
+     * 1. Retrieve all headers for the resource that begin with x-ms-, including the x-ms-date header.
+     * 2. Convert each HTTP header name to lowercase.
+     * 3. Sort the headers lexicographically by header name, in ascending order.
+     *    Each header may appear only once in the string.
+     * 4. Replace any linear whitespace in the header value with a single space.
+     * 5. Trim any whitespace around the colon in the header.
+     * 6. Finally, append a new-line character to each canonicalized header in the resulting list.
+     *    Construct the CanonicalizedHeaders string by concatenating all headers in this list into a single string.
+     *
+     * @param request -
+     */
+    getCanonicalizedHeadersString(request) {
+        let headersArray = request.headers.headersArray().filter((value) => {
+            return value.name.toLowerCase().startsWith(HeaderConstants.PREFIX_FOR_STORAGE);
+        });
+        headersArray.sort((a, b) => {
+            return compareHeader(a.name.toLowerCase(), b.name.toLowerCase());
+        });
+        // Remove duplicate headers
+        headersArray = headersArray.filter((value, index, array) => {
+            if (index > 0 && value.name.toLowerCase() === array[index - 1].name.toLowerCase()) {
+                return false;
+            }
+            return true;
+        });
+        let canonicalizedHeadersStringToSign = "";
+        headersArray.forEach((header) => {
+            canonicalizedHeadersStringToSign += `${header.name
+                .toLowerCase()
+                .trimRight()}:${header.value.trimLeft()}\n`;
+        });
+        return canonicalizedHeadersStringToSign;
+    }
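+    // Example (illustrative; the header values are hypothetical): a request carrying
+    //   x-ms-version: 2021-10-04  and  x-ms-date: Fri, 01 Jan 2021 00:00:00 GMT
+    // canonicalizes to
+    //   "x-ms-date:Fri, 01 Jan 2021 00:00:00 GMT\nx-ms-version:2021-10-04\n".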
+    /**
+     * Retrieves the webResource canonicalized resource string.
+     *
+     * @param request -
+     */
+    getCanonicalizedResourceString(request) {
+        const path = getURLPath(request.url) || "/";
+        let canonicalizedResourceString = "";
+        canonicalizedResourceString += `/${this.factory.accountName}${path}`;
+        const queries = getURLQueries(request.url);
+        const lowercaseQueries = {};
+        if (queries) {
+            const queryKeys = [];
+            for (const key in queries) {
+                if (Object.prototype.hasOwnProperty.call(queries, key)) {
+                    const lowercaseKey = key.toLowerCase();
+                    lowercaseQueries[lowercaseKey] = queries[key];
+                    queryKeys.push(lowercaseKey);
+                }
+            }
+            queryKeys.sort();
+            for (const key of queryKeys) {
+                canonicalizedResourceString += `\n${key}:${decodeURIComponent(lowercaseQueries[key])}`;
+            }
+        }
+        return canonicalizedResourceString;
+    }
+}
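+
+// The string-to-sign assembled by signRequest above therefore has the shape
+//   VERB \n Content-Language \n Content-Encoding \n Content-Length \n Content-MD5 \n
+//   Content-Type \n Date \n If-Modified-Since \n If-Match \n If-None-Match \n
+//   If-Unmodified-Since \n Range \n CanonicalizedHeaders CanonicalizedResource
+// with an empty string for every header the request does not carry (and for a
+// Content-Length of "0").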
+
+// Copyright (c) Microsoft Corporation.
+// Licensed under the MIT License.
+/**
+ * Credential is an abstract class for signing Azure Storage HTTP requests. This
+ * class hosts a credentialPolicyCreator factory which generates CredentialPolicy objects.
+ */
+class Credential {
+    /**
+     * Creates a RequestPolicy object.
+     *
+     * @param _nextPolicy -
+     * @param _options -
+     */
+    create(_nextPolicy, _options) {
+        throw new Error("Method should be implemented in children classes.");
+    }
+}
+
+// Copyright (c) Microsoft Corporation.
+// Licensed under the MIT License.
+/**
+ * ONLY AVAILABLE IN NODE.JS RUNTIME.
+ *
+ * StorageSharedKeyCredential for account key authorization of the Azure Storage service.
+ */
+class StorageSharedKeyCredential extends Credential {
+    /**
+     * Creates an instance of StorageSharedKeyCredential.
+     * @param accountName -
+     * @param accountKey -
+     */
+    constructor(accountName, accountKey) {
+        super();
+        this.accountName = accountName;
+        this.accountKey = Buffer.from(accountKey, "base64");
+    }
+    /**
+     * Creates a StorageSharedKeyCredentialPolicy object.
+     *
+     * @param nextPolicy -
+     * @param options -
+     */
+    create(nextPolicy, options) {
+        return new StorageSharedKeyCredentialPolicy(nextPolicy, options, this);
+    }
+    /**
+     * Generates a hash signature for an HTTP request or for a SAS.
+     *
+     * @param stringToSign -
+     */
+    computeHMACSHA256(stringToSign) {
+        return crypto.createHmac("sha256", this.accountKey).update(stringToSign, "utf8").digest("base64");
+    }
+}
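+
+// Example (illustrative; the account name and key below are placeholders): a client
+// typically constructs the credential once and passes it to newPipeline further down:
+//   const credential = new StorageSharedKeyCredential("myaccount", "<base64-account-key>");
+//   const pipeline = newPipeline(credential);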
+
+// Copyright (c) Microsoft Corporation.
+// Licensed under the MIT License.
+/**
+ * AnonymousCredentialPolicy is used with HTTP(S) requests that read public resources
+ * or for use with Shared Access Signatures (SAS).
+ */
+class AnonymousCredentialPolicy extends CredentialPolicy {
+    /**
+     * Creates an instance of AnonymousCredentialPolicy.
+     * @param nextPolicy -
+     * @param options -
+     */
+    // The base class has a protected constructor. Adding a public one to enable construction of this class.
+    /* eslint-disable-next-line @typescript-eslint/no-useless-constructor*/
+    constructor(nextPolicy, options) {
+        super(nextPolicy, options);
+    }
+}
+
+// Copyright (c) Microsoft Corporation.
+// Licensed under the MIT License.
+/**
+ * AnonymousCredential provides a credentialPolicyCreator member used to create
+ * AnonymousCredentialPolicy objects. AnonymousCredentialPolicy is used with
+ * HTTP(S) requests that read public resources or for use with Shared Access
+ * Signatures (SAS).
+ */
+class AnonymousCredential extends Credential {
+    /**
+     * Creates an {@link AnonymousCredentialPolicy} object.
+     *
+     * @param nextPolicy -
+     * @param options -
+     */
+    create(nextPolicy, options) {
+        return new AnonymousCredentialPolicy(nextPolicy, options);
+    }
+}
+
+// Copyright (c) Microsoft Corporation.
+// Licensed under the MIT License.
+let _defaultHttpClient;
+function getCachedDefaultHttpClient() {
+    if (!_defaultHttpClient) {
+        _defaultHttpClient = coreRestPipeline.createDefaultHttpClient();
+    }
+    return _defaultHttpClient;
+}
+
+// Copyright (c) Microsoft Corporation.
+// Licensed under the MIT License.
+/**
+ * The programmatic identifier of the StorageBrowserPolicy.
+ */
+const storageBrowserPolicyName = "storageBrowserPolicy";
+/**
+ * storageBrowserPolicy is a policy used to prevent browsers from caching requests
+ * and to remove cookies and explicit content-length headers.
+ */
+function storageBrowserPolicy() {
+    return {
+        name: storageBrowserPolicyName,
+        async sendRequest(request, next) {
+            if (coreUtil.isNode) {
+                return next(request);
+            }
+            if (request.method === "GET" || request.method === "HEAD") {
+                request.url = setURLParameter(request.url, URLConstants.Parameters.FORCE_BROWSER_NO_CACHE, new Date().getTime().toString());
+            }
+            request.headers.delete(HeaderConstants.COOKIE);
+            // According to XHR standards, content-length should be fully controlled by browsers
+            request.headers.delete(HeaderConstants.CONTENT_LENGTH);
+            return next(request);
+        },
+    };
+}
+
+// Copyright (c) Microsoft Corporation.
+// Licensed under the MIT License.
+/**
+ * Name of the {@link storageRetryPolicy}
+ */
+const storageRetryPolicyName = "storageRetryPolicy";
+/**
+ * RetryPolicy types.
+ */
+var StorageRetryPolicyType;
+(function (StorageRetryPolicyType) {
+    /**
+     * Exponential retry. Retry time delay grows exponentially.
+     */
+    StorageRetryPolicyType[StorageRetryPolicyType["EXPONENTIAL"] = 0] = "EXPONENTIAL";
+    /**
+     * Linear retry. Retry time delay grows linearly.
+     */
+    StorageRetryPolicyType[StorageRetryPolicyType["FIXED"] = 1] = "FIXED";
+})(StorageRetryPolicyType || (StorageRetryPolicyType = {}));
+// Default values of StorageRetryOptions
+const DEFAULT_RETRY_OPTIONS = {
+    maxRetryDelayInMs: 120 * 1000,
+    maxTries: 4,
+    retryDelayInMs: 4 * 1000,
+    retryPolicyType: StorageRetryPolicyType.EXPONENTIAL,
+    secondaryHost: "",
+    tryTimeoutInMs: undefined, // Use server side default timeout strategy
+};
+const retriableErrors = [
+    "ETIMEDOUT",
+    "ESOCKETTIMEDOUT",
+    "ECONNREFUSED",
+    "ECONNRESET",
+    "ENOENT",
+    "ENOTFOUND",
+    "TIMEOUT",
+    "EPIPE",
+    "REQUEST_SEND_ERROR",
+];
+const RETRY_ABORT_ERROR = new abortController.AbortError("The operation was aborted.");
+/**
+ * Retry policy with exponential retry and linear retry implemented.
+ */
+function storageRetryPolicy(options = {}) {
+    var _a, _b, _c, _d, _e, _f;
+    const retryPolicyType = (_a = options.retryPolicyType) !== null && _a !== void 0 ? _a : DEFAULT_RETRY_OPTIONS.retryPolicyType;
+    const maxTries = (_b = options.maxTries) !== null && _b !== void 0 ? _b : DEFAULT_RETRY_OPTIONS.maxTries;
+    const retryDelayInMs = (_c = options.retryDelayInMs) !== null && _c !== void 0 ? _c : DEFAULT_RETRY_OPTIONS.retryDelayInMs;
+    const maxRetryDelayInMs = (_d = options.maxRetryDelayInMs) !== null && _d !== void 0 ? _d : DEFAULT_RETRY_OPTIONS.maxRetryDelayInMs;
+    const secondaryHost = (_e = options.secondaryHost) !== null && _e !== void 0 ? _e : DEFAULT_RETRY_OPTIONS.secondaryHost;
+    const tryTimeoutInMs = (_f = options.tryTimeoutInMs) !== null && _f !== void 0 ? _f : DEFAULT_RETRY_OPTIONS.tryTimeoutInMs;
+    function shouldRetry({ isPrimaryRetry, attempt, response, error, }) {
+        var _a, _b;
+        if (attempt >= maxTries) {
+            logger.info(`RetryPolicy: Attempt(s) ${attempt} >= maxTries ${maxTries}, no further try.`);
+            return false;
+        }
+        if (error) {
+            for (const retriableError of retriableErrors) {
+                if (error.name.toUpperCase().includes(retriableError) ||
+                    error.message.toUpperCase().includes(retriableError) ||
+                    (error.code && error.code.toString().toUpperCase() === retriableError)) {
+                    logger.info(`RetryPolicy: Network error ${retriableError} found, will retry.`);
+                    return true;
+                }
+            }
+            if ((error === null || error === void 0 ? void 0 : error.code) === "PARSE_ERROR" &&
+                (error === null || error === void 0 ? void 0 : error.message.startsWith(`Error "Error: Unclosed root tag`))) {
+                logger.info("RetryPolicy: Incomplete XML response likely due to service timeout, will retry.");
+                return true;
+            }
+        }
+        // If attempt was against the secondary & it returned a StatusNotFound (404), then
+        // the resource was not found. This may be due to replication delay. So, in this
+        // case, we'll never try the secondary again for this operation.
+        if (response || error) {
+            const statusCode = (_b = (_a = response === null || response === void 0 ? void 0 : response.status) !== null && _a !== void 0 ? _a : error === null || error === void 0 ? void 0 : error.statusCode) !== null && _b !== void 0 ? _b : 0;
+            if (!isPrimaryRetry && statusCode === 404) {
+                logger.info(`RetryPolicy: Secondary access with 404, will retry.`);
+                return true;
+            }
+            // Server internal error or server timeout
+            if (statusCode === 503 || statusCode === 500) {
+                logger.info(`RetryPolicy: Will retry for status code ${statusCode}.`);
+                return true;
+            }
+        }
+        // [Copy source error code] Feature is pending on service side, skip retry on copy source error for now.
+        // if (response) {
+        //   // Retry select Copy Source Error Codes.
+        //   if (response?.status >= 400) {
+        //     const copySourceError = response.headers.get(HeaderConstants.X_MS_CopySourceErrorCode);
+        //     if (copySourceError !== undefined) {
+        //       switch (copySourceError) {
+        //         case "InternalError":
+        //         case "OperationTimedOut":
+        //         case "ServerBusy":
+        //           return true;
+        //       }
+        //     }
+        //   }
+        // }
+        return false;
+    }
+    function calculateDelay(isPrimaryRetry, attempt) {
+        let delayTimeInMs = 0;
+        if (isPrimaryRetry) {
+            switch (retryPolicyType) {
+                case StorageRetryPolicyType.EXPONENTIAL:
+                    delayTimeInMs = Math.min((Math.pow(2, attempt - 1) - 1) * retryDelayInMs, maxRetryDelayInMs);
+                    break;
+                case StorageRetryPolicyType.FIXED:
+                    delayTimeInMs = retryDelayInMs;
+                    break;
+            }
+        }
+        else {
+            delayTimeInMs = Math.random() * 1000;
+        }
+        logger.info(`RetryPolicy: Delay for ${delayTimeInMs}ms`);
+        return delayTimeInMs;
+    }
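+    // Worked example: with the defaults above (EXPONENTIAL, retryDelayInMs = 4000,
+    // maxRetryDelayInMs = 120000, maxTries = 4), primary retries are delayed by
+    // (2^(attempt - 1) - 1) * 4000 ms, i.e. 0 ms, 4000 ms and 12000 ms before tries
+    // 2, 3 and 4; secondary retries instead wait a random 0-1000 ms.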
+    return {
+        name: storageRetryPolicyName,
+        async sendRequest(request, next) {
+            // Set the server-side timeout query parameter "timeout=[seconds]"
+            if (tryTimeoutInMs) {
+                request.url = setURLParameter(request.url, URLConstants.Parameters.TIMEOUT, String(Math.floor(tryTimeoutInMs / 1000)));
+            }
+            const primaryUrl = request.url;
+            const secondaryUrl = secondaryHost ? setURLHost(request.url, secondaryHost) : undefined;
+            let secondaryHas404 = false;
+            let attempt = 1;
+            let retryAgain = true;
+            let response;
+            let error;
+            while (retryAgain) {
+                const isPrimaryRetry = secondaryHas404 ||
+                    !secondaryUrl ||
+                    !["GET", "HEAD", "OPTIONS"].includes(request.method) ||
+                    attempt % 2 === 1;
+                request.url = isPrimaryRetry ? primaryUrl : secondaryUrl;
+                response = undefined;
+                error = undefined;
+                try {
+                    logger.info(`RetryPolicy: =====> Try=${attempt} ${isPrimaryRetry ? "Primary" : "Secondary"}`);
+                    response = await next(request);
+                    secondaryHas404 = secondaryHas404 || (!isPrimaryRetry && response.status === 404);
+                }
+                catch (e) {
+                    if (coreRestPipeline.isRestError(e)) {
+                        logger.error(`RetryPolicy: Caught error, message: ${e.message}, code: ${e.code}`);
+                        error = e;
+                    }
+                    else {
+                        logger.error(`RetryPolicy: Caught error, message: ${coreUtil.getErrorMessage(e)}`);
+                        throw e;
+                    }
+                }
+                retryAgain = shouldRetry({ isPrimaryRetry, attempt, response, error });
+                if (retryAgain) {
+                    await delay(calculateDelay(isPrimaryRetry, attempt), request.abortSignal, RETRY_ABORT_ERROR);
+                }
+                attempt++;
+            }
+            if (response) {
+                return response;
+            }
+            throw error !== null && error !== void 0 ? error : new coreRestPipeline.RestError("RetryPolicy failed without known error.");
+        },
+    };
+}
+
+// Copyright (c) Microsoft Corporation.
+// Licensed under the MIT License.
+/**
+ * The programmatic identifier of the storageSharedKeyCredentialPolicy.
+ */
+const storageSharedKeyCredentialPolicyName = "storageSharedKeyCredentialPolicy";
+/**
+ * storageSharedKeyCredentialPolicy handles signing requests using storage account keys.
+ */
+function storageSharedKeyCredentialPolicy(options) {
+    function signRequest(request) {
+        request.headers.set(HeaderConstants.X_MS_DATE, new Date().toUTCString());
+        if (request.body &&
+            (typeof request.body === "string" || Buffer.isBuffer(request.body)) &&
+            request.body.length > 0) {
+            request.headers.set(HeaderConstants.CONTENT_LENGTH, Buffer.byteLength(request.body));
+        }
+        const stringToSign = [
+            request.method.toUpperCase(),
+            getHeaderValueToSign(request, HeaderConstants.CONTENT_LANGUAGE),
+            getHeaderValueToSign(request, HeaderConstants.CONTENT_ENCODING),
+            getHeaderValueToSign(request, HeaderConstants.CONTENT_LENGTH),
+            getHeaderValueToSign(request, HeaderConstants.CONTENT_MD5),
+            getHeaderValueToSign(request, HeaderConstants.CONTENT_TYPE),
+            getHeaderValueToSign(request, HeaderConstants.DATE),
+            getHeaderValueToSign(request, HeaderConstants.IF_MODIFIED_SINCE),
+            getHeaderValueToSign(request, HeaderConstants.IF_MATCH),
+            getHeaderValueToSign(request, HeaderConstants.IF_NONE_MATCH),
+            getHeaderValueToSign(request, HeaderConstants.IF_UNMODIFIED_SINCE),
+            getHeaderValueToSign(request, HeaderConstants.RANGE),
+        ].join("\n") +
+            "\n" +
+            getCanonicalizedHeadersString(request) +
+            getCanonicalizedResourceString(request);
+        const signature = crypto.createHmac("sha256", options.accountKey)
+            .update(stringToSign, "utf8")
+            .digest("base64");
+        request.headers.set(HeaderConstants.AUTHORIZATION, `SharedKey ${options.accountName}:${signature}`);
+        // console.log(`[URL]:${request.url}`);
+        // console.log(`[HEADERS]:${request.headers.toString()}`);
+        // console.log(`[STRING TO SIGN]:${JSON.stringify(stringToSign)}`);
+        // console.log(`[KEY]: ${request.headers.get(HeaderConstants.AUTHORIZATION)}`);
+    }
+    /**
+     * Retrieve header value according to shared key sign rules.
+     * @see https://docs.microsoft.com/en-us/rest/api/storageservices/authenticate-with-shared-key
+     */
+    function getHeaderValueToSign(request, headerName) {
+        const value = request.headers.get(headerName);
+        if (!value) {
+            return "";
+        }
+        // When using version 2015-02-21 or later, if Content-Length is zero, then
+        // set the Content-Length part of the StringToSign to an empty string.
+        // https://docs.microsoft.com/en-us/rest/api/storageservices/authenticate-with-shared-key
+        if (headerName === HeaderConstants.CONTENT_LENGTH && value === "0") {
+            return "";
+        }
+        return value;
+    }
+    /**
+     * To construct the CanonicalizedHeaders portion of the signature string, follow these steps:
+     * 1. Retrieve all headers for the resource that begin with x-ms-, including the x-ms-date header.
+     * 2. Convert each HTTP header name to lowercase.
+     * 3. Sort the headers lexicographically by header name, in ascending order.
+     *    Each header may appear only once in the string.
+     * 4. Replace any linear whitespace in the header value with a single space.
+     * 5. Trim any whitespace around the colon in the header.
+     * 6. Finally, append a new-line character to each canonicalized header in the resulting list.
+     *    Construct the CanonicalizedHeaders string by concatenating all headers in this list into a single string.
+     *
+     */
+    function getCanonicalizedHeadersString(request) {
+        let headersArray = [];
+        for (const [name, value] of request.headers) {
+            if (name.toLowerCase().startsWith(HeaderConstants.PREFIX_FOR_STORAGE)) {
+                headersArray.push({ name, value });
+            }
+        }
+        headersArray.sort((a, b) => {
+            return compareHeader(a.name.toLowerCase(), b.name.toLowerCase());
+        });
+        // Remove duplicate headers
+        headersArray = headersArray.filter((value, index, array) => {
+            if (index > 0 && value.name.toLowerCase() === array[index - 1].name.toLowerCase()) {
+                return false;
+            }
+            return true;
+        });
+        let canonicalizedHeadersStringToSign = "";
+        headersArray.forEach((header) => {
+            canonicalizedHeadersStringToSign += `${header.name
+                .toLowerCase()
+                .trimRight()}:${header.value.trimLeft()}\n`;
+        });
+        return canonicalizedHeadersStringToSign;
+    }
+    function getCanonicalizedResourceString(request) {
+        const path = getURLPath(request.url) || "/";
+        let canonicalizedResourceString = "";
+        canonicalizedResourceString += `/${options.accountName}${path}`;
+        const queries = getURLQueries(request.url);
+        const lowercaseQueries = {};
+        if (queries) {
+            const queryKeys = [];
+            for (const key in queries) {
+                if (Object.prototype.hasOwnProperty.call(queries, key)) {
+                    const lowercaseKey = key.toLowerCase();
+                    lowercaseQueries[lowercaseKey] = queries[key];
+                    queryKeys.push(lowercaseKey);
+                }
+            }
+            queryKeys.sort();
+            for (const key of queryKeys) {
+                canonicalizedResourceString += `\n${key}:${decodeURIComponent(lowercaseQueries[key])}`;
+            }
+        }
+        return canonicalizedResourceString;
+    }
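+    // Example (illustrative; the account and container names are placeholders): for
+    //   https://myaccount.blob.core.windows.net/mycontainer?comp=list&restype=container
+    // getCanonicalizedResourceString returns
+    //   "/myaccount/mycontainer\ncomp:list\nrestype:container".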
+    return {
+        name: storageSharedKeyCredentialPolicyName,
+        async sendRequest(request, next) {
+            signRequest(request);
+            return next(request);
+        },
+    };
+}
+
+// Copyright (c) Microsoft Corporation.
+// Licensed under the MIT License.
+/**
+ * StorageBrowserPolicy handles differences between the Node.js and browser runtimes, including:
+ *
+ * 1. Browsers cache GET/HEAD requests by adding conditional headers such as 'IF_MODIFIED_SINCE'.
+ * StorageBrowserPolicy adds a timestamp query parameter to GET/HEAD request URLs,
+ * thus avoiding the browser cache.
+ *
+ * 2. Removing the cookie header for security.
+ *
+ * 3. Removing the content-length header to avoid browser warnings.
+ */
+class StorageBrowserPolicy extends BaseRequestPolicy {
+    /**
+     * Creates an instance of StorageBrowserPolicy.
+     * @param nextPolicy -
+     * @param options -
+     */
+    // The base class has a protected constructor. Adding a public one to enable construction of this class.
+    /* eslint-disable-next-line @typescript-eslint/no-useless-constructor*/
+    constructor(nextPolicy, options) {
+        super(nextPolicy, options);
+    }
+    /**
+     * Sends out request.
+     *
+     * @param request -
+     */
+    async sendRequest(request) {
+        if (coreUtil.isNode) {
+            return this._nextPolicy.sendRequest(request);
+        }
+        if (request.method.toUpperCase() === "GET" || request.method.toUpperCase() === "HEAD") {
+            request.url = setURLParameter(request.url, URLConstants.Parameters.FORCE_BROWSER_NO_CACHE, new Date().getTime().toString());
+        }
+        request.headers.remove(HeaderConstants.COOKIE);
+        // According to XHR standards, content-length should be fully controlled by browsers
+        request.headers.remove(HeaderConstants.CONTENT_LENGTH);
+        return this._nextPolicy.sendRequest(request);
+    }
+}
+
+// Copyright (c) Microsoft Corporation.
+// Licensed under the MIT License.
+/**
+ * StorageBrowserPolicyFactory is a factory class that generates StorageBrowserPolicy objects.
+ */
+class StorageBrowserPolicyFactory {
+    /**
+     * Creates a StorageBrowserPolicyFactory object.
+     *
+     * @param nextPolicy -
+     * @param options -
+     */
+    create(nextPolicy, options) {
+        return new StorageBrowserPolicy(nextPolicy, options);
+    }
+}
+
+// Copyright (c) Microsoft Corporation.
+// Licensed under the MIT License.
+/**
+ * The programmatic identifier of the storageCorrectContentLengthPolicy.
+ */
+const storageCorrectContentLengthPolicyName = "StorageCorrectContentLengthPolicy";
+/**
+ * storageCorrectContentLengthPolicy correctly sets the Content-Length header based on the request body length.
+ */
+function storageCorrectContentLengthPolicy() {
+    function correctContentLength(request) {
+        if (request.body &&
+            (typeof request.body === "string" || Buffer.isBuffer(request.body)) &&
+            request.body.length > 0) {
+            request.headers.set(HeaderConstants.CONTENT_LENGTH, Buffer.byteLength(request.body));
+        }
+    }
+    return {
+        name: storageCorrectContentLengthPolicyName,
+        async sendRequest(request, next) {
+            correctContentLength(request);
+            return next(request);
+        },
+    };
+}
+
+// Copyright (c) Microsoft Corporation.
+// Licensed under the MIT License.
+/**
+ * A helper to decide if a given argument satisfies the Pipeline contract
+ * @param pipeline - An argument that may be a Pipeline
+ * @returns true when the argument satisfies the Pipeline contract
+ */
+function isPipelineLike(pipeline) {
+    if (!pipeline || typeof pipeline !== "object") {
+        return false;
+    }
+    const castPipeline = pipeline;
+    return (Array.isArray(castPipeline.factories) &&
+        typeof castPipeline.options === "object" &&
+        typeof castPipeline.toServiceClientOptions === "function");
+}
+/**
+ * A Pipeline class containing HTTP request policies.
+ * You can create a default Pipeline by calling {@link newPipeline}.
+ * Or you can create a Pipeline with your own policies via the Pipeline constructor.
+ *
+ * Refer to {@link newPipeline} and the provided policies before implementing your
+ * customized Pipeline.
+ */
+class Pipeline {
+    /**
+     * Creates an instance of Pipeline. Customize the HTTP client by implementing the IHttpClient interface.
+     *
+     * @param factories -
+     * @param options -
+     */
+    constructor(factories, options = {}) {
+        this.factories = factories;
+        this.options = options;
+    }
+    /**
+     * Transfers the Pipeline object to a ServiceClientOptions object, which is required by
+     * the ServiceClient constructor.
+     *
+     * @returns The ServiceClientOptions object from this Pipeline.
+     */
+    toServiceClientOptions() {
+        return {
+            httpClient: this.options.httpClient,
+            requestPolicyFactories: this.factories,
+        };
+    }
+}
+/**
+ * Creates a new Pipeline object with Credential provided.
+ *
+ * @param credential -  Such as AnonymousCredential, StorageSharedKeyCredential or any credential from the `@azure/identity` package to authenticate requests to the service. You can also provide an object that implements the TokenCredential interface. If not specified, AnonymousCredential is used.
+ * @param pipelineOptions - Optional. Options.
+ * @returns A new Pipeline object.
+ */
+function newPipeline(credential, pipelineOptions = {}) {
+    if (!credential) {
+        credential = new AnonymousCredential();
+    }
+    const pipeline = new Pipeline([], pipelineOptions);
+    pipeline._credential = credential;
+    return pipeline;
+}
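+// Example (illustrative): callers usually build the pipeline once and share it across
+// clients; omitting the credential falls back to AnonymousCredential, per the check above.
+//   const sharedPipeline = newPipeline(new AnonymousCredential(), { retryOptions: { maxTries: 5 } });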
+function processDownlevelPipeline(pipeline) {
+    const knownFactoryFunctions = [
+        isAnonymousCredential,
+        isStorageSharedKeyCredential,
+        isCoreHttpBearerTokenFactory,
+        isStorageBrowserPolicyFactory,
+        isStorageRetryPolicyFactory,
+        isStorageTelemetryPolicyFactory,
+        isCoreHttpPolicyFactory,
+    ];
+    if (pipeline.factories.length) {
+        const novelFactories = pipeline.factories.filter((factory) => {
+            return !knownFactoryFunctions.some((knownFactory) => knownFactory(factory));
+        });
+        if (novelFactories.length) {
+            const hasInjector = novelFactories.some((factory) => isInjectorPolicyFactory(factory));
+            // if there are any left over, wrap in a requestPolicyFactoryPolicy
+            return {
+                wrappedPolicies: coreHttpCompat.createRequestPolicyFactoryPolicy(novelFactories),
+                afterRetry: hasInjector,
+            };
+        }
+    }
+    return undefined;
+}
+function getCoreClientOptions(pipeline) {
+    var _a;
+    const _b = pipeline.options, { httpClient: v1Client } = _b, restOptions = tslib.__rest(_b, ["httpClient"]);
+    let httpClient = pipeline._coreHttpClient;
+    if (!httpClient) {
+        httpClient = v1Client ? coreHttpCompat.convertHttpClient(v1Client) : getCachedDefaultHttpClient();
+        pipeline._coreHttpClient = httpClient;
+    }
+    let corePipeline = pipeline._corePipeline;
+    if (!corePipeline) {
+        const packageDetails = `azsdk-js-azure-storage-blob/${SDK_VERSION}`;
+        const userAgentPrefix = restOptions.userAgentOptions && restOptions.userAgentOptions.userAgentPrefix
+            ? `${restOptions.userAgentOptions.userAgentPrefix} ${packageDetails}`
+            : `${packageDetails}`;
+        corePipeline = coreClient.createClientPipeline(Object.assign(Object.assign({}, restOptions), { loggingOptions: {
+                additionalAllowedHeaderNames: StorageBlobLoggingAllowedHeaderNames,
+                additionalAllowedQueryParameters: StorageBlobLoggingAllowedQueryParameters,
+                logger: logger.info,
+            }, userAgentOptions: {
+                userAgentPrefix,
+            }, serializationOptions: {
+                stringifyXML: coreXml.stringifyXML,
+                serializerOptions: {
+                    xml: {
+                        // Use customized XML char key of "#" so we can deserialize metadata
+                        // with "_" key
+                        xmlCharKey: "#",
+                    },
+                },
+            }, deserializationOptions: {
+                parseXML: coreXml.parseXML,
+                serializerOptions: {
+                    xml: {
+                        // Use customized XML char key of "#" so we can deserialize metadata
+                        // with "_" key
+                        xmlCharKey: "#",
+                    },
+                },
+            } }));
+        corePipeline.removePolicy({ phase: "Retry" });
+        corePipeline.removePolicy({ name: coreRestPipeline.decompressResponsePolicyName });
+        corePipeline.addPolicy(storageCorrectContentLengthPolicy());
+        corePipeline.addPolicy(storageRetryPolicy(restOptions.retryOptions), { phase: "Retry" });
+        corePipeline.addPolicy(storageBrowserPolicy());
+        const downlevelResults = processDownlevelPipeline(pipeline);
+        if (downlevelResults) {
+            corePipeline.addPolicy(downlevelResults.wrappedPolicies, downlevelResults.afterRetry ? { afterPhase: "Retry" } : undefined);
+        }
+        const credential = getCredentialFromPipeline(pipeline);
+        if (coreAuth.isTokenCredential(credential)) {
+            corePipeline.addPolicy(coreRestPipeline.bearerTokenAuthenticationPolicy({
+                credential,
+                scopes: (_a = restOptions.audience) !== null && _a !== void 0 ? _a : StorageOAuthScopes,
+                challengeCallbacks: { authorizeRequestOnChallenge: coreClient.authorizeRequestOnTenantChallenge },
+            }), { phase: "Sign" });
+        }
+        else if (credential instanceof StorageSharedKeyCredential) {
+            corePipeline.addPolicy(storageSharedKeyCredentialPolicy({
+                accountName: credential.accountName,
+                accountKey: credential.accountKey,
+            }), { phase: "Sign" });
+        }
+        pipeline._corePipeline = corePipeline;
+    }
+    return Object.assign(Object.assign({}, restOptions), { allowInsecureConnection: true, httpClient, pipeline: corePipeline });
+}
+function getCredentialFromPipeline(pipeline) {
+    // see if we squirreled one away on the type itself
+    if (pipeline._credential) {
+        return pipeline._credential;
+    }
+    // if it came from another package, loop over the factories and look for one like before
+    let credential = new AnonymousCredential();
+    for (const factory of pipeline.factories) {
+        if (coreAuth.isTokenCredential(factory.credential)) {
+            // Only works if a "credential" property has been attached to the factory.
+            // We do that in newPipeline() when using TokenCredential.
+            credential = factory.credential;
+        }
+        else if (isStorageSharedKeyCredential(factory)) {
+            return factory;
+        }
+    }
+    return credential;
+}
+function isStorageSharedKeyCredential(factory) {
+    if (factory instanceof StorageSharedKeyCredential) {
+        return true;
+    }
+    return factory.constructor.name === "StorageSharedKeyCredential";
+}
+function isAnonymousCredential(factory) {
+    if (factory instanceof AnonymousCredential) {
+        return true;
+    }
+    return factory.constructor.name === "AnonymousCredential";
+}
+function isCoreHttpBearerTokenFactory(factory) {
+    return coreAuth.isTokenCredential(factory.credential);
+}
+function isStorageBrowserPolicyFactory(factory) {
+    if (factory instanceof StorageBrowserPolicyFactory) {
+        return true;
+    }
+    return factory.constructor.name === "StorageBrowserPolicyFactory";
+}
+function isStorageRetryPolicyFactory(factory) {
+    if (factory instanceof StorageRetryPolicyFactory) {
+        return true;
+    }
+    return factory.constructor.name === "StorageRetryPolicyFactory";
+}
+function isStorageTelemetryPolicyFactory(factory) {
+    return factory.constructor.name === "TelemetryPolicyFactory";
+}
+function isInjectorPolicyFactory(factory) {
+    return factory.constructor.name === "InjectorPolicyFactory";
+}
+function isCoreHttpPolicyFactory(factory) {
+    const knownPolicies = [
+        "GenerateClientRequestIdPolicy",
+        "TracingPolicy",
+        "LogPolicy",
+        "ProxyPolicy",
+        "DisableResponseDecompressionPolicy",
+        "KeepAlivePolicy",
+        "DeserializationPolicy",
+    ];
+    const mockHttpClient = {
+        sendRequest: async (request) => {
+            return {
+                request,
+                headers: request.headers.clone(),
+                status: 500,
+            };
+        },
+    };
+    const mockRequestPolicyOptions = {
+        log(_logLevel, _message) {
+            /* do nothing */
+        },
+        shouldLog(_logLevel) {
+            return false;
+        },
+    };
+    const policyInstance = factory.create(mockHttpClient, mockRequestPolicyOptions);
+    const policyName = policyInstance.constructor.name;
+    // bundlers sometimes add a custom suffix to the class name to make it unique
+    return knownPolicies.some((knownPolicyName) => {
+        return policyName.startsWith(knownPolicyName);
+    });
+}
+
+/*
+ * Copyright (c) Microsoft Corporation.
+ * Licensed under the MIT License.
+ *
+ * Code generated by Microsoft (R) AutoRest Code Generator.
+ * Changes may cause incorrect behavior and will be lost if the code is regenerated.
+ */
+const BlobServiceProperties = {
+    serializedName: "BlobServiceProperties",
+    xmlName: "StorageServiceProperties",
+    type: {
+        name: "Composite",
+        className: "BlobServiceProperties",
+        modelProperties: {
+            blobAnalyticsLogging: {
+                serializedName: "Logging",
+                xmlName: "Logging",
+                type: {
+                    name: "Composite",
+                    className: "Logging",
+                },
+            },
+            hourMetrics: {
+                serializedName: "HourMetrics",
+                xmlName: "HourMetrics",
+                type: {
+                    name: "Composite",
+                    className: "Metrics",
+                },
+            },
+            minuteMetrics: {
+                serializedName: "MinuteMetrics",
+                xmlName: "MinuteMetrics",
+                type: {
+                    name: "Composite",
+                    className: "Metrics",
+                },
+            },
+            cors: {
+                serializedName: "Cors",
+                xmlName: "Cors",
+                xmlIsWrapped: true,
+                xmlElementName: "CorsRule",
+                type: {
+                    name: "Sequence",
+                    element: {
+                        type: {
+                            name: "Composite",
+                            className: "CorsRule",
+                        },
+                    },
+                },
+            },
+            defaultServiceVersion: {
+                serializedName: "DefaultServiceVersion",
+                xmlName: "DefaultServiceVersion",
+                type: {
+                    name: "String",
+                },
+            },
+            deleteRetentionPolicy: {
+                serializedName: "DeleteRetentionPolicy",
+                xmlName: "DeleteRetentionPolicy",
+                type: {
+                    name: "Composite",
+                    className: "RetentionPolicy",
+                },
+            },
+            staticWebsite: {
+                serializedName: "StaticWebsite",
+                xmlName: "StaticWebsite",
+                type: {
+                    name: "Composite",
+                    className: "StaticWebsite",
+                },
+            },
+        },
+    },
+};
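+// Example (illustrative): per the xmlName values in this mapper, a service properties
+// payload is expected to round-trip through XML shaped like
+//   <StorageServiceProperties>
+//     <Logging>...</Logging><HourMetrics>...</HourMetrics><MinuteMetrics>...</MinuteMetrics>
+//     <Cors><CorsRule>...</CorsRule></Cors>
+//     <DefaultServiceVersion>...</DefaultServiceVersion>
+//     <DeleteRetentionPolicy>...</DeleteRetentionPolicy><StaticWebsite>...</StaticWebsite>
+//   </StorageServiceProperties>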
+const Logging = {
+    serializedName: "Logging",
+    type: {
+        name: "Composite",
+        className: "Logging",
+        modelProperties: {
+            version: {
+                serializedName: "Version",
+                required: true,
+                xmlName: "Version",
+                type: {
+                    name: "String",
+                },
+            },
+            deleteProperty: {
+                serializedName: "Delete",
+                required: true,
+                xmlName: "Delete",
+                type: {
+                    name: "Boolean",
+                },
+            },
+            read: {
+                serializedName: "Read",
+                required: true,
+                xmlName: "Read",
+                type: {
+                    name: "Boolean",
+                },
+            },
+            write: {
+                serializedName: "Write",
+                required: true,
+                xmlName: "Write",
+                type: {
+                    name: "Boolean",
+                },
+            },
+            retentionPolicy: {
+                serializedName: "RetentionPolicy",
+                xmlName: "RetentionPolicy",
+                type: {
+                    name: "Composite",
+                    className: "RetentionPolicy",
+                },
+            },
+        },
+    },
+};
+const RetentionPolicy = {
+    serializedName: "RetentionPolicy",
+    type: {
+        name: "Composite",
+        className: "RetentionPolicy",
+        modelProperties: {
+            enabled: {
+                serializedName: "Enabled",
+                required: true,
+                xmlName: "Enabled",
+                type: {
+                    name: "Boolean",
+                },
+            },
+            days: {
+                constraints: {
+                    InclusiveMinimum: 1,
+                },
+                serializedName: "Days",
+                xmlName: "Days",
+                type: {
+                    name: "Number",
+                },
+            },
+        },
+    },
+};
+const Metrics = {
+    serializedName: "Metrics",
+    type: {
+        name: "Composite",
+        className: "Metrics",
+        modelProperties: {
+            version: {
+                serializedName: "Version",
+                xmlName: "Version",
+                type: {
+                    name: "String",
+                },
+            },
+            enabled: {
+                serializedName: "Enabled",
+                required: true,
+                xmlName: "Enabled",
+                type: {
+                    name: "Boolean",
+                },
+            },
+            includeAPIs: {
+                serializedName: "IncludeAPIs",
+                xmlName: "IncludeAPIs",
+                type: {
+                    name: "Boolean",
+                },
+            },
+            retentionPolicy: {
+                serializedName: "RetentionPolicy",
+                xmlName: "RetentionPolicy",
+                type: {
+                    name: "Composite",
+                    className: "RetentionPolicy",
+                },
+            },
+        },
+    },
+};
+const CorsRule = {
+    serializedName: "CorsRule",
+    type: {
+        name: "Composite",
+        className: "CorsRule",
+        modelProperties: {
+            allowedOrigins: {
+                serializedName: "AllowedOrigins",
+                required: true,
+                xmlName: "AllowedOrigins",
+                type: {
+                    name: "String",
+                },
+            },
+            allowedMethods: {
+                serializedName: "AllowedMethods",
+                required: true,
+                xmlName: "AllowedMethods",
+                type: {
+                    name: "String",
+                },
+            },
+            allowedHeaders: {
+                serializedName: "AllowedHeaders",
+                required: true,
+                xmlName: "AllowedHeaders",
+                type: {
+                    name: "String",
+                },
+            },
+            exposedHeaders: {
+                serializedName: "ExposedHeaders",
+                required: true,
+                xmlName: "ExposedHeaders",
+                type: {
+                    name: "String",
+                },
+            },
+            maxAgeInSeconds: {
+                constraints: {
+                    InclusiveMinimum: 0,
+                },
+                serializedName: "MaxAgeInSeconds",
+                required: true,
+                xmlName: "MaxAgeInSeconds",
+                type: {
+                    name: "Number",
+                },
+            },
+        },
+    },
+};
+const StaticWebsite = {
+    serializedName: "StaticWebsite",
+    type: {
+        name: "Composite",
+        className: "StaticWebsite",
+        modelProperties: {
+            enabled: {
+                serializedName: "Enabled",
+                required: true,
+                xmlName: "Enabled",
+                type: {
+                    name: "Boolean",
+                },
+            },
+            indexDocument: {
+                serializedName: "IndexDocument",
+                xmlName: "IndexDocument",
+                type: {
+                    name: "String",
+                },
+            },
+            errorDocument404Path: {
+                serializedName: "ErrorDocument404Path",
+                xmlName: "ErrorDocument404Path",
+                type: {
+                    name: "String",
+                },
+            },
+            defaultIndexDocumentPath: {
+                serializedName: "DefaultIndexDocumentPath",
+                xmlName: "DefaultIndexDocumentPath",
+                type: {
+                    name: "String",
+                },
+            },
+        },
+    },
+};
+const StorageError = {
+    serializedName: "StorageError",
+    type: {
+        name: "Composite",
+        className: "StorageError",
+        modelProperties: {
+            message: {
+                serializedName: "Message",
+                xmlName: "Message",
+                type: {
+                    name: "String",
+                },
+            },
+            code: {
+                serializedName: "Code",
+                xmlName: "Code",
+                type: {
+                    name: "String",
+                },
+            },
+            authenticationErrorDetail: {
+                serializedName: "AuthenticationErrorDetail",
+                xmlName: "AuthenticationErrorDetail",
+                type: {
+                    name: "String",
+                },
+            },
+        },
+    },
+};
+const BlobServiceStatistics = {
+    serializedName: "BlobServiceStatistics",
+    xmlName: "StorageServiceStats",
+    type: {
+        name: "Composite",
+        className: "BlobServiceStatistics",
+        modelProperties: {
+            geoReplication: {
+                serializedName: "GeoReplication",
+                xmlName: "GeoReplication",
+                type: {
+                    name: "Composite",
+                    className: "GeoReplication",
+                },
+            },
+        },
+    },
+};
+const GeoReplication = {
+    serializedName: "GeoReplication",
+    type: {
+        name: "Composite",
+        className: "GeoReplication",
+        modelProperties: {
+            status: {
+                serializedName: "Status",
+                required: true,
+                xmlName: "Status",
+                type: {
+                    name: "Enum",
+                    allowedValues: ["live", "bootstrap", "unavailable"],
+                },
+            },
+            lastSyncOn: {
+                serializedName: "LastSyncTime",
+                required: true,
+                xmlName: "LastSyncTime",
+                type: {
+                    name: "DateTimeRfc1123",
+                },
+            },
+        },
+    },
+};
+const ListContainersSegmentResponse = {
+    serializedName: "ListContainersSegmentResponse",
+    xmlName: "EnumerationResults",
+    type: {
+        name: "Composite",
+        className: "ListContainersSegmentResponse",
+        modelProperties: {
+            serviceEndpoint: {
+                serializedName: "ServiceEndpoint",
+                required: true,
+                xmlName: "ServiceEndpoint",
+                xmlIsAttribute: true,
+                type: {
+                    name: "String",
+                },
+            },
+            prefix: {
+                serializedName: "Prefix",
+                xmlName: "Prefix",
+                type: {
+                    name: "String",
+                },
+            },
+            marker: {
+                serializedName: "Marker",
+                xmlName: "Marker",
+                type: {
+                    name: "String",
+                },
+            },
+            maxPageSize: {
+                serializedName: "MaxResults",
+                xmlName: "MaxResults",
+                type: {
+                    name: "Number",
+                },
+            },
+            containerItems: {
+                serializedName: "ContainerItems",
+                required: true,
+                xmlName: "Containers",
+                xmlIsWrapped: true,
+                xmlElementName: "Container",
+                type: {
+                    name: "Sequence",
+                    element: {
+                        type: {
+                            name: "Composite",
+                            className: "ContainerItem",
+                        },
+                    },
+                },
+            },
+            continuationToken: {
+                serializedName: "NextMarker",
+                xmlName: "NextMarker",
+                type: {
+                    name: "String",
+                },
+            },
+        },
+    },
+};
+const ContainerItem = {
+    serializedName: "ContainerItem",
+    xmlName: "Container",
+    type: {
+        name: "Composite",
+        className: "ContainerItem",
+        modelProperties: {
+            name: {
+                serializedName: "Name",
+                required: true,
+                xmlName: "Name",
+                type: {
+                    name: "String",
+                },
+            },
+            deleted: {
+                serializedName: "Deleted",
+                xmlName: "Deleted",
+                type: {
+                    name: "Boolean",
+                },
+            },
+            version: {
+                serializedName: "Version",
+                xmlName: "Version",
+                type: {
+                    name: "String",
+                },
+            },
+            properties: {
+                serializedName: "Properties",
+                xmlName: "Properties",
+                type: {
+                    name: "Composite",
+                    className: "ContainerProperties",
+                },
+            },
+            metadata: {
+                serializedName: "Metadata",
+                xmlName: "Metadata",
+                type: {
+                    name: "Dictionary",
+                    value: { type: { name: "String" } },
+                },
+            },
+        },
+    },
+};
+const ContainerProperties = {
+    serializedName: "ContainerProperties",
+    type: {
+        name: "Composite",
+        className: "ContainerProperties",
+        modelProperties: {
+            lastModified: {
+                serializedName: "Last-Modified",
+                required: true,
+                xmlName: "Last-Modified",
+                type: {
+                    name: "DateTimeRfc1123",
+                },
+            },
+            etag: {
+                serializedName: "Etag",
+                required: true,
+                xmlName: "Etag",
+                type: {
+                    name: "String",
+                },
+            },
+            leaseStatus: {
+                serializedName: "LeaseStatus",
+                xmlName: "LeaseStatus",
+                type: {
+                    name: "Enum",
+                    allowedValues: ["locked", "unlocked"],
+                },
+            },
+            leaseState: {
+                serializedName: "LeaseState",
+                xmlName: "LeaseState",
+                type: {
+                    name: "Enum",
+                    allowedValues: [
+                        "available",
+                        "leased",
+                        "expired",
+                        "breaking",
+                        "broken",
+                    ],
+                },
+            },
+            leaseDuration: {
+                serializedName: "LeaseDuration",
+                xmlName: "LeaseDuration",
+                type: {
+                    name: "Enum",
+                    allowedValues: ["infinite", "fixed"],
+                },
+            },
+            publicAccess: {
+                serializedName: "PublicAccess",
+                xmlName: "PublicAccess",
+                type: {
+                    name: "Enum",
+                    allowedValues: ["container", "blob"],
+                },
+            },
+            hasImmutabilityPolicy: {
+                serializedName: "HasImmutabilityPolicy",
+                xmlName: "HasImmutabilityPolicy",
+                type: {
+                    name: "Boolean",
+                },
+            },
+            hasLegalHold: {
+                serializedName: "HasLegalHold",
+                xmlName: "HasLegalHold",
+                type: {
+                    name: "Boolean",
+                },
+            },
+            defaultEncryptionScope: {
+                serializedName: "DefaultEncryptionScope",
+                xmlName: "DefaultEncryptionScope",
+                type: {
+                    name: "String",
+                },
+            },
+            preventEncryptionScopeOverride: {
+                serializedName: "DenyEncryptionScopeOverride",
+                xmlName: "DenyEncryptionScopeOverride",
+                type: {
+                    name: "Boolean",
+                },
+            },
+            deletedOn: {
+                serializedName: "DeletedTime",
+                xmlName: "DeletedTime",
+                type: {
+                    name: "DateTimeRfc1123",
+                },
+            },
+            remainingRetentionDays: {
+                serializedName: "RemainingRetentionDays",
+                xmlName: "RemainingRetentionDays",
+                type: {
+                    name: "Number",
+                },
+            },
+            isImmutableStorageWithVersioningEnabled: {
+                serializedName: "ImmutableStorageWithVersioningEnabled",
+                xmlName: "ImmutableStorageWithVersioningEnabled",
+                type: {
+                    name: "Boolean",
+                },
+            },
+        },
+    },
+};
+const KeyInfo = {
+    serializedName: "KeyInfo",
+    type: {
+        name: "Composite",
+        className: "KeyInfo",
+        modelProperties: {
+            startsOn: {
+                serializedName: "Start",
+                required: true,
+                xmlName: "Start",
+                type: {
+                    name: "String",
+                },
+            },
+            expiresOn: {
+                serializedName: "Expiry",
+                required: true,
+                xmlName: "Expiry",
+                type: {
+                    name: "String",
+                },
+            },
+        },
+    },
+};
+const UserDelegationKey = {
+    serializedName: "UserDelegationKey",
+    type: {
+        name: "Composite",
+        className: "UserDelegationKey",
+        modelProperties: {
+            signedObjectId: {
+                serializedName: "SignedOid",
+                required: true,
+                xmlName: "SignedOid",
+                type: {
+                    name: "String",
+                },
+            },
+            signedTenantId: {
+                serializedName: "SignedTid",
+                required: true,
+                xmlName: "SignedTid",
+                type: {
+                    name: "String",
+                },
+            },
+            signedStartsOn: {
+                serializedName: "SignedStart",
+                required: true,
+                xmlName: "SignedStart",
+                type: {
+                    name: "String",
+                },
+            },
+            signedExpiresOn: {
+                serializedName: "SignedExpiry",
+                required: true,
+                xmlName: "SignedExpiry",
+                type: {
+                    name: "String",
+                },
+            },
+            signedService: {
+                serializedName: "SignedService",
+                required: true,
+                xmlName: "SignedService",
+                type: {
+                    name: "String",
+                },
+            },
+            signedVersion: {
+                serializedName: "SignedVersion",
+                required: true,
+                xmlName: "SignedVersion",
+                type: {
+                    name: "String",
+                },
+            },
+            value: {
+                serializedName: "Value",
+                required: true,
+                xmlName: "Value",
+                type: {
+                    name: "String",
+                },
+            },
+        },
+    },
+};
+const FilterBlobSegment = {
+    serializedName: "FilterBlobSegment",
+    xmlName: "EnumerationResults",
+    type: {
+        name: "Composite",
+        className: "FilterBlobSegment",
+        modelProperties: {
+            serviceEndpoint: {
+                serializedName: "ServiceEndpoint",
+                required: true,
+                xmlName: "ServiceEndpoint",
+                xmlIsAttribute: true,
+                type: {
+                    name: "String",
+                },
+            },
+            where: {
+                serializedName: "Where",
+                required: true,
+                xmlName: "Where",
+                type: {
+                    name: "String",
+                },
+            },
+            blobs: {
+                serializedName: "Blobs",
+                required: true,
+                xmlName: "Blobs",
+                xmlIsWrapped: true,
+                xmlElementName: "Blob",
+                type: {
+                    name: "Sequence",
+                    element: {
+                        type: {
+                            name: "Composite",
+                            className: "FilterBlobItem",
+                        },
+                    },
+                },
+            },
+            continuationToken: {
+                serializedName: "NextMarker",
+                xmlName: "NextMarker",
+                type: {
+                    name: "String",
+                },
+            },
+        },
+    },
+};
+const FilterBlobItem = {
+    serializedName: "FilterBlobItem",
+    xmlName: "Blob",
+    type: {
+        name: "Composite",
+        className: "FilterBlobItem",
+        modelProperties: {
+            name: {
+                serializedName: "Name",
+                required: true,
+                xmlName: "Name",
+                type: {
+                    name: "String",
+                },
+            },
+            containerName: {
+                serializedName: "ContainerName",
+                required: true,
+                xmlName: "ContainerName",
+                type: {
+                    name: "String",
+                },
+            },
+            tags: {
+                serializedName: "Tags",
+                xmlName: "Tags",
+                type: {
+                    name: "Composite",
+                    className: "BlobTags",
+                },
+            },
+        },
+    },
+};
+const BlobTags = {
+    serializedName: "BlobTags",
+    xmlName: "Tags",
+    type: {
+        name: "Composite",
+        className: "BlobTags",
+        modelProperties: {
+            blobTagSet: {
+                serializedName: "BlobTagSet",
+                required: true,
+                xmlName: "TagSet",
+                xmlIsWrapped: true,
+                xmlElementName: "Tag",
+                type: {
+                    name: "Sequence",
+                    element: {
+                        type: {
+                            name: "Composite",
+                            className: "BlobTag",
+                        },
+                    },
+                },
+            },
+        },
+    },
+};
+const BlobTag = {
+    serializedName: "BlobTag",
+    xmlName: "Tag",
+    type: {
+        name: "Composite",
+        className: "BlobTag",
+        modelProperties: {
+            key: {
+                serializedName: "Key",
+                required: true,
+                xmlName: "Key",
+                type: {
+                    name: "String",
+                },
+            },
+            value: {
+                serializedName: "Value",
+                required: true,
+                xmlName: "Value",
+                type: {
+                    name: "String",
+                },
+            },
+        },
+    },
+};
+const SignedIdentifier = {
+    serializedName: "SignedIdentifier",
+    xmlName: "SignedIdentifier",
+    type: {
+        name: "Composite",
+        className: "SignedIdentifier",
+        modelProperties: {
+            id: {
+                serializedName: "Id",
+                required: true,
+                xmlName: "Id",
+                type: {
+                    name: "String",
+                },
+            },
+            accessPolicy: {
+                serializedName: "AccessPolicy",
+                xmlName: "AccessPolicy",
+                type: {
+                    name: "Composite",
+                    className: "AccessPolicy",
+                },
+            },
+        },
+    },
+};
+const AccessPolicy = {
+    serializedName: "AccessPolicy",
+    type: {
+        name: "Composite",
+        className: "AccessPolicy",
+        modelProperties: {
+            startsOn: {
+                serializedName: "Start",
+                xmlName: "Start",
+                type: {
+                    name: "String",
+                },
+            },
+            expiresOn: {
+                serializedName: "Expiry",
+                xmlName: "Expiry",
+                type: {
+                    name: "String",
+                },
+            },
+            permissions: {
+                serializedName: "Permission",
+                xmlName: "Permission",
+                type: {
+                    name: "String",
+                },
+            },
+        },
+    },
+};
+const ListBlobsFlatSegmentResponse = {
+    serializedName: "ListBlobsFlatSegmentResponse",
+    xmlName: "EnumerationResults",
+    type: {
+        name: "Composite",
+        className: "ListBlobsFlatSegmentResponse",
+        modelProperties: {
+            serviceEndpoint: {
+                serializedName: "ServiceEndpoint",
+                required: true,
+                xmlName: "ServiceEndpoint",
+                xmlIsAttribute: true,
+                type: {
+                    name: "String",
+                },
+            },
+            containerName: {
+                serializedName: "ContainerName",
+                required: true,
+                xmlName: "ContainerName",
+                xmlIsAttribute: true,
+                type: {
+                    name: "String",
+                },
+            },
+            prefix: {
+                serializedName: "Prefix",
+                xmlName: "Prefix",
+                type: {
+                    name: "String",
+                },
+            },
+            marker: {
+                serializedName: "Marker",
+                xmlName: "Marker",
+                type: {
+                    name: "String",
+                },
+            },
+            maxPageSize: {
+                serializedName: "MaxResults",
+                xmlName: "MaxResults",
+                type: {
+                    name: "Number",
+                },
+            },
+            segment: {
+                serializedName: "Segment",
+                xmlName: "Blobs",
+                type: {
+                    name: "Composite",
+                    className: "BlobFlatListSegment",
+                },
+            },
+            continuationToken: {
+                serializedName: "NextMarker",
+                xmlName: "NextMarker",
+                type: {
+                    name: "String",
+                },
+            },
+        },
+    },
+};
+const BlobFlatListSegment = {
+    serializedName: "BlobFlatListSegment",
+    xmlName: "Blobs",
+    type: {
+        name: "Composite",
+        className: "BlobFlatListSegment",
+        modelProperties: {
+            blobItems: {
+                serializedName: "BlobItems",
+                required: true,
+                xmlName: "BlobItems",
+                xmlElementName: "Blob",
+                type: {
+                    name: "Sequence",
+                    element: {
+                        type: {
+                            name: "Composite",
+                            className: "BlobItemInternal",
+                        },
+                    },
+                },
+            },
+        },
+    },
+};
+const BlobItemInternal = {
+    serializedName: "BlobItemInternal",
+    xmlName: "Blob",
+    type: {
+        name: "Composite",
+        className: "BlobItemInternal",
+        modelProperties: {
+            name: {
+                serializedName: "Name",
+                xmlName: "Name",
+                type: {
+                    name: "Composite",
+                    className: "BlobName",
+                },
+            },
+            deleted: {
+                serializedName: "Deleted",
+                required: true,
+                xmlName: "Deleted",
+                type: {
+                    name: "Boolean",
+                },
+            },
+            snapshot: {
+                serializedName: "Snapshot",
+                required: true,
+                xmlName: "Snapshot",
+                type: {
+                    name: "String",
+                },
+            },
+            versionId: {
+                serializedName: "VersionId",
+                xmlName: "VersionId",
+                type: {
+                    name: "String",
+                },
+            },
+            isCurrentVersion: {
+                serializedName: "IsCurrentVersion",
+                xmlName: "IsCurrentVersion",
+                type: {
+                    name: "Boolean",
+                },
+            },
+            properties: {
+                serializedName: "Properties",
+                xmlName: "Properties",
+                type: {
+                    name: "Composite",
+                    className: "BlobPropertiesInternal",
+                },
+            },
+            metadata: {
+                serializedName: "Metadata",
+                xmlName: "Metadata",
+                type: {
+                    name: "Dictionary",
+                    value: { type: { name: "String" } },
+                },
+            },
+            blobTags: {
+                serializedName: "BlobTags",
+                xmlName: "Tags",
+                type: {
+                    name: "Composite",
+                    className: "BlobTags",
+                },
+            },
+            objectReplicationMetadata: {
+                serializedName: "ObjectReplicationMetadata",
+                xmlName: "OrMetadata",
+                type: {
+                    name: "Dictionary",
+                    value: { type: { name: "String" } },
+                },
+            },
+            hasVersionsOnly: {
+                serializedName: "HasVersionsOnly",
+                xmlName: "HasVersionsOnly",
+                type: {
+                    name: "Boolean",
+                },
+            },
+        },
+    },
+};
+const BlobName = {
+    serializedName: "BlobName",
+    type: {
+        name: "Composite",
+        className: "BlobName",
+        modelProperties: {
+            encoded: {
+                serializedName: "Encoded",
+                xmlName: "Encoded",
+                xmlIsAttribute: true,
+                type: {
+                    name: "Boolean",
+                },
+            },
+            content: {
+                serializedName: "content",
+                xmlName: "content",
+                xmlIsMsText: true,
+                type: {
+                    name: "String",
+                },
+            },
+        },
+    },
+};
+const BlobPropertiesInternal = {
+    serializedName: "BlobPropertiesInternal",
+    xmlName: "Properties",
+    type: {
+        name: "Composite",
+        className: "BlobPropertiesInternal",
+        modelProperties: {
+            createdOn: {
+                serializedName: "Creation-Time",
+                xmlName: "Creation-Time",
+                type: {
+                    name: "DateTimeRfc1123",
+                },
+            },
+            lastModified: {
+                serializedName: "Last-Modified",
+                required: true,
+                xmlName: "Last-Modified",
+                type: {
+                    name: "DateTimeRfc1123",
+                },
+            },
+            etag: {
+                serializedName: "Etag",
+                required: true,
+                xmlName: "Etag",
+                type: {
+                    name: "String",
+                },
+            },
+            contentLength: {
+                serializedName: "Content-Length",
+                xmlName: "Content-Length",
+                type: {
+                    name: "Number",
+                },
+            },
+            contentType: {
+                serializedName: "Content-Type",
+                xmlName: "Content-Type",
+                type: {
+                    name: "String",
+                },
+            },
+            contentEncoding: {
+                serializedName: "Content-Encoding",
+                xmlName: "Content-Encoding",
+                type: {
+                    name: "String",
+                },
+            },
+            contentLanguage: {
+                serializedName: "Content-Language",
+                xmlName: "Content-Language",
+                type: {
+                    name: "String",
+                },
+            },
+            contentMD5: {
+                serializedName: "Content-MD5",
+                xmlName: "Content-MD5",
+                type: {
+                    name: "ByteArray",
+                },
+            },
+            contentDisposition: {
+                serializedName: "Content-Disposition",
+                xmlName: "Content-Disposition",
+                type: {
+                    name: "String",
+                },
+            },
+            cacheControl: {
+                serializedName: "Cache-Control",
+                xmlName: "Cache-Control",
+                type: {
+                    name: "String",
+                },
+            },
+            blobSequenceNumber: {
+                serializedName: "x-ms-blob-sequence-number",
+                xmlName: "x-ms-blob-sequence-number",
+                type: {
+                    name: "Number",
+                },
+            },
+            blobType: {
+                serializedName: "BlobType",
+                xmlName: "BlobType",
+                type: {
+                    name: "Enum",
+                    allowedValues: ["BlockBlob", "PageBlob", "AppendBlob"],
+                },
+            },
+            leaseStatus: {
+                serializedName: "LeaseStatus",
+                xmlName: "LeaseStatus",
+                type: {
+                    name: "Enum",
+                    allowedValues: ["locked", "unlocked"],
+                },
+            },
+            leaseState: {
+                serializedName: "LeaseState",
+                xmlName: "LeaseState",
+                type: {
+                    name: "Enum",
+                    allowedValues: [
+                        "available",
+                        "leased",
+                        "expired",
+                        "breaking",
+                        "broken",
+                    ],
+                },
+            },
+            leaseDuration: {
+                serializedName: "LeaseDuration",
+                xmlName: "LeaseDuration",
+                type: {
+                    name: "Enum",
+                    allowedValues: ["infinite", "fixed"],
+                },
+            },
+            copyId: {
+                serializedName: "CopyId",
+                xmlName: "CopyId",
+                type: {
+                    name: "String",
+                },
+            },
+            copyStatus: {
+                serializedName: "CopyStatus",
+                xmlName: "CopyStatus",
+                type: {
+                    name: "Enum",
+                    allowedValues: ["pending", "success", "aborted", "failed"],
+                },
+            },
+            copySource: {
+                serializedName: "CopySource",
+                xmlName: "CopySource",
+                type: {
+                    name: "String",
+                },
+            },
+            copyProgress: {
+                serializedName: "CopyProgress",
+                xmlName: "CopyProgress",
+                type: {
+                    name: "String",
+                },
+            },
+            copyCompletedOn: {
+                serializedName: "CopyCompletionTime",
+                xmlName: "CopyCompletionTime",
+                type: {
+                    name: "DateTimeRfc1123",
+                },
+            },
+            copyStatusDescription: {
+                serializedName: "CopyStatusDescription",
+                xmlName: "CopyStatusDescription",
+                type: {
+                    name: "String",
+                },
+            },
+            serverEncrypted: {
+                serializedName: "ServerEncrypted",
+                xmlName: "ServerEncrypted",
+                type: {
+                    name: "Boolean",
+                },
+            },
+            incrementalCopy: {
+                serializedName: "IncrementalCopy",
+                xmlName: "IncrementalCopy",
+                type: {
+                    name: "Boolean",
+                },
+            },
+            destinationSnapshot: {
+                serializedName: "DestinationSnapshot",
+                xmlName: "DestinationSnapshot",
+                type: {
+                    name: "String",
+                },
+            },
+            deletedOn: {
+                serializedName: "DeletedTime",
+                xmlName: "DeletedTime",
+                type: {
+                    name: "DateTimeRfc1123",
+                },
+            },
+            remainingRetentionDays: {
+                serializedName: "RemainingRetentionDays",
+                xmlName: "RemainingRetentionDays",
+                type: {
+                    name: "Number",
+                },
+            },
+            accessTier: {
+                serializedName: "AccessTier",
+                xmlName: "AccessTier",
+                type: {
+                    name: "Enum",
+                    allowedValues: [
+                        "P4",
+                        "P6",
+                        "P10",
+                        "P15",
+                        "P20",
+                        "P30",
+                        "P40",
+                        "P50",
+                        "P60",
+                        "P70",
+                        "P80",
+                        "Hot",
+                        "Cool",
+                        "Archive",
+                        "Cold",
+                    ],
+                },
+            },
+            accessTierInferred: {
+                serializedName: "AccessTierInferred",
+                xmlName: "AccessTierInferred",
+                type: {
+                    name: "Boolean",
+                },
+            },
+            archiveStatus: {
+                serializedName: "ArchiveStatus",
+                xmlName: "ArchiveStatus",
+                type: {
+                    name: "Enum",
+                    allowedValues: [
+                        "rehydrate-pending-to-hot",
+                        "rehydrate-pending-to-cool",
+                        "rehydrate-pending-to-cold",
+                    ],
+                },
+            },
+            customerProvidedKeySha256: {
+                serializedName: "CustomerProvidedKeySha256",
+                xmlName: "CustomerProvidedKeySha256",
+                type: {
+                    name: "String",
+                },
+            },
+            encryptionScope: {
+                serializedName: "EncryptionScope",
+                xmlName: "EncryptionScope",
+                type: {
+                    name: "String",
+                },
+            },
+            accessTierChangedOn: {
+                serializedName: "AccessTierChangeTime",
+                xmlName: "AccessTierChangeTime",
+                type: {
+                    name: "DateTimeRfc1123",
+                },
+            },
+            tagCount: {
+                serializedName: "TagCount",
+                xmlName: "TagCount",
+                type: {
+                    name: "Number",
+                },
+            },
+            expiresOn: {
+                serializedName: "Expiry-Time",
+                xmlName: "Expiry-Time",
+                type: {
+                    name: "DateTimeRfc1123",
+                },
+            },
+            isSealed: {
+                serializedName: "Sealed",
+                xmlName: "Sealed",
+                type: {
+                    name: "Boolean",
+                },
+            },
+            rehydratePriority: {
+                serializedName: "RehydratePriority",
+                xmlName: "RehydratePriority",
+                type: {
+                    name: "Enum",
+                    allowedValues: ["High", "Standard"],
+                },
+            },
+            lastAccessedOn: {
+                serializedName: "LastAccessTime",
+                xmlName: "LastAccessTime",
+                type: {
+                    name: "DateTimeRfc1123",
+                },
+            },
+            immutabilityPolicyExpiresOn: {
+                serializedName: "ImmutabilityPolicyUntilDate",
+                xmlName: "ImmutabilityPolicyUntilDate",
+                type: {
+                    name: "DateTimeRfc1123",
+                },
+            },
+            immutabilityPolicyMode: {
+                serializedName: "ImmutabilityPolicyMode",
+                xmlName: "ImmutabilityPolicyMode",
+                type: {
+                    name: "Enum",
+                    allowedValues: ["Mutable", "Unlocked", "Locked"],
+                },
+            },
+            legalHold: {
+                serializedName: "LegalHold",
+                xmlName: "LegalHold",
+                type: {
+                    name: "Boolean",
+                },
+            },
+        },
+    },
+};
+const ListBlobsHierarchySegmentResponse = {
+    serializedName: "ListBlobsHierarchySegmentResponse",
+    xmlName: "EnumerationResults",
+    type: {
+        name: "Composite",
+        className: "ListBlobsHierarchySegmentResponse",
+        modelProperties: {
+            serviceEndpoint: {
+                serializedName: "ServiceEndpoint",
+                required: true,
+                xmlName: "ServiceEndpoint",
+                xmlIsAttribute: true,
+                type: {
+                    name: "String",
+                },
+            },
+            containerName: {
+                serializedName: "ContainerName",
+                required: true,
+                xmlName: "ContainerName",
+                xmlIsAttribute: true,
+                type: {
+                    name: "String",
+                },
+            },
+            prefix: {
+                serializedName: "Prefix",
+                xmlName: "Prefix",
+                type: {
+                    name: "String",
+                },
+            },
+            marker: {
+                serializedName: "Marker",
+                xmlName: "Marker",
+                type: {
+                    name: "String",
+                },
+            },
+            maxPageSize: {
+                serializedName: "MaxResults",
+                xmlName: "MaxResults",
+                type: {
+                    name: "Number",
+                },
+            },
+            delimiter: {
+                serializedName: "Delimiter",
+                xmlName: "Delimiter",
+                type: {
+                    name: "String",
+                },
+            },
+            segment: {
+                serializedName: "Segment",
+                xmlName: "Blobs",
+                type: {
+                    name: "Composite",
+                    className: "BlobHierarchyListSegment",
+                },
+            },
+            continuationToken: {
+                serializedName: "NextMarker",
+                xmlName: "NextMarker",
+                type: {
+                    name: "String",
+                },
+            },
+        },
+    },
+};
+const BlobHierarchyListSegment = {
+    serializedName: "BlobHierarchyListSegment",
+    xmlName: "Blobs",
+    type: {
+        name: "Composite",
+        className: "BlobHierarchyListSegment",
+        modelProperties: {
+            blobPrefixes: {
+                serializedName: "BlobPrefixes",
+                xmlName: "BlobPrefixes",
+                xmlElementName: "BlobPrefix",
+                type: {
+                    name: "Sequence",
+                    element: {
+                        type: {
+                            name: "Composite",
+                            className: "BlobPrefix",
+                        },
+                    },
+                },
+            },
+            blobItems: {
+                serializedName: "BlobItems",
+                required: true,
+                xmlName: "BlobItems",
+                xmlElementName: "Blob",
+                type: {
+                    name: "Sequence",
+                    element: {
+                        type: {
+                            name: "Composite",
+                            className: "BlobItemInternal",
+                        },
+                    },
+                },
+            },
+        },
+    },
+};
+const BlobPrefix = {
+    serializedName: "BlobPrefix",
+    type: {
+        name: "Composite",
+        className: "BlobPrefix",
+        modelProperties: {
+            name: {
+                serializedName: "Name",
+                xmlName: "Name",
+                type: {
+                    name: "Composite",
+                    className: "BlobName",
+                },
+            },
+        },
+    },
+};
+const BlockLookupList = {
+    serializedName: "BlockLookupList",
+    xmlName: "BlockList",
+    type: {
+        name: "Composite",
+        className: "BlockLookupList",
+        modelProperties: {
+            committed: {
+                serializedName: "Committed",
+                xmlName: "Committed",
+                xmlElementName: "Committed",
+                type: {
+                    name: "Sequence",
+                    element: {
+                        type: {
+                            name: "String",
+                        },
+                    },
+                },
+            },
+            uncommitted: {
+                serializedName: "Uncommitted",
+                xmlName: "Uncommitted",
+                xmlElementName: "Uncommitted",
+                type: {
+                    name: "Sequence",
+                    element: {
+                        type: {
+                            name: "String",
+                        },
+                    },
+                },
+            },
+            latest: {
+                serializedName: "Latest",
+                xmlName: "Latest",
+                xmlElementName: "Latest",
+                type: {
+                    name: "Sequence",
+                    element: {
+                        type: {
+                            name: "String",
+                        },
+                    },
+                },
+            },
+        },
+    },
+};
+const BlockList = {
+    serializedName: "BlockList",
+    type: {
+        name: "Composite",
+        className: "BlockList",
+        modelProperties: {
+            committedBlocks: {
+                serializedName: "CommittedBlocks",
+                xmlName: "CommittedBlocks",
+                xmlIsWrapped: true,
+                xmlElementName: "Block",
+                type: {
+                    name: "Sequence",
+                    element: {
+                        type: {
+                            name: "Composite",
+                            className: "Block",
+                        },
+                    },
+                },
+            },
+            uncommittedBlocks: {
+                serializedName: "UncommittedBlocks",
+                xmlName: "UncommittedBlocks",
+                xmlIsWrapped: true,
+                xmlElementName: "Block",
+                type: {
+                    name: "Sequence",
+                    element: {
+                        type: {
+                            name: "Composite",
+                            className: "Block",
+                        },
+                    },
+                },
+            },
+        },
+    },
+};
+const Block = {
+    serializedName: "Block",
+    type: {
+        name: "Composite",
+        className: "Block",
+        modelProperties: {
+            name: {
+                serializedName: "Name",
+                required: true,
+                xmlName: "Name",
+                type: {
+                    name: "String",
+                },
+            },
+            size: {
+                serializedName: "Size",
+                required: true,
+                xmlName: "Size",
+                type: {
+                    name: "Number",
+                },
+            },
+        },
+    },
+};
+const PageList = {
+    serializedName: "PageList",
+    type: {
+        name: "Composite",
+        className: "PageList",
+        modelProperties: {
+            pageRange: {
+                serializedName: "PageRange",
+                xmlName: "PageRange",
+                xmlElementName: "PageRange",
+                type: {
+                    name: "Sequence",
+                    element: {
+                        type: {
+                            name: "Composite",
+                            className: "PageRange",
+                        },
+                    },
+                },
+            },
+            clearRange: {
+                serializedName: "ClearRange",
+                xmlName: "ClearRange",
+                xmlElementName: "ClearRange",
+                type: {
+                    name: "Sequence",
+                    element: {
+                        type: {
+                            name: "Composite",
+                            className: "ClearRange",
+                        },
+                    },
+                },
+            },
+            continuationToken: {
+                serializedName: "NextMarker",
+                xmlName: "NextMarker",
+                type: {
+                    name: "String",
+                },
+            },
+        },
+    },
+};
+const PageRange = {
+    serializedName: "PageRange",
+    xmlName: "PageRange",
+    type: {
+        name: "Composite",
+        className: "PageRange",
+        modelProperties: {
+            start: {
+                serializedName: "Start",
+                required: true,
+                xmlName: "Start",
+                type: {
+                    name: "Number",
+                },
+            },
+            end: {
+                serializedName: "End",
+                required: true,
+                xmlName: "End",
+                type: {
+                    name: "Number",
+                },
+            },
+        },
+    },
+};
+const ClearRange = {
+    serializedName: "ClearRange",
+    xmlName: "ClearRange",
+    type: {
+        name: "Composite",
+        className: "ClearRange",
+        modelProperties: {
+            start: {
+                serializedName: "Start",
+                required: true,
+                xmlName: "Start",
+                type: {
+                    name: "Number",
+                },
+            },
+            end: {
+                serializedName: "End",
+                required: true,
+                xmlName: "End",
+                type: {
+                    name: "Number",
+                },
+            },
+        },
+    },
+};
+const QueryRequest = {
+    serializedName: "QueryRequest",
+    xmlName: "QueryRequest",
+    type: {
+        name: "Composite",
+        className: "QueryRequest",
+        modelProperties: {
+            queryType: {
+                serializedName: "QueryType",
+                required: true,
+                xmlName: "QueryType",
+                type: {
+                    name: "String",
+                },
+            },
+            expression: {
+                serializedName: "Expression",
+                required: true,
+                xmlName: "Expression",
+                type: {
+                    name: "String",
+                },
+            },
+            inputSerialization: {
+                serializedName: "InputSerialization",
+                xmlName: "InputSerialization",
+                type: {
+                    name: "Composite",
+                    className: "QuerySerialization",
+                },
+            },
+            outputSerialization: {
+                serializedName: "OutputSerialization",
+                xmlName: "OutputSerialization",
+                type: {
+                    name: "Composite",
+                    className: "QuerySerialization",
+                },
+            },
+        },
+    },
+};
+const QuerySerialization = {
+    serializedName: "QuerySerialization",
+    type: {
+        name: "Composite",
+        className: "QuerySerialization",
+        modelProperties: {
+            format: {
+                serializedName: "Format",
+                xmlName: "Format",
+                type: {
+                    name: "Composite",
+                    className: "QueryFormat",
+                },
+            },
+        },
+    },
+};
+const QueryFormat = {
+    serializedName: "QueryFormat",
+    type: {
+        name: "Composite",
+        className: "QueryFormat",
+        modelProperties: {
+            type: {
+                serializedName: "Type",
+                required: true,
+                xmlName: "Type",
+                type: {
+                    name: "Enum",
+                    allowedValues: ["delimited", "json", "arrow", "parquet"],
+                },
+            },
+            delimitedTextConfiguration: {
+                serializedName: "DelimitedTextConfiguration",
+                xmlName: "DelimitedTextConfiguration",
+                type: {
+                    name: "Composite",
+                    className: "DelimitedTextConfiguration",
+                },
+            },
+            jsonTextConfiguration: {
+                serializedName: "JsonTextConfiguration",
+                xmlName: "JsonTextConfiguration",
+                type: {
+                    name: "Composite",
+                    className: "JsonTextConfiguration",
+                },
+            },
+            arrowConfiguration: {
+                serializedName: "ArrowConfiguration",
+                xmlName: "ArrowConfiguration",
+                type: {
+                    name: "Composite",
+                    className: "ArrowConfiguration",
+                },
+            },
+            parquetTextConfiguration: {
+                serializedName: "ParquetTextConfiguration",
+                xmlName: "ParquetTextConfiguration",
+                type: {
+                    name: "Dictionary",
+                    value: { type: { name: "any" } },
+                },
+            },
+        },
+    },
+};
+const DelimitedTextConfiguration = {
+    serializedName: "DelimitedTextConfiguration",
+    xmlName: "DelimitedTextConfiguration",
+    type: {
+        name: "Composite",
+        className: "DelimitedTextConfiguration",
+        modelProperties: {
+            columnSeparator: {
+                serializedName: "ColumnSeparator",
+                xmlName: "ColumnSeparator",
+                type: {
+                    name: "String",
+                },
+            },
+            fieldQuote: {
+                serializedName: "FieldQuote",
+                xmlName: "FieldQuote",
+                type: {
+                    name: "String",
+                },
+            },
+            recordSeparator: {
+                serializedName: "RecordSeparator",
+                xmlName: "RecordSeparator",
+                type: {
+                    name: "String",
+                },
+            },
+            escapeChar: {
+                serializedName: "EscapeChar",
+                xmlName: "EscapeChar",
+                type: {
+                    name: "String",
+                },
+            },
+            headersPresent: {
+                serializedName: "HeadersPresent",
+                xmlName: "HasHeaders",
+                type: {
+                    name: "Boolean",
+                },
+            },
+        },
+    },
+};
+const JsonTextConfiguration = {
+    serializedName: "JsonTextConfiguration",
+    xmlName: "JsonTextConfiguration",
+    type: {
+        name: "Composite",
+        className: "JsonTextConfiguration",
+        modelProperties: {
+            recordSeparator: {
+                serializedName: "RecordSeparator",
+                xmlName: "RecordSeparator",
+                type: {
+                    name: "String",
+                },
+            },
+        },
+    },
+};
+const ArrowConfiguration = {
+    serializedName: "ArrowConfiguration",
+    xmlName: "ArrowConfiguration",
+    type: {
+        name: "Composite",
+        className: "ArrowConfiguration",
+        modelProperties: {
+            schema: {
+                serializedName: "Schema",
+                required: true,
+                xmlName: "Schema",
+                xmlIsWrapped: true,
+                xmlElementName: "Field",
+                type: {
+                    name: "Sequence",
+                    element: {
+                        type: {
+                            name: "Composite",
+                            className: "ArrowField",
+                        },
+                    },
+                },
+            },
+        },
+    },
+};
+const ArrowField = {
+    serializedName: "ArrowField",
+    xmlName: "Field",
+    type: {
+        name: "Composite",
+        className: "ArrowField",
+        modelProperties: {
+            type: {
+                serializedName: "Type",
+                required: true,
+                xmlName: "Type",
+                type: {
+                    name: "String",
+                },
+            },
+            name: {
+                serializedName: "Name",
+                xmlName: "Name",
+                type: {
+                    name: "String",
+                },
+            },
+            precision: {
+                serializedName: "Precision",
+                xmlName: "Precision",
+                type: {
+                    name: "Number",
+                },
+            },
+            scale: {
+                serializedName: "Scale",
+                xmlName: "Scale",
+                type: {
+                    name: "Number",
+                },
+            },
+        },
+    },
+};
+const ServiceSetPropertiesHeaders = {
+    serializedName: "Service_setPropertiesHeaders",
+    type: {
+        name: "Composite",
+        className: "ServiceSetPropertiesHeaders",
+        modelProperties: {
+            clientRequestId: {
+                serializedName: "x-ms-client-request-id",
+                xmlName: "x-ms-client-request-id",
+                type: {
+                    name: "String",
+                },
+            },
+            requestId: {
+                serializedName: "x-ms-request-id",
+                xmlName: "x-ms-request-id",
+                type: {
+                    name: "String",
+                },
+            },
+            version: {
+                serializedName: "x-ms-version",
+                xmlName: "x-ms-version",
+                type: {
+                    name: "String",
+                },
+            },
+            errorCode: {
+                serializedName: "x-ms-error-code",
+                xmlName: "x-ms-error-code",
+                type: {
+                    name: "String",
+                },
+            },
+        },
+    },
+};
+const ServiceSetPropertiesExceptionHeaders = {
+    serializedName: "Service_setPropertiesExceptionHeaders",
+    type: {
+        name: "Composite",
+        className: "ServiceSetPropertiesExceptionHeaders",
+        modelProperties: {
+            errorCode: {
+                serializedName: "x-ms-error-code",
+                xmlName: "x-ms-error-code",
+                type: {
+                    name: "String",
+                },
+            },
+        },
+    },
+};
+const ServiceGetPropertiesHeaders = {
+    serializedName: "Service_getPropertiesHeaders",
+    type: {
+        name: "Composite",
+        className: "ServiceGetPropertiesHeaders",
+        modelProperties: {
+            clientRequestId: {
+                serializedName: "x-ms-client-request-id",
+                xmlName: "x-ms-client-request-id",
+                type: {
+                    name: "String",
+                },
+            },
+            requestId: {
+                serializedName: "x-ms-request-id",
+                xmlName: "x-ms-request-id",
+                type: {
+                    name: "String",
+                },
+            },
+            version: {
+                serializedName: "x-ms-version",
+                xmlName: "x-ms-version",
+                type: {
+                    name: "String",
+                },
+            },
+            errorCode: {
+                serializedName: "x-ms-error-code",
+                xmlName: "x-ms-error-code",
+                type: {
+                    name: "String",
+                },
+            },
+        },
+    },
+};
+const ServiceGetPropertiesExceptionHeaders = {
+    serializedName: "Service_getPropertiesExceptionHeaders",
+    type: {
+        name: "Composite",
+        className: "ServiceGetPropertiesExceptionHeaders",
+        modelProperties: {
+            errorCode: {
+                serializedName: "x-ms-error-code",
+                xmlName: "x-ms-error-code",
+                type: {
+                    name: "String",
+                },
+            },
+        },
+    },
+};
+const ServiceGetStatisticsHeaders = {
+    serializedName: "Service_getStatisticsHeaders",
+    type: {
+        name: "Composite",
+        className: "ServiceGetStatisticsHeaders",
+        modelProperties: {
+            clientRequestId: {
+                serializedName: "x-ms-client-request-id",
+                xmlName: "x-ms-client-request-id",
+                type: {
+                    name: "String",
+                },
+            },
+            requestId: {
+                serializedName: "x-ms-request-id",
+                xmlName: "x-ms-request-id",
+                type: {
+                    name: "String",
+                },
+            },
+            version: {
+                serializedName: "x-ms-version",
+                xmlName: "x-ms-version",
+                type: {
+                    name: "String",
+                },
+            },
+            date: {
+                serializedName: "date",
+                xmlName: "date",
+                type: {
+                    name: "DateTimeRfc1123",
+                },
+            },
+            errorCode: {
+                serializedName: "x-ms-error-code",
+                xmlName: "x-ms-error-code",
+                type: {
+                    name: "String",
+                },
+            },
+        },
+    },
+};
+const ServiceGetStatisticsExceptionHeaders = {
+    serializedName: "Service_getStatisticsExceptionHeaders",
+    type: {
+        name: "Composite",
+        className: "ServiceGetStatisticsExceptionHeaders",
+        modelProperties: {
+            errorCode: {
+                serializedName: "x-ms-error-code",
+                xmlName: "x-ms-error-code",
+                type: {
+                    name: "String",
+                },
+            },
+        },
+    },
+};
+const ServiceListContainersSegmentHeaders = {
+    serializedName: "Service_listContainersSegmentHeaders",
+    type: {
+        name: "Composite",
+        className: "ServiceListContainersSegmentHeaders",
+        modelProperties: {
+            clientRequestId: {
+                serializedName: "x-ms-client-request-id",
+                xmlName: "x-ms-client-request-id",
+                type: {
+                    name: "String",
+                },
+            },
+            requestId: {
+                serializedName: "x-ms-request-id",
+                xmlName: "x-ms-request-id",
+                type: {
+                    name: "String",
+                },
+            },
+            version: {
+                serializedName: "x-ms-version",
+                xmlName: "x-ms-version",
+                type: {
+                    name: "String",
+                },
+            },
+            errorCode: {
+                serializedName: "x-ms-error-code",
+                xmlName: "x-ms-error-code",
+                type: {
+                    name: "String",
+                },
+            },
+        },
+    },
+};
+const ServiceListContainersSegmentExceptionHeaders = {
+    serializedName: "Service_listContainersSegmentExceptionHeaders",
+    type: {
+        name: "Composite",
+        className: "ServiceListContainersSegmentExceptionHeaders",
+        modelProperties: {
+            errorCode: {
+                serializedName: "x-ms-error-code",
+                xmlName: "x-ms-error-code",
+                type: {
+                    name: "String",
+                },
+            },
+        },
+    },
+};
+const ServiceGetUserDelegationKeyHeaders = {
+    serializedName: "Service_getUserDelegationKeyHeaders",
+    type: {
+        name: "Composite",
+        className: "ServiceGetUserDelegationKeyHeaders",
+        modelProperties: {
+            clientRequestId: {
+                serializedName: "x-ms-client-request-id",
+                xmlName: "x-ms-client-request-id",
+                type: {
+                    name: "String",
+                },
+            },
+            requestId: {
+                serializedName: "x-ms-request-id",
+                xmlName: "x-ms-request-id",
+                type: {
+                    name: "String",
+                },
+            },
+            version: {
+                serializedName: "x-ms-version",
+                xmlName: "x-ms-version",
+                type: {
+                    name: "String",
+                },
+            },
+            date: {
+                serializedName: "date",
+                xmlName: "date",
+                type: {
+                    name: "DateTimeRfc1123",
+                },
+            },
+            errorCode: {
+                serializedName: "x-ms-error-code",
+                xmlName: "x-ms-error-code",
+                type: {
+                    name: "String",
+                },
+            },
+        },
+    },
+};
+const ServiceGetUserDelegationKeyExceptionHeaders = {
+    serializedName: "Service_getUserDelegationKeyExceptionHeaders",
+    type: {
+        name: "Composite",
+        className: "ServiceGetUserDelegationKeyExceptionHeaders",
+        modelProperties: {
+            errorCode: {
+                serializedName: "x-ms-error-code",
+                xmlName: "x-ms-error-code",
+                type: {
+                    name: "String",
+                },
+            },
+        },
+    },
+};
+const ServiceGetAccountInfoHeaders = {
+    serializedName: "Service_getAccountInfoHeaders",
+    type: {
+        name: "Composite",
+        className: "ServiceGetAccountInfoHeaders",
+        modelProperties: {
+            clientRequestId: {
+                serializedName: "x-ms-client-request-id",
+                xmlName: "x-ms-client-request-id",
+                type: {
+                    name: "String",
+                },
+            },
+            requestId: {
+                serializedName: "x-ms-request-id",
+                xmlName: "x-ms-request-id",
+                type: {
+                    name: "String",
+                },
+            },
+            version: {
+                serializedName: "x-ms-version",
+                xmlName: "x-ms-version",
+                type: {
+                    name: "String",
+                },
+            },
+            date: {
+                serializedName: "date",
+                xmlName: "date",
+                type: {
+                    name: "DateTimeRfc1123",
+                },
+            },
+            skuName: {
+                serializedName: "x-ms-sku-name",
+                xmlName: "x-ms-sku-name",
+                type: {
+                    name: "Enum",
+                    allowedValues: [
+                        "Standard_LRS",
+                        "Standard_GRS",
+                        "Standard_RAGRS",
+                        "Standard_ZRS",
+                        "Premium_LRS",
+                    ],
+                },
+            },
+            accountKind: {
+                serializedName: "x-ms-account-kind",
+                xmlName: "x-ms-account-kind",
+                type: {
+                    name: "Enum",
+                    allowedValues: [
+                        "Storage",
+                        "BlobStorage",
+                        "StorageV2",
+                        "FileStorage",
+                        "BlockBlobStorage",
+                    ],
+                },
+            },
+            isHierarchicalNamespaceEnabled: {
+                serializedName: "x-ms-is-hns-enabled",
+                xmlName: "x-ms-is-hns-enabled",
+                type: {
+                    name: "Boolean",
+                },
+            },
+            errorCode: {
+                serializedName: "x-ms-error-code",
+                xmlName: "x-ms-error-code",
+                type: {
+                    name: "String",
+                },
+            },
+        },
+    },
+};
+const ServiceGetAccountInfoExceptionHeaders = {
+    serializedName: "Service_getAccountInfoExceptionHeaders",
+    type: {
+        name: "Composite",
+        className: "ServiceGetAccountInfoExceptionHeaders",
+        modelProperties: {
+            errorCode: {
+                serializedName: "x-ms-error-code",
+                xmlName: "x-ms-error-code",
+                type: {
+                    name: "String",
+                },
+            },
+        },
+    },
+};
+const ServiceSubmitBatchHeaders = {
+    serializedName: "Service_submitBatchHeaders",
+    type: {
+        name: "Composite",
+        className: "ServiceSubmitBatchHeaders",
+        modelProperties: {
+            contentType: {
+                serializedName: "content-type",
+                xmlName: "content-type",
+                type: {
+                    name: "String",
+                },
+            },
+            requestId: {
+                serializedName: "x-ms-request-id",
+                xmlName: "x-ms-request-id",
+                type: {
+                    name: "String",
+                },
+            },
+            version: {
+                serializedName: "x-ms-version",
+                xmlName: "x-ms-version",
+                type: {
+                    name: "String",
+                },
+            },
+            clientRequestId: {
+                serializedName: "x-ms-client-request-id",
+                xmlName: "x-ms-client-request-id",
+                type: {
+                    name: "String",
+                },
+            },
+            errorCode: {
+                serializedName: "x-ms-error-code",
+                xmlName: "x-ms-error-code",
+                type: {
+                    name: "String",
+                },
+            },
+        },
+    },
+};
+const ServiceSubmitBatchExceptionHeaders = {
+    serializedName: "Service_submitBatchExceptionHeaders",
+    type: {
+        name: "Composite",
+        className: "ServiceSubmitBatchExceptionHeaders",
+        modelProperties: {
+            errorCode: {
+                serializedName: "x-ms-error-code",
+                xmlName: "x-ms-error-code",
+                type: {
+                    name: "String",
+                },
+            },
+        },
+    },
+};
+const ServiceFilterBlobsHeaders = {
+    serializedName: "Service_filterBlobsHeaders",
+    type: {
+        name: "Composite",
+        className: "ServiceFilterBlobsHeaders",
+        modelProperties: {
+            clientRequestId: {
+                serializedName: "x-ms-client-request-id",
+                xmlName: "x-ms-client-request-id",
+                type: {
+                    name: "String",
+                },
+            },
+            requestId: {
+                serializedName: "x-ms-request-id",
+                xmlName: "x-ms-request-id",
+                type: {
+                    name: "String",
+                },
+            },
+            version: {
+                serializedName: "x-ms-version",
+                xmlName: "x-ms-version",
+                type: {
+                    name: "String",
+                },
+            },
+            date: {
+                serializedName: "date",
+                xmlName: "date",
+                type: {
+                    name: "DateTimeRfc1123",
+                },
+            },
+            errorCode: {
+                serializedName: "x-ms-error-code",
+                xmlName: "x-ms-error-code",
+                type: {
+                    name: "String",
+                },
+            },
+        },
+    },
+};
+const ServiceFilterBlobsExceptionHeaders = {
+    serializedName: "Service_filterBlobsExceptionHeaders",
+    type: {
+        name: "Composite",
+        className: "ServiceFilterBlobsExceptionHeaders",
+        modelProperties: {
+            errorCode: {
+                serializedName: "x-ms-error-code",
+                xmlName: "x-ms-error-code",
+                type: {
+                    name: "String",
+                },
+            },
+        },
+    },
+};
+const ContainerCreateHeaders = {
+    serializedName: "Container_createHeaders",
+    type: {
+        name: "Composite",
+        className: "ContainerCreateHeaders",
+        modelProperties: {
+            etag: {
+                serializedName: "etag",
+                xmlName: "etag",
+                type: {
+                    name: "String",
+                },
+            },
+            lastModified: {
+                serializedName: "last-modified",
+                xmlName: "last-modified",
+                type: {
+                    name: "DateTimeRfc1123",
+                },
+            },
+            clientRequestId: {
+                serializedName: "x-ms-client-request-id",
+                xmlName: "x-ms-client-request-id",
+                type: {
+                    name: "String",
+                },
+            },
+            requestId: {
+                serializedName: "x-ms-request-id",
+                xmlName: "x-ms-request-id",
+                type: {
+                    name: "String",
+                },
+            },
+            version: {
+                serializedName: "x-ms-version",
+                xmlName: "x-ms-version",
+                type: {
+                    name: "String",
+                },
+            },
+            date: {
+                serializedName: "date",
+                xmlName: "date",
+                type: {
+                    name: "DateTimeRfc1123",
+                },
+            },
+            errorCode: {
+                serializedName: "x-ms-error-code",
+                xmlName: "x-ms-error-code",
+                type: {
+                    name: "String",
+                },
+            },
+        },
+    },
+};
+const ContainerCreateExceptionHeaders = {
+    serializedName: "Container_createExceptionHeaders",
+    type: {
+        name: "Composite",
+        className: "ContainerCreateExceptionHeaders",
+        modelProperties: {
+            errorCode: {
+                serializedName: "x-ms-error-code",
+                xmlName: "x-ms-error-code",
+                type: {
+                    name: "String",
+                },
+            },
+        },
+    },
+};
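+// In the mappers below, headerCollectionPrefix gathers every response header that starts
+// with the given prefix (e.g. "x-ms-meta-") into a string Dictionary property.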
+const ContainerGetPropertiesHeaders = {
+    serializedName: "Container_getPropertiesHeaders",
+    type: {
+        name: "Composite",
+        className: "ContainerGetPropertiesHeaders",
+        modelProperties: {
+            metadata: {
+                serializedName: "x-ms-meta",
+                headerCollectionPrefix: "x-ms-meta-",
+                xmlName: "x-ms-meta",
+                type: {
+                    name: "Dictionary",
+                    value: { type: { name: "String" } },
+                },
+            },
+            etag: {
+                serializedName: "etag",
+                xmlName: "etag",
+                type: {
+                    name: "String",
+                },
+            },
+            lastModified: {
+                serializedName: "last-modified",
+                xmlName: "last-modified",
+                type: {
+                    name: "DateTimeRfc1123",
+                },
+            },
+            leaseDuration: {
+                serializedName: "x-ms-lease-duration",
+                xmlName: "x-ms-lease-duration",
+                type: {
+                    name: "Enum",
+                    allowedValues: ["infinite", "fixed"],
+                },
+            },
+            leaseState: {
+                serializedName: "x-ms-lease-state",
+                xmlName: "x-ms-lease-state",
+                type: {
+                    name: "Enum",
+                    allowedValues: [
+                        "available",
+                        "leased",
+                        "expired",
+                        "breaking",
+                        "broken",
+                    ],
+                },
+            },
+            leaseStatus: {
+                serializedName: "x-ms-lease-status",
+                xmlName: "x-ms-lease-status",
+                type: {
+                    name: "Enum",
+                    allowedValues: ["locked", "unlocked"],
+                },
+            },
+            clientRequestId: {
+                serializedName: "x-ms-client-request-id",
+                xmlName: "x-ms-client-request-id",
+                type: {
+                    name: "String",
+                },
+            },
+            requestId: {
+                serializedName: "x-ms-request-id",
+                xmlName: "x-ms-request-id",
+                type: {
+                    name: "String",
+                },
+            },
+            version: {
+                serializedName: "x-ms-version",
+                xmlName: "x-ms-version",
+                type: {
+                    name: "String",
+                },
+            },
+            date: {
+                serializedName: "date",
+                xmlName: "date",
+                type: {
+                    name: "DateTimeRfc1123",
+                },
+            },
+            blobPublicAccess: {
+                serializedName: "x-ms-blob-public-access",
+                xmlName: "x-ms-blob-public-access",
+                type: {
+                    name: "Enum",
+                    allowedValues: ["container", "blob"],
+                },
+            },
+            hasImmutabilityPolicy: {
+                serializedName: "x-ms-has-immutability-policy",
+                xmlName: "x-ms-has-immutability-policy",
+                type: {
+                    name: "Boolean",
+                },
+            },
+            hasLegalHold: {
+                serializedName: "x-ms-has-legal-hold",
+                xmlName: "x-ms-has-legal-hold",
+                type: {
+                    name: "Boolean",
+                },
+            },
+            defaultEncryptionScope: {
+                serializedName: "x-ms-default-encryption-scope",
+                xmlName: "x-ms-default-encryption-scope",
+                type: {
+                    name: "String",
+                },
+            },
+            denyEncryptionScopeOverride: {
+                serializedName: "x-ms-deny-encryption-scope-override",
+                xmlName: "x-ms-deny-encryption-scope-override",
+                type: {
+                    name: "Boolean",
+                },
+            },
+            isImmutableStorageWithVersioningEnabled: {
+                serializedName: "x-ms-immutable-storage-with-versioning-enabled",
+                xmlName: "x-ms-immutable-storage-with-versioning-enabled",
+                type: {
+                    name: "Boolean",
+                },
+            },
+            errorCode: {
+                serializedName: "x-ms-error-code",
+                xmlName: "x-ms-error-code",
+                type: {
+                    name: "String",
+                },
+            },
+        },
+    },
+};
+const ContainerGetPropertiesExceptionHeaders = {
+    serializedName: "Container_getPropertiesExceptionHeaders",
+    type: {
+        name: "Composite",
+        className: "ContainerGetPropertiesExceptionHeaders",
+        modelProperties: {
+            errorCode: {
+                serializedName: "x-ms-error-code",
+                xmlName: "x-ms-error-code",
+                type: {
+                    name: "String",
+                },
+            },
+        },
+    },
+};
+const ContainerDeleteHeaders = {
+    serializedName: "Container_deleteHeaders",
+    type: {
+        name: "Composite",
+        className: "ContainerDeleteHeaders",
+        modelProperties: {
+            clientRequestId: {
+                serializedName: "x-ms-client-request-id",
+                xmlName: "x-ms-client-request-id",
+                type: {
+                    name: "String",
+                },
+            },
+            requestId: {
+                serializedName: "x-ms-request-id",
+                xmlName: "x-ms-request-id",
+                type: {
+                    name: "String",
+                },
+            },
+            version: {
+                serializedName: "x-ms-version",
+                xmlName: "x-ms-version",
+                type: {
+                    name: "String",
+                },
+            },
+            date: {
+                serializedName: "date",
+                xmlName: "date",
+                type: {
+                    name: "DateTimeRfc1123",
+                },
+            },
+            errorCode: {
+                serializedName: "x-ms-error-code",
+                xmlName: "x-ms-error-code",
+                type: {
+                    name: "String",
+                },
+            },
+        },
+    },
+};
+const ContainerDeleteExceptionHeaders = {
+    serializedName: "Container_deleteExceptionHeaders",
+    type: {
+        name: "Composite",
+        className: "ContainerDeleteExceptionHeaders",
+        modelProperties: {
+            errorCode: {
+                serializedName: "x-ms-error-code",
+                xmlName: "x-ms-error-code",
+                type: {
+                    name: "String",
+                },
+            },
+        },
+    },
+};
+const ContainerSetMetadataHeaders = {
+    serializedName: "Container_setMetadataHeaders",
+    type: {
+        name: "Composite",
+        className: "ContainerSetMetadataHeaders",
+        modelProperties: {
+            etag: {
+                serializedName: "etag",
+                xmlName: "etag",
+                type: {
+                    name: "String",
+                },
+            },
+            lastModified: {
+                serializedName: "last-modified",
+                xmlName: "last-modified",
+                type: {
+                    name: "DateTimeRfc1123",
+                },
+            },
+            clientRequestId: {
+                serializedName: "x-ms-client-request-id",
+                xmlName: "x-ms-client-request-id",
+                type: {
+                    name: "String",
+                },
+            },
+            requestId: {
+                serializedName: "x-ms-request-id",
+                xmlName: "x-ms-request-id",
+                type: {
+                    name: "String",
+                },
+            },
+            version: {
+                serializedName: "x-ms-version",
+                xmlName: "x-ms-version",
+                type: {
+                    name: "String",
+                },
+            },
+            date: {
+                serializedName: "date",
+                xmlName: "date",
+                type: {
+                    name: "DateTimeRfc1123",
+                },
+            },
+            errorCode: {
+                serializedName: "x-ms-error-code",
+                xmlName: "x-ms-error-code",
+                type: {
+                    name: "String",
+                },
+            },
+        },
+    },
+};
+const ContainerSetMetadataExceptionHeaders = {
+    serializedName: "Container_setMetadataExceptionHeaders",
+    type: {
+        name: "Composite",
+        className: "ContainerSetMetadataExceptionHeaders",
+        modelProperties: {
+            errorCode: {
+                serializedName: "x-ms-error-code",
+                xmlName: "x-ms-error-code",
+                type: {
+                    name: "String",
+                },
+            },
+        },
+    },
+};
+const ContainerGetAccessPolicyHeaders = {
+    serializedName: "Container_getAccessPolicyHeaders",
+    type: {
+        name: "Composite",
+        className: "ContainerGetAccessPolicyHeaders",
+        modelProperties: {
+            blobPublicAccess: {
+                serializedName: "x-ms-blob-public-access",
+                xmlName: "x-ms-blob-public-access",
+                type: {
+                    name: "Enum",
+                    allowedValues: ["container", "blob"],
+                },
+            },
+            etag: {
+                serializedName: "etag",
+                xmlName: "etag",
+                type: {
+                    name: "String",
+                },
+            },
+            lastModified: {
+                serializedName: "last-modified",
+                xmlName: "last-modified",
+                type: {
+                    name: "DateTimeRfc1123",
+                },
+            },
+            clientRequestId: {
+                serializedName: "x-ms-client-request-id",
+                xmlName: "x-ms-client-request-id",
+                type: {
+                    name: "String",
+                },
+            },
+            requestId: {
+                serializedName: "x-ms-request-id",
+                xmlName: "x-ms-request-id",
+                type: {
+                    name: "String",
+                },
+            },
+            version: {
+                serializedName: "x-ms-version",
+                xmlName: "x-ms-version",
+                type: {
+                    name: "String",
+                },
+            },
+            date: {
+                serializedName: "date",
+                xmlName: "date",
+                type: {
+                    name: "DateTimeRfc1123",
+                },
+            },
+            errorCode: {
+                serializedName: "x-ms-error-code",
+                xmlName: "x-ms-error-code",
+                type: {
+                    name: "String",
+                },
+            },
+        },
+    },
+};
+const ContainerGetAccessPolicyExceptionHeaders = {
+    serializedName: "Container_getAccessPolicyExceptionHeaders",
+    type: {
+        name: "Composite",
+        className: "ContainerGetAccessPolicyExceptionHeaders",
+        modelProperties: {
+            errorCode: {
+                serializedName: "x-ms-error-code",
+                xmlName: "x-ms-error-code",
+                type: {
+                    name: "String",
+                },
+            },
+        },
+    },
+};
+const ContainerSetAccessPolicyHeaders = {
+    serializedName: "Container_setAccessPolicyHeaders",
+    type: {
+        name: "Composite",
+        className: "ContainerSetAccessPolicyHeaders",
+        modelProperties: {
+            etag: {
+                serializedName: "etag",
+                xmlName: "etag",
+                type: {
+                    name: "String",
+                },
+            },
+            lastModified: {
+                serializedName: "last-modified",
+                xmlName: "last-modified",
+                type: {
+                    name: "DateTimeRfc1123",
+                },
+            },
+            clientRequestId: {
+                serializedName: "x-ms-client-request-id",
+                xmlName: "x-ms-client-request-id",
+                type: {
+                    name: "String",
+                },
+            },
+            requestId: {
+                serializedName: "x-ms-request-id",
+                xmlName: "x-ms-request-id",
+                type: {
+                    name: "String",
+                },
+            },
+            version: {
+                serializedName: "x-ms-version",
+                xmlName: "x-ms-version",
+                type: {
+                    name: "String",
+                },
+            },
+            date: {
+                serializedName: "date",
+                xmlName: "date",
+                type: {
+                    name: "DateTimeRfc1123",
+                },
+            },
+            errorCode: {
+                serializedName: "x-ms-error-code",
+                xmlName: "x-ms-error-code",
+                type: {
+                    name: "String",
+                },
+            },
+        },
+    },
+};
+const ContainerSetAccessPolicyExceptionHeaders = {
+    serializedName: "Container_setAccessPolicyExceptionHeaders",
+    type: {
+        name: "Composite",
+        className: "ContainerSetAccessPolicyExceptionHeaders",
+        modelProperties: {
+            errorCode: {
+                serializedName: "x-ms-error-code",
+                xmlName: "x-ms-error-code",
+                type: {
+                    name: "String",
+                },
+            },
+        },
+    },
+};
+const ContainerRestoreHeaders = {
+    serializedName: "Container_restoreHeaders",
+    type: {
+        name: "Composite",
+        className: "ContainerRestoreHeaders",
+        modelProperties: {
+            clientRequestId: {
+                serializedName: "x-ms-client-request-id",
+                xmlName: "x-ms-client-request-id",
+                type: {
+                    name: "String",
+                },
+            },
+            requestId: {
+                serializedName: "x-ms-request-id",
+                xmlName: "x-ms-request-id",
+                type: {
+                    name: "String",
+                },
+            },
+            version: {
+                serializedName: "x-ms-version",
+                xmlName: "x-ms-version",
+                type: {
+                    name: "String",
+                },
+            },
+            date: {
+                serializedName: "date",
+                xmlName: "date",
+                type: {
+                    name: "DateTimeRfc1123",
+                },
+            },
+            errorCode: {
+                serializedName: "x-ms-error-code",
+                xmlName: "x-ms-error-code",
+                type: {
+                    name: "String",
+                },
+            },
+        },
+    },
+};
+const ContainerRestoreExceptionHeaders = {
+    serializedName: "Container_restoreExceptionHeaders",
+    type: {
+        name: "Composite",
+        className: "ContainerRestoreExceptionHeaders",
+        modelProperties: {
+            errorCode: {
+                serializedName: "x-ms-error-code",
+                xmlName: "x-ms-error-code",
+                type: {
+                    name: "String",
+                },
+            },
+        },
+    },
+};
+const ContainerRenameHeaders = {
+    serializedName: "Container_renameHeaders",
+    type: {
+        name: "Composite",
+        className: "ContainerRenameHeaders",
+        modelProperties: {
+            clientRequestId: {
+                serializedName: "x-ms-client-request-id",
+                xmlName: "x-ms-client-request-id",
+                type: {
+                    name: "String",
+                },
+            },
+            requestId: {
+                serializedName: "x-ms-request-id",
+                xmlName: "x-ms-request-id",
+                type: {
+                    name: "String",
+                },
+            },
+            version: {
+                serializedName: "x-ms-version",
+                xmlName: "x-ms-version",
+                type: {
+                    name: "String",
+                },
+            },
+            date: {
+                serializedName: "date",
+                xmlName: "date",
+                type: {
+                    name: "DateTimeRfc1123",
+                },
+            },
+            errorCode: {
+                serializedName: "x-ms-error-code",
+                xmlName: "x-ms-error-code",
+                type: {
+                    name: "String",
+                },
+            },
+        },
+    },
+};
+const ContainerRenameExceptionHeaders = {
+    serializedName: "Container_renameExceptionHeaders",
+    type: {
+        name: "Composite",
+        className: "ContainerRenameExceptionHeaders",
+        modelProperties: {
+            errorCode: {
+                serializedName: "x-ms-error-code",
+                xmlName: "x-ms-error-code",
+                type: {
+                    name: "String",
+                },
+            },
+        },
+    },
+};
+const ContainerSubmitBatchHeaders = {
+    serializedName: "Container_submitBatchHeaders",
+    type: {
+        name: "Composite",
+        className: "ContainerSubmitBatchHeaders",
+        modelProperties: {
+            contentType: {
+                serializedName: "content-type",
+                xmlName: "content-type",
+                type: {
+                    name: "String",
+                },
+            },
+            requestId: {
+                serializedName: "x-ms-request-id",
+                xmlName: "x-ms-request-id",
+                type: {
+                    name: "String",
+                },
+            },
+            version: {
+                serializedName: "x-ms-version",
+                xmlName: "x-ms-version",
+                type: {
+                    name: "String",
+                },
+            },
+        },
+    },
+};
+const ContainerSubmitBatchExceptionHeaders = {
+    serializedName: "Container_submitBatchExceptionHeaders",
+    type: {
+        name: "Composite",
+        className: "ContainerSubmitBatchExceptionHeaders",
+        modelProperties: {
+            errorCode: {
+                serializedName: "x-ms-error-code",
+                xmlName: "x-ms-error-code",
+                type: {
+                    name: "String",
+                },
+            },
+        },
+    },
+};
+const ContainerFilterBlobsHeaders = {
+    serializedName: "Container_filterBlobsHeaders",
+    type: {
+        name: "Composite",
+        className: "ContainerFilterBlobsHeaders",
+        modelProperties: {
+            clientRequestId: {
+                serializedName: "x-ms-client-request-id",
+                xmlName: "x-ms-client-request-id",
+                type: {
+                    name: "String",
+                },
+            },
+            requestId: {
+                serializedName: "x-ms-request-id",
+                xmlName: "x-ms-request-id",
+                type: {
+                    name: "String",
+                },
+            },
+            version: {
+                serializedName: "x-ms-version",
+                xmlName: "x-ms-version",
+                type: {
+                    name: "String",
+                },
+            },
+            date: {
+                serializedName: "date",
+                xmlName: "date",
+                type: {
+                    name: "DateTimeRfc1123",
+                },
+            },
+        },
+    },
+};
+const ContainerFilterBlobsExceptionHeaders = {
+    serializedName: "Container_filterBlobsExceptionHeaders",
+    type: {
+        name: "Composite",
+        className: "ContainerFilterBlobsExceptionHeaders",
+        modelProperties: {
+            errorCode: {
+                serializedName: "x-ms-error-code",
+                xmlName: "x-ms-error-code",
+                type: {
+                    name: "String",
+                },
+            },
+        },
+    },
+};
+const ContainerAcquireLeaseHeaders = {
+    serializedName: "Container_acquireLeaseHeaders",
+    type: {
+        name: "Composite",
+        className: "ContainerAcquireLeaseHeaders",
+        modelProperties: {
+            etag: {
+                serializedName: "etag",
+                xmlName: "etag",
+                type: {
+                    name: "String",
+                },
+            },
+            lastModified: {
+                serializedName: "last-modified",
+                xmlName: "last-modified",
+                type: {
+                    name: "DateTimeRfc1123",
+                },
+            },
+            leaseId: {
+                serializedName: "x-ms-lease-id",
+                xmlName: "x-ms-lease-id",
+                type: {
+                    name: "String",
+                },
+            },
+            clientRequestId: {
+                serializedName: "x-ms-client-request-id",
+                xmlName: "x-ms-client-request-id",
+                type: {
+                    name: "String",
+                },
+            },
+            requestId: {
+                serializedName: "x-ms-request-id",
+                xmlName: "x-ms-request-id",
+                type: {
+                    name: "String",
+                },
+            },
+            version: {
+                serializedName: "x-ms-version",
+                xmlName: "x-ms-version",
+                type: {
+                    name: "String",
+                },
+            },
+            date: {
+                serializedName: "date",
+                xmlName: "date",
+                type: {
+                    name: "DateTimeRfc1123",
+                },
+            },
+        },
+    },
+};
+const ContainerAcquireLeaseExceptionHeaders = {
+    serializedName: "Container_acquireLeaseExceptionHeaders",
+    type: {
+        name: "Composite",
+        className: "ContainerAcquireLeaseExceptionHeaders",
+        modelProperties: {
+            errorCode: {
+                serializedName: "x-ms-error-code",
+                xmlName: "x-ms-error-code",
+                type: {
+                    name: "String",
+                },
+            },
+        },
+    },
+};
+const ContainerReleaseLeaseHeaders = {
+    serializedName: "Container_releaseLeaseHeaders",
+    type: {
+        name: "Composite",
+        className: "ContainerReleaseLeaseHeaders",
+        modelProperties: {
+            etag: {
+                serializedName: "etag",
+                xmlName: "etag",
+                type: {
+                    name: "String",
+                },
+            },
+            lastModified: {
+                serializedName: "last-modified",
+                xmlName: "last-modified",
+                type: {
+                    name: "DateTimeRfc1123",
+                },
+            },
+            clientRequestId: {
+                serializedName: "x-ms-client-request-id",
+                xmlName: "x-ms-client-request-id",
+                type: {
+                    name: "String",
+                },
+            },
+            requestId: {
+                serializedName: "x-ms-request-id",
+                xmlName: "x-ms-request-id",
+                type: {
+                    name: "String",
+                },
+            },
+            version: {
+                serializedName: "x-ms-version",
+                xmlName: "x-ms-version",
+                type: {
+                    name: "String",
+                },
+            },
+            date: {
+                serializedName: "date",
+                xmlName: "date",
+                type: {
+                    name: "DateTimeRfc1123",
+                },
+            },
+        },
+    },
+};
+const ContainerReleaseLeaseExceptionHeaders = {
+    serializedName: "Container_releaseLeaseExceptionHeaders",
+    type: {
+        name: "Composite",
+        className: "ContainerReleaseLeaseExceptionHeaders",
+        modelProperties: {
+            errorCode: {
+                serializedName: "x-ms-error-code",
+                xmlName: "x-ms-error-code",
+                type: {
+                    name: "String",
+                },
+            },
+        },
+    },
+};
+const ContainerRenewLeaseHeaders = {
+    serializedName: "Container_renewLeaseHeaders",
+    type: {
+        name: "Composite",
+        className: "ContainerRenewLeaseHeaders",
+        modelProperties: {
+            etag: {
+                serializedName: "etag",
+                xmlName: "etag",
+                type: {
+                    name: "String",
+                },
+            },
+            lastModified: {
+                serializedName: "last-modified",
+                xmlName: "last-modified",
+                type: {
+                    name: "DateTimeRfc1123",
+                },
+            },
+            leaseId: {
+                serializedName: "x-ms-lease-id",
+                xmlName: "x-ms-lease-id",
+                type: {
+                    name: "String",
+                },
+            },
+            clientRequestId: {
+                serializedName: "x-ms-client-request-id",
+                xmlName: "x-ms-client-request-id",
+                type: {
+                    name: "String",
+                },
+            },
+            requestId: {
+                serializedName: "x-ms-request-id",
+                xmlName: "x-ms-request-id",
+                type: {
+                    name: "String",
+                },
+            },
+            version: {
+                serializedName: "x-ms-version",
+                xmlName: "x-ms-version",
+                type: {
+                    name: "String",
+                },
+            },
+            date: {
+                serializedName: "date",
+                xmlName: "date",
+                type: {
+                    name: "DateTimeRfc1123",
+                },
+            },
+        },
+    },
+};
+const ContainerRenewLeaseExceptionHeaders = {
+    serializedName: "Container_renewLeaseExceptionHeaders",
+    type: {
+        name: "Composite",
+        className: "ContainerRenewLeaseExceptionHeaders",
+        modelProperties: {
+            errorCode: {
+                serializedName: "x-ms-error-code",
+                xmlName: "x-ms-error-code",
+                type: {
+                    name: "String",
+                },
+            },
+        },
+    },
+};
+const ContainerBreakLeaseHeaders = {
+    serializedName: "Container_breakLeaseHeaders",
+    type: {
+        name: "Composite",
+        className: "ContainerBreakLeaseHeaders",
+        modelProperties: {
+            etag: {
+                serializedName: "etag",
+                xmlName: "etag",
+                type: {
+                    name: "String",
+                },
+            },
+            lastModified: {
+                serializedName: "last-modified",
+                xmlName: "last-modified",
+                type: {
+                    name: "DateTimeRfc1123",
+                },
+            },
+            leaseTime: {
+                serializedName: "x-ms-lease-time",
+                xmlName: "x-ms-lease-time",
+                type: {
+                    name: "Number",
+                },
+            },
+            clientRequestId: {
+                serializedName: "x-ms-client-request-id",
+                xmlName: "x-ms-client-request-id",
+                type: {
+                    name: "String",
+                },
+            },
+            requestId: {
+                serializedName: "x-ms-request-id",
+                xmlName: "x-ms-request-id",
+                type: {
+                    name: "String",
+                },
+            },
+            version: {
+                serializedName: "x-ms-version",
+                xmlName: "x-ms-version",
+                type: {
+                    name: "String",
+                },
+            },
+            date: {
+                serializedName: "date",
+                xmlName: "date",
+                type: {
+                    name: "DateTimeRfc1123",
+                },
+            },
+        },
+    },
+};
+const ContainerBreakLeaseExceptionHeaders = {
+    serializedName: "Container_breakLeaseExceptionHeaders",
+    type: {
+        name: "Composite",
+        className: "ContainerBreakLeaseExceptionHeaders",
+        modelProperties: {
+            errorCode: {
+                serializedName: "x-ms-error-code",
+                xmlName: "x-ms-error-code",
+                type: {
+                    name: "String",
+                },
+            },
+        },
+    },
+};
+const ContainerChangeLeaseHeaders = {
+    serializedName: "Container_changeLeaseHeaders",
+    type: {
+        name: "Composite",
+        className: "ContainerChangeLeaseHeaders",
+        modelProperties: {
+            etag: {
+                serializedName: "etag",
+                xmlName: "etag",
+                type: {
+                    name: "String",
+                },
+            },
+            lastModified: {
+                serializedName: "last-modified",
+                xmlName: "last-modified",
+                type: {
+                    name: "DateTimeRfc1123",
+                },
+            },
+            leaseId: {
+                serializedName: "x-ms-lease-id",
+                xmlName: "x-ms-lease-id",
+                type: {
+                    name: "String",
+                },
+            },
+            clientRequestId: {
+                serializedName: "x-ms-client-request-id",
+                xmlName: "x-ms-client-request-id",
+                type: {
+                    name: "String",
+                },
+            },
+            requestId: {
+                serializedName: "x-ms-request-id",
+                xmlName: "x-ms-request-id",
+                type: {
+                    name: "String",
+                },
+            },
+            version: {
+                serializedName: "x-ms-version",
+                xmlName: "x-ms-version",
+                type: {
+                    name: "String",
+                },
+            },
+            date: {
+                serializedName: "date",
+                xmlName: "date",
+                type: {
+                    name: "DateTimeRfc1123",
+                },
+            },
+        },
+    },
+};
+const ContainerChangeLeaseExceptionHeaders = {
+    serializedName: "Container_changeLeaseExceptionHeaders",
+    type: {
+        name: "Composite",
+        className: "ContainerChangeLeaseExceptionHeaders",
+        modelProperties: {
+            errorCode: {
+                serializedName: "x-ms-error-code",
+                xmlName: "x-ms-error-code",
+                type: {
+                    name: "String",
+                },
+            },
+        },
+    },
+};
+const ContainerListBlobFlatSegmentHeaders = {
+    serializedName: "Container_listBlobFlatSegmentHeaders",
+    type: {
+        name: "Composite",
+        className: "ContainerListBlobFlatSegmentHeaders",
+        modelProperties: {
+            contentType: {
+                serializedName: "content-type",
+                xmlName: "content-type",
+                type: {
+                    name: "String",
+                },
+            },
+            clientRequestId: {
+                serializedName: "x-ms-client-request-id",
+                xmlName: "x-ms-client-request-id",
+                type: {
+                    name: "String",
+                },
+            },
+            requestId: {
+                serializedName: "x-ms-request-id",
+                xmlName: "x-ms-request-id",
+                type: {
+                    name: "String",
+                },
+            },
+            version: {
+                serializedName: "x-ms-version",
+                xmlName: "x-ms-version",
+                type: {
+                    name: "String",
+                },
+            },
+            date: {
+                serializedName: "date",
+                xmlName: "date",
+                type: {
+                    name: "DateTimeRfc1123",
+                },
+            },
+            errorCode: {
+                serializedName: "x-ms-error-code",
+                xmlName: "x-ms-error-code",
+                type: {
+                    name: "String",
+                },
+            },
+        },
+    },
+};
+const ContainerListBlobFlatSegmentExceptionHeaders = {
+    serializedName: "Container_listBlobFlatSegmentExceptionHeaders",
+    type: {
+        name: "Composite",
+        className: "ContainerListBlobFlatSegmentExceptionHeaders",
+        modelProperties: {
+            errorCode: {
+                serializedName: "x-ms-error-code",
+                xmlName: "x-ms-error-code",
+                type: {
+                    name: "String",
+                },
+            },
+        },
+    },
+};
+const ContainerListBlobHierarchySegmentHeaders = {
+    serializedName: "Container_listBlobHierarchySegmentHeaders",
+    type: {
+        name: "Composite",
+        className: "ContainerListBlobHierarchySegmentHeaders",
+        modelProperties: {
+            contentType: {
+                serializedName: "content-type",
+                xmlName: "content-type",
+                type: {
+                    name: "String",
+                },
+            },
+            clientRequestId: {
+                serializedName: "x-ms-client-request-id",
+                xmlName: "x-ms-client-request-id",
+                type: {
+                    name: "String",
+                },
+            },
+            requestId: {
+                serializedName: "x-ms-request-id",
+                xmlName: "x-ms-request-id",
+                type: {
+                    name: "String",
+                },
+            },
+            version: {
+                serializedName: "x-ms-version",
+                xmlName: "x-ms-version",
+                type: {
+                    name: "String",
+                },
+            },
+            date: {
+                serializedName: "date",
+                xmlName: "date",
+                type: {
+                    name: "DateTimeRfc1123",
+                },
+            },
+            errorCode: {
+                serializedName: "x-ms-error-code",
+                xmlName: "x-ms-error-code",
+                type: {
+                    name: "String",
+                },
+            },
+        },
+    },
+};
+const ContainerListBlobHierarchySegmentExceptionHeaders = {
+    serializedName: "Container_listBlobHierarchySegmentExceptionHeaders",
+    type: {
+        name: "Composite",
+        className: "ContainerListBlobHierarchySegmentExceptionHeaders",
+        modelProperties: {
+            errorCode: {
+                serializedName: "x-ms-error-code",
+                xmlName: "x-ms-error-code",
+                type: {
+                    name: "String",
+                },
+            },
+        },
+    },
+};
+const ContainerGetAccountInfoHeaders = {
+    serializedName: "Container_getAccountInfoHeaders",
+    type: {
+        name: "Composite",
+        className: "ContainerGetAccountInfoHeaders",
+        modelProperties: {
+            clientRequestId: {
+                serializedName: "x-ms-client-request-id",
+                xmlName: "x-ms-client-request-id",
+                type: {
+                    name: "String",
+                },
+            },
+            requestId: {
+                serializedName: "x-ms-request-id",
+                xmlName: "x-ms-request-id",
+                type: {
+                    name: "String",
+                },
+            },
+            version: {
+                serializedName: "x-ms-version",
+                xmlName: "x-ms-version",
+                type: {
+                    name: "String",
+                },
+            },
+            date: {
+                serializedName: "date",
+                xmlName: "date",
+                type: {
+                    name: "DateTimeRfc1123",
+                },
+            },
+            skuName: {
+                serializedName: "x-ms-sku-name",
+                xmlName: "x-ms-sku-name",
+                type: {
+                    name: "Enum",
+                    allowedValues: [
+                        "Standard_LRS",
+                        "Standard_GRS",
+                        "Standard_RAGRS",
+                        "Standard_ZRS",
+                        "Premium_LRS",
+                    ],
+                },
+            },
+            accountKind: {
+                serializedName: "x-ms-account-kind",
+                xmlName: "x-ms-account-kind",
+                type: {
+                    name: "Enum",
+                    allowedValues: [
+                        "Storage",
+                        "BlobStorage",
+                        "StorageV2",
+                        "FileStorage",
+                        "BlockBlobStorage",
+                    ],
+                },
+            },
+            isHierarchicalNamespaceEnabled: {
+                serializedName: "x-ms-is-hns-enabled",
+                xmlName: "x-ms-is-hns-enabled",
+                type: {
+                    name: "Boolean",
+                },
+            },
+        },
+    },
+};
+const ContainerGetAccountInfoExceptionHeaders = {
+    serializedName: "Container_getAccountInfoExceptionHeaders",
+    type: {
+        name: "Composite",
+        className: "ContainerGetAccountInfoExceptionHeaders",
+        modelProperties: {
+            errorCode: {
+                serializedName: "x-ms-error-code",
+                xmlName: "x-ms-error-code",
+                type: {
+                    name: "String",
+                },
+            },
+        },
+    },
+};
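+// Response-header mappers for the Blob_* operations follow, starting with the large
+// Blob_download header set (content, copy, lease, encryption and immutability headers).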
+const BlobDownloadHeaders = {
+    serializedName: "Blob_downloadHeaders",
+    type: {
+        name: "Composite",
+        className: "BlobDownloadHeaders",
+        modelProperties: {
+            lastModified: {
+                serializedName: "last-modified",
+                xmlName: "last-modified",
+                type: {
+                    name: "DateTimeRfc1123",
+                },
+            },
+            createdOn: {
+                serializedName: "x-ms-creation-time",
+                xmlName: "x-ms-creation-time",
+                type: {
+                    name: "DateTimeRfc1123",
+                },
+            },
+            metadata: {
+                serializedName: "x-ms-meta",
+                headerCollectionPrefix: "x-ms-meta-",
+                xmlName: "x-ms-meta",
+                type: {
+                    name: "Dictionary",
+                    value: { type: { name: "String" } },
+                },
+            },
+            objectReplicationPolicyId: {
+                serializedName: "x-ms-or-policy-id",
+                xmlName: "x-ms-or-policy-id",
+                type: {
+                    name: "String",
+                },
+            },
+            objectReplicationRules: {
+                serializedName: "x-ms-or",
+                headerCollectionPrefix: "x-ms-or-",
+                xmlName: "x-ms-or",
+                type: {
+                    name: "Dictionary",
+                    value: { type: { name: "String" } },
+                },
+            },
+            contentLength: {
+                serializedName: "content-length",
+                xmlName: "content-length",
+                type: {
+                    name: "Number",
+                },
+            },
+            contentType: {
+                serializedName: "content-type",
+                xmlName: "content-type",
+                type: {
+                    name: "String",
+                },
+            },
+            contentRange: {
+                serializedName: "content-range",
+                xmlName: "content-range",
+                type: {
+                    name: "String",
+                },
+            },
+            etag: {
+                serializedName: "etag",
+                xmlName: "etag",
+                type: {
+                    name: "String",
+                },
+            },
+            contentMD5: {
+                serializedName: "content-md5",
+                xmlName: "content-md5",
+                type: {
+                    name: "ByteArray",
+                },
+            },
+            contentEncoding: {
+                serializedName: "content-encoding",
+                xmlName: "content-encoding",
+                type: {
+                    name: "String",
+                },
+            },
+            cacheControl: {
+                serializedName: "cache-control",
+                xmlName: "cache-control",
+                type: {
+                    name: "String",
+                },
+            },
+            contentDisposition: {
+                serializedName: "content-disposition",
+                xmlName: "content-disposition",
+                type: {
+                    name: "String",
+                },
+            },
+            contentLanguage: {
+                serializedName: "content-language",
+                xmlName: "content-language",
+                type: {
+                    name: "String",
+                },
+            },
+            blobSequenceNumber: {
+                serializedName: "x-ms-blob-sequence-number",
+                xmlName: "x-ms-blob-sequence-number",
+                type: {
+                    name: "Number",
+                },
+            },
+            blobType: {
+                serializedName: "x-ms-blob-type",
+                xmlName: "x-ms-blob-type",
+                type: {
+                    name: "Enum",
+                    allowedValues: ["BlockBlob", "PageBlob", "AppendBlob"],
+                },
+            },
+            copyCompletedOn: {
+                serializedName: "x-ms-copy-completion-time",
+                xmlName: "x-ms-copy-completion-time",
+                type: {
+                    name: "DateTimeRfc1123",
+                },
+            },
+            copyStatusDescription: {
+                serializedName: "x-ms-copy-status-description",
+                xmlName: "x-ms-copy-status-description",
+                type: {
+                    name: "String",
+                },
+            },
+            copyId: {
+                serializedName: "x-ms-copy-id",
+                xmlName: "x-ms-copy-id",
+                type: {
+                    name: "String",
+                },
+            },
+            copyProgress: {
+                serializedName: "x-ms-copy-progress",
+                xmlName: "x-ms-copy-progress",
+                type: {
+                    name: "String",
+                },
+            },
+            copySource: {
+                serializedName: "x-ms-copy-source",
+                xmlName: "x-ms-copy-source",
+                type: {
+                    name: "String",
+                },
+            },
+            copyStatus: {
+                serializedName: "x-ms-copy-status",
+                xmlName: "x-ms-copy-status",
+                type: {
+                    name: "Enum",
+                    allowedValues: ["pending", "success", "aborted", "failed"],
+                },
+            },
+            leaseDuration: {
+                serializedName: "x-ms-lease-duration",
+                xmlName: "x-ms-lease-duration",
+                type: {
+                    name: "Enum",
+                    allowedValues: ["infinite", "fixed"],
+                },
+            },
+            leaseState: {
+                serializedName: "x-ms-lease-state",
+                xmlName: "x-ms-lease-state",
+                type: {
+                    name: "Enum",
+                    allowedValues: [
+                        "available",
+                        "leased",
+                        "expired",
+                        "breaking",
+                        "broken",
+                    ],
+                },
+            },
+            leaseStatus: {
+                serializedName: "x-ms-lease-status",
+                xmlName: "x-ms-lease-status",
+                type: {
+                    name: "Enum",
+                    allowedValues: ["locked", "unlocked"],
+                },
+            },
+            clientRequestId: {
+                serializedName: "x-ms-client-request-id",
+                xmlName: "x-ms-client-request-id",
+                type: {
+                    name: "String",
+                },
+            },
+            requestId: {
+                serializedName: "x-ms-request-id",
+                xmlName: "x-ms-request-id",
+                type: {
+                    name: "String",
+                },
+            },
+            version: {
+                serializedName: "x-ms-version",
+                xmlName: "x-ms-version",
+                type: {
+                    name: "String",
+                },
+            },
+            versionId: {
+                serializedName: "x-ms-version-id",
+                xmlName: "x-ms-version-id",
+                type: {
+                    name: "String",
+                },
+            },
+            isCurrentVersion: {
+                serializedName: "x-ms-is-current-version",
+                xmlName: "x-ms-is-current-version",
+                type: {
+                    name: "Boolean",
+                },
+            },
+            acceptRanges: {
+                serializedName: "accept-ranges",
+                xmlName: "accept-ranges",
+                type: {
+                    name: "String",
+                },
+            },
+            date: {
+                serializedName: "date",
+                xmlName: "date",
+                type: {
+                    name: "DateTimeRfc1123",
+                },
+            },
+            blobCommittedBlockCount: {
+                serializedName: "x-ms-blob-committed-block-count",
+                xmlName: "x-ms-blob-committed-block-count",
+                type: {
+                    name: "Number",
+                },
+            },
+            isServerEncrypted: {
+                serializedName: "x-ms-server-encrypted",
+                xmlName: "x-ms-server-encrypted",
+                type: {
+                    name: "Boolean",
+                },
+            },
+            encryptionKeySha256: {
+                serializedName: "x-ms-encryption-key-sha256",
+                xmlName: "x-ms-encryption-key-sha256",
+                type: {
+                    name: "String",
+                },
+            },
+            encryptionScope: {
+                serializedName: "x-ms-encryption-scope",
+                xmlName: "x-ms-encryption-scope",
+                type: {
+                    name: "String",
+                },
+            },
+            blobContentMD5: {
+                serializedName: "x-ms-blob-content-md5",
+                xmlName: "x-ms-blob-content-md5",
+                type: {
+                    name: "ByteArray",
+                },
+            },
+            tagCount: {
+                serializedName: "x-ms-tag-count",
+                xmlName: "x-ms-tag-count",
+                type: {
+                    name: "Number",
+                },
+            },
+            isSealed: {
+                serializedName: "x-ms-blob-sealed",
+                xmlName: "x-ms-blob-sealed",
+                type: {
+                    name: "Boolean",
+                },
+            },
+            lastAccessed: {
+                serializedName: "x-ms-last-access-time",
+                xmlName: "x-ms-last-access-time",
+                type: {
+                    name: "DateTimeRfc1123",
+                },
+            },
+            immutabilityPolicyExpiresOn: {
+                serializedName: "x-ms-immutability-policy-until-date",
+                xmlName: "x-ms-immutability-policy-until-date",
+                type: {
+                    name: "DateTimeRfc1123",
+                },
+            },
+            immutabilityPolicyMode: {
+                serializedName: "x-ms-immutability-policy-mode",
+                xmlName: "x-ms-immutability-policy-mode",
+                type: {
+                    name: "Enum",
+                    allowedValues: ["Mutable", "Unlocked", "Locked"],
+                },
+            },
+            legalHold: {
+                serializedName: "x-ms-legal-hold",
+                xmlName: "x-ms-legal-hold",
+                type: {
+                    name: "Boolean",
+                },
+            },
+            errorCode: {
+                serializedName: "x-ms-error-code",
+                xmlName: "x-ms-error-code",
+                type: {
+                    name: "String",
+                },
+            },
+            contentCrc64: {
+                serializedName: "x-ms-content-crc64",
+                xmlName: "x-ms-content-crc64",
+                type: {
+                    name: "ByteArray",
+                },
+            },
+        },
+    },
+};
+const BlobDownloadExceptionHeaders = {
+    serializedName: "Blob_downloadExceptionHeaders",
+    type: {
+        name: "Composite",
+        className: "BlobDownloadExceptionHeaders",
+        modelProperties: {
+            errorCode: {
+                serializedName: "x-ms-error-code",
+                xmlName: "x-ms-error-code",
+                type: {
+                    name: "String",
+                },
+            },
+        },
+    },
+};
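+// Blob_getProperties shares most of the Blob_download header set, with property-only
+// additions such as isIncrementalCopy and destinationSnapshot.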
+const BlobGetPropertiesHeaders = {
+    serializedName: "Blob_getPropertiesHeaders",
+    type: {
+        name: "Composite",
+        className: "BlobGetPropertiesHeaders",
+        modelProperties: {
+            lastModified: {
+                serializedName: "last-modified",
+                xmlName: "last-modified",
+                type: {
+                    name: "DateTimeRfc1123",
+                },
+            },
+            createdOn: {
+                serializedName: "x-ms-creation-time",
+                xmlName: "x-ms-creation-time",
+                type: {
+                    name: "DateTimeRfc1123",
+                },
+            },
+            metadata: {
+                serializedName: "x-ms-meta",
+                headerCollectionPrefix: "x-ms-meta-",
+                xmlName: "x-ms-meta",
+                type: {
+                    name: "Dictionary",
+                    value: { type: { name: "String" } },
+                },
+            },
+            objectReplicationPolicyId: {
+                serializedName: "x-ms-or-policy-id",
+                xmlName: "x-ms-or-policy-id",
+                type: {
+                    name: "String",
+                },
+            },
+            objectReplicationRules: {
+                serializedName: "x-ms-or",
+                headerCollectionPrefix: "x-ms-or-",
+                xmlName: "x-ms-or",
+                type: {
+                    name: "Dictionary",
+                    value: { type: { name: "String" } },
+                },
+            },
+            blobType: {
+                serializedName: "x-ms-blob-type",
+                xmlName: "x-ms-blob-type",
+                type: {
+                    name: "Enum",
+                    allowedValues: ["BlockBlob", "PageBlob", "AppendBlob"],
+                },
+            },
+            copyCompletedOn: {
+                serializedName: "x-ms-copy-completion-time",
+                xmlName: "x-ms-copy-completion-time",
+                type: {
+                    name: "DateTimeRfc1123",
+                },
+            },
+            copyStatusDescription: {
+                serializedName: "x-ms-copy-status-description",
+                xmlName: "x-ms-copy-status-description",
+                type: {
+                    name: "String",
+                },
+            },
+            copyId: {
+                serializedName: "x-ms-copy-id",
+                xmlName: "x-ms-copy-id",
+                type: {
+                    name: "String",
+                },
+            },
+            copyProgress: {
+                serializedName: "x-ms-copy-progress",
+                xmlName: "x-ms-copy-progress",
+                type: {
+                    name: "String",
+                },
+            },
+            copySource: {
+                serializedName: "x-ms-copy-source",
+                xmlName: "x-ms-copy-source",
+                type: {
+                    name: "String",
+                },
+            },
+            copyStatus: {
+                serializedName: "x-ms-copy-status",
+                xmlName: "x-ms-copy-status",
+                type: {
+                    name: "Enum",
+                    allowedValues: ["pending", "success", "aborted", "failed"],
+                },
+            },
+            isIncrementalCopy: {
+                serializedName: "x-ms-incremental-copy",
+                xmlName: "x-ms-incremental-copy",
+                type: {
+                    name: "Boolean",
+                },
+            },
+            destinationSnapshot: {
+                serializedName: "x-ms-copy-destination-snapshot",
+                xmlName: "x-ms-copy-destination-snapshot",
+                type: {
+                    name: "String",
+                },
+            },
+            leaseDuration: {
+                serializedName: "x-ms-lease-duration",
+                xmlName: "x-ms-lease-duration",
+                type: {
+                    name: "Enum",
+                    allowedValues: ["infinite", "fixed"],
+                },
+            },
+            leaseState: {
+                serializedName: "x-ms-lease-state",
+                xmlName: "x-ms-lease-state",
+                type: {
+                    name: "Enum",
+                    allowedValues: [
+                        "available",
+                        "leased",
+                        "expired",
+                        "breaking",
+                        "broken",
+                    ],
+                },
+            },
+            leaseStatus: {
+                serializedName: "x-ms-lease-status",
+                xmlName: "x-ms-lease-status",
+                type: {
+                    name: "Enum",
+                    allowedValues: ["locked", "unlocked"],
+                },
+            },
+            contentLength: {
+                serializedName: "content-length",
+                xmlName: "content-length",
+                type: {
+                    name: "Number",
+                },
+            },
+            contentType: {
+                serializedName: "content-type",
+                xmlName: "content-type",
+                type: {
+                    name: "String",
+                },
+            },
+            etag: {
+                serializedName: "etag",
+                xmlName: "etag",
+                type: {
+                    name: "String",
+                },
+            },
+            contentMD5: {
+                serializedName: "content-md5",
+                xmlName: "content-md5",
+                type: {
+                    name: "ByteArray",
+                },
+            },
+            contentEncoding: {
+                serializedName: "content-encoding",
+                xmlName: "content-encoding",
+                type: {
+                    name: "String",
+                },
+            },
+            contentDisposition: {
+                serializedName: "content-disposition",
+                xmlName: "content-disposition",
+                type: {
+                    name: "String",
+                },
+            },
+            contentLanguage: {
+                serializedName: "content-language",
+                xmlName: "content-language",
+                type: {
+                    name: "String",
+                },
+            },
+            cacheControl: {
+                serializedName: "cache-control",
+                xmlName: "cache-control",
+                type: {
+                    name: "String",
+                },
+            },
+            blobSequenceNumber: {
+                serializedName: "x-ms-blob-sequence-number",
+                xmlName: "x-ms-blob-sequence-number",
+                type: {
+                    name: "Number",
+                },
+            },
+            clientRequestId: {
+                serializedName: "x-ms-client-request-id",
+                xmlName: "x-ms-client-request-id",
+                type: {
+                    name: "String",
+                },
+            },
+            requestId: {
+                serializedName: "x-ms-request-id",
+                xmlName: "x-ms-request-id",
+                type: {
+                    name: "String",
+                },
+            },
+            version: {
+                serializedName: "x-ms-version",
+                xmlName: "x-ms-version",
+                type: {
+                    name: "String",
+                },
+            },
+            date: {
+                serializedName: "date",
+                xmlName: "date",
+                type: {
+                    name: "DateTimeRfc1123",
+                },
+            },
+            acceptRanges: {
+                serializedName: "accept-ranges",
+                xmlName: "accept-ranges",
+                type: {
+                    name: "String",
+                },
+            },
+            blobCommittedBlockCount: {
+                serializedName: "x-ms-blob-committed-block-count",
+                xmlName: "x-ms-blob-committed-block-count",
+                type: {
+                    name: "Number",
+                },
+            },
+            isServerEncrypted: {
+                serializedName: "x-ms-server-encrypted",
+                xmlName: "x-ms-server-encrypted",
+                type: {
+                    name: "Boolean",
+                },
+            },
+            encryptionKeySha256: {
+                serializedName: "x-ms-encryption-key-sha256",
+                xmlName: "x-ms-encryption-key-sha256",
+                type: {
+                    name: "String",
+                },
+            },
+            encryptionScope: {
+                serializedName: "x-ms-encryption-scope",
+                xmlName: "x-ms-encryption-scope",
+                type: {
+                    name: "String",
+                },
+            },
+            accessTier: {
+                serializedName: "x-ms-access-tier",
+                xmlName: "x-ms-access-tier",
+                type: {
+                    name: "String",
+                },
+            },
+            accessTierInferred: {
+                serializedName: "x-ms-access-tier-inferred",
+                xmlName: "x-ms-access-tier-inferred",
+                type: {
+                    name: "Boolean",
+                },
+            },
+            archiveStatus: {
+                serializedName: "x-ms-archive-status",
+                xmlName: "x-ms-archive-status",
+                type: {
+                    name: "String",
+                },
+            },
+            accessTierChangedOn: {
+                serializedName: "x-ms-access-tier-change-time",
+                xmlName: "x-ms-access-tier-change-time",
+                type: {
+                    name: "DateTimeRfc1123",
+                },
+            },
+            versionId: {
+                serializedName: "x-ms-version-id",
+                xmlName: "x-ms-version-id",
+                type: {
+                    name: "String",
+                },
+            },
+            isCurrentVersion: {
+                serializedName: "x-ms-is-current-version",
+                xmlName: "x-ms-is-current-version",
+                type: {
+                    name: "Boolean",
+                },
+            },
+            tagCount: {
+                serializedName: "x-ms-tag-count",
+                xmlName: "x-ms-tag-count",
+                type: {
+                    name: "Number",
+                },
+            },
+            expiresOn: {
+                serializedName: "x-ms-expiry-time",
+                xmlName: "x-ms-expiry-time",
+                type: {
+                    name: "DateTimeRfc1123",
+                },
+            },
+            isSealed: {
+                serializedName: "x-ms-blob-sealed",
+                xmlName: "x-ms-blob-sealed",
+                type: {
+                    name: "Boolean",
+                },
+            },
+            rehydratePriority: {
+                serializedName: "x-ms-rehydrate-priority",
+                xmlName: "x-ms-rehydrate-priority",
+                type: {
+                    name: "Enum",
+                    allowedValues: ["High", "Standard"],
+                },
+            },
+            lastAccessed: {
+                serializedName: "x-ms-last-access-time",
+                xmlName: "x-ms-last-access-time",
+                type: {
+                    name: "DateTimeRfc1123",
+                },
+            },
+            immutabilityPolicyExpiresOn: {
+                serializedName: "x-ms-immutability-policy-until-date",
+                xmlName: "x-ms-immutability-policy-until-date",
+                type: {
+                    name: "DateTimeRfc1123",
+                },
+            },
+            immutabilityPolicyMode: {
+                serializedName: "x-ms-immutability-policy-mode",
+                xmlName: "x-ms-immutability-policy-mode",
+                type: {
+                    name: "Enum",
+                    allowedValues: ["Mutable", "Unlocked", "Locked"],
+                },
+            },
+            legalHold: {
+                serializedName: "x-ms-legal-hold",
+                xmlName: "x-ms-legal-hold",
+                type: {
+                    name: "Boolean",
+                },
+            },
+            errorCode: {
+                serializedName: "x-ms-error-code",
+                xmlName: "x-ms-error-code",
+                type: {
+                    name: "String",
+                },
+            },
+        },
+    },
+};
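+// These composite mappers describe how the bundled @azure/core-client serializer turns
+// Azure Blob Storage response headers into typed result objects: serializedName is the
+// raw HTTP header, and type is the JavaScript representation it deserializes to. The
+// *ExceptionHeaders variants that follow all share one shape, surfacing only errorCode,
+// since error responses are expected to carry just the x-ms-error-code header.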
+const BlobGetPropertiesExceptionHeaders = {
+    serializedName: "Blob_getPropertiesExceptionHeaders",
+    type: {
+        name: "Composite",
+        className: "BlobGetPropertiesExceptionHeaders",
+        modelProperties: {
+            errorCode: {
+                serializedName: "x-ms-error-code",
+                xmlName: "x-ms-error-code",
+                type: {
+                    name: "String",
+                },
+            },
+        },
+    },
+};
+const BlobDeleteHeaders = {
+    serializedName: "Blob_deleteHeaders",
+    type: {
+        name: "Composite",
+        className: "BlobDeleteHeaders",
+        modelProperties: {
+            clientRequestId: {
+                serializedName: "x-ms-client-request-id",
+                xmlName: "x-ms-client-request-id",
+                type: {
+                    name: "String",
+                },
+            },
+            requestId: {
+                serializedName: "x-ms-request-id",
+                xmlName: "x-ms-request-id",
+                type: {
+                    name: "String",
+                },
+            },
+            version: {
+                serializedName: "x-ms-version",
+                xmlName: "x-ms-version",
+                type: {
+                    name: "String",
+                },
+            },
+            date: {
+                serializedName: "date",
+                xmlName: "date",
+                type: {
+                    name: "DateTimeRfc1123",
+                },
+            },
+            errorCode: {
+                serializedName: "x-ms-error-code",
+                xmlName: "x-ms-error-code",
+                type: {
+                    name: "String",
+                },
+            },
+        },
+    },
+};
+const BlobDeleteExceptionHeaders = {
+    serializedName: "Blob_deleteExceptionHeaders",
+    type: {
+        name: "Composite",
+        className: "BlobDeleteExceptionHeaders",
+        modelProperties: {
+            errorCode: {
+                serializedName: "x-ms-error-code",
+                xmlName: "x-ms-error-code",
+                type: {
+                    name: "String",
+                },
+            },
+        },
+    },
+};
+const BlobUndeleteHeaders = {
+    serializedName: "Blob_undeleteHeaders",
+    type: {
+        name: "Composite",
+        className: "BlobUndeleteHeaders",
+        modelProperties: {
+            clientRequestId: {
+                serializedName: "x-ms-client-request-id",
+                xmlName: "x-ms-client-request-id",
+                type: {
+                    name: "String",
+                },
+            },
+            requestId: {
+                serializedName: "x-ms-request-id",
+                xmlName: "x-ms-request-id",
+                type: {
+                    name: "String",
+                },
+            },
+            version: {
+                serializedName: "x-ms-version",
+                xmlName: "x-ms-version",
+                type: {
+                    name: "String",
+                },
+            },
+            date: {
+                serializedName: "date",
+                xmlName: "date",
+                type: {
+                    name: "DateTimeRfc1123",
+                },
+            },
+            errorCode: {
+                serializedName: "x-ms-error-code",
+                xmlName: "x-ms-error-code",
+                type: {
+                    name: "String",
+                },
+            },
+        },
+    },
+};
+const BlobUndeleteExceptionHeaders = {
+    serializedName: "Blob_undeleteExceptionHeaders",
+    type: {
+        name: "Composite",
+        className: "BlobUndeleteExceptionHeaders",
+        modelProperties: {
+            errorCode: {
+                serializedName: "x-ms-error-code",
+                xmlName: "x-ms-error-code",
+                type: {
+                    name: "String",
+                },
+            },
+        },
+    },
+};
+const BlobSetExpiryHeaders = {
+    serializedName: "Blob_setExpiryHeaders",
+    type: {
+        name: "Composite",
+        className: "BlobSetExpiryHeaders",
+        modelProperties: {
+            etag: {
+                serializedName: "etag",
+                xmlName: "etag",
+                type: {
+                    name: "String",
+                },
+            },
+            lastModified: {
+                serializedName: "last-modified",
+                xmlName: "last-modified",
+                type: {
+                    name: "DateTimeRfc1123",
+                },
+            },
+            clientRequestId: {
+                serializedName: "x-ms-client-request-id",
+                xmlName: "x-ms-client-request-id",
+                type: {
+                    name: "String",
+                },
+            },
+            requestId: {
+                serializedName: "x-ms-request-id",
+                xmlName: "x-ms-request-id",
+                type: {
+                    name: "String",
+                },
+            },
+            version: {
+                serializedName: "x-ms-version",
+                xmlName: "x-ms-version",
+                type: {
+                    name: "String",
+                },
+            },
+            date: {
+                serializedName: "date",
+                xmlName: "date",
+                type: {
+                    name: "DateTimeRfc1123",
+                },
+            },
+        },
+    },
+};
+const BlobSetExpiryExceptionHeaders = {
+    serializedName: "Blob_setExpiryExceptionHeaders",
+    type: {
+        name: "Composite",
+        className: "BlobSetExpiryExceptionHeaders",
+        modelProperties: {
+            errorCode: {
+                serializedName: "x-ms-error-code",
+                xmlName: "x-ms-error-code",
+                type: {
+                    name: "String",
+                },
+            },
+        },
+    },
+};
+const BlobSetHttpHeadersHeaders = {
+    serializedName: "Blob_setHttpHeadersHeaders",
+    type: {
+        name: "Composite",
+        className: "BlobSetHttpHeadersHeaders",
+        modelProperties: {
+            etag: {
+                serializedName: "etag",
+                xmlName: "etag",
+                type: {
+                    name: "String",
+                },
+            },
+            lastModified: {
+                serializedName: "last-modified",
+                xmlName: "last-modified",
+                type: {
+                    name: "DateTimeRfc1123",
+                },
+            },
+            blobSequenceNumber: {
+                serializedName: "x-ms-blob-sequence-number",
+                xmlName: "x-ms-blob-sequence-number",
+                type: {
+                    name: "Number",
+                },
+            },
+            clientRequestId: {
+                serializedName: "x-ms-client-request-id",
+                xmlName: "x-ms-client-request-id",
+                type: {
+                    name: "String",
+                },
+            },
+            requestId: {
+                serializedName: "x-ms-request-id",
+                xmlName: "x-ms-request-id",
+                type: {
+                    name: "String",
+                },
+            },
+            version: {
+                serializedName: "x-ms-version",
+                xmlName: "x-ms-version",
+                type: {
+                    name: "String",
+                },
+            },
+            date: {
+                serializedName: "date",
+                xmlName: "date",
+                type: {
+                    name: "DateTimeRfc1123",
+                },
+            },
+            errorCode: {
+                serializedName: "x-ms-error-code",
+                xmlName: "x-ms-error-code",
+                type: {
+                    name: "String",
+                },
+            },
+        },
+    },
+};
+const BlobSetHttpHeadersExceptionHeaders = {
+    serializedName: "Blob_setHttpHeadersExceptionHeaders",
+    type: {
+        name: "Composite",
+        className: "BlobSetHttpHeadersExceptionHeaders",
+        modelProperties: {
+            errorCode: {
+                serializedName: "x-ms-error-code",
+                xmlName: "x-ms-error-code",
+                type: {
+                    name: "String",
+                },
+            },
+        },
+    },
+};
+const BlobSetImmutabilityPolicyHeaders = {
+    serializedName: "Blob_setImmutabilityPolicyHeaders",
+    type: {
+        name: "Composite",
+        className: "BlobSetImmutabilityPolicyHeaders",
+        modelProperties: {
+            clientRequestId: {
+                serializedName: "x-ms-client-request-id",
+                xmlName: "x-ms-client-request-id",
+                type: {
+                    name: "String",
+                },
+            },
+            requestId: {
+                serializedName: "x-ms-request-id",
+                xmlName: "x-ms-request-id",
+                type: {
+                    name: "String",
+                },
+            },
+            version: {
+                serializedName: "x-ms-version",
+                xmlName: "x-ms-version",
+                type: {
+                    name: "String",
+                },
+            },
+            date: {
+                serializedName: "date",
+                xmlName: "date",
+                type: {
+                    name: "DateTimeRfc1123",
+                },
+            },
+            immutabilityPolicyExpiry: {
+                serializedName: "x-ms-immutability-policy-until-date",
+                xmlName: "x-ms-immutability-policy-until-date",
+                type: {
+                    name: "DateTimeRfc1123",
+                },
+            },
+            immutabilityPolicyMode: {
+                serializedName: "x-ms-immutability-policy-mode",
+                xmlName: "x-ms-immutability-policy-mode",
+                type: {
+                    name: "Enum",
+                    allowedValues: ["Mutable", "Unlocked", "Locked"],
+                },
+            },
+        },
+    },
+};
+const BlobSetImmutabilityPolicyExceptionHeaders = {
+    serializedName: "Blob_setImmutabilityPolicyExceptionHeaders",
+    type: {
+        name: "Composite",
+        className: "BlobSetImmutabilityPolicyExceptionHeaders",
+        modelProperties: {
+            errorCode: {
+                serializedName: "x-ms-error-code",
+                xmlName: "x-ms-error-code",
+                type: {
+                    name: "String",
+                },
+            },
+        },
+    },
+};
+const BlobDeleteImmutabilityPolicyHeaders = {
+    serializedName: "Blob_deleteImmutabilityPolicyHeaders",
+    type: {
+        name: "Composite",
+        className: "BlobDeleteImmutabilityPolicyHeaders",
+        modelProperties: {
+            clientRequestId: {
+                serializedName: "x-ms-client-request-id",
+                xmlName: "x-ms-client-request-id",
+                type: {
+                    name: "String",
+                },
+            },
+            requestId: {
+                serializedName: "x-ms-request-id",
+                xmlName: "x-ms-request-id",
+                type: {
+                    name: "String",
+                },
+            },
+            version: {
+                serializedName: "x-ms-version",
+                xmlName: "x-ms-version",
+                type: {
+                    name: "String",
+                },
+            },
+            date: {
+                serializedName: "date",
+                xmlName: "date",
+                type: {
+                    name: "DateTimeRfc1123",
+                },
+            },
+        },
+    },
+};
+const BlobDeleteImmutabilityPolicyExceptionHeaders = {
+    serializedName: "Blob_deleteImmutabilityPolicyExceptionHeaders",
+    type: {
+        name: "Composite",
+        className: "BlobDeleteImmutabilityPolicyExceptionHeaders",
+        modelProperties: {
+            errorCode: {
+                serializedName: "x-ms-error-code",
+                xmlName: "x-ms-error-code",
+                type: {
+                    name: "String",
+                },
+            },
+        },
+    },
+};
+const BlobSetLegalHoldHeaders = {
+    serializedName: "Blob_setLegalHoldHeaders",
+    type: {
+        name: "Composite",
+        className: "BlobSetLegalHoldHeaders",
+        modelProperties: {
+            clientRequestId: {
+                serializedName: "x-ms-client-request-id",
+                xmlName: "x-ms-client-request-id",
+                type: {
+                    name: "String",
+                },
+            },
+            requestId: {
+                serializedName: "x-ms-request-id",
+                xmlName: "x-ms-request-id",
+                type: {
+                    name: "String",
+                },
+            },
+            version: {
+                serializedName: "x-ms-version",
+                xmlName: "x-ms-version",
+                type: {
+                    name: "String",
+                },
+            },
+            date: {
+                serializedName: "date",
+                xmlName: "date",
+                type: {
+                    name: "DateTimeRfc1123",
+                },
+            },
+            legalHold: {
+                serializedName: "x-ms-legal-hold",
+                xmlName: "x-ms-legal-hold",
+                type: {
+                    name: "Boolean",
+                },
+            },
+        },
+    },
+};
+const BlobSetLegalHoldExceptionHeaders = {
+    serializedName: "Blob_setLegalHoldExceptionHeaders",
+    type: {
+        name: "Composite",
+        className: "BlobSetLegalHoldExceptionHeaders",
+        modelProperties: {
+            errorCode: {
+                serializedName: "x-ms-error-code",
+                xmlName: "x-ms-error-code",
+                type: {
+                    name: "String",
+                },
+            },
+        },
+    },
+};
+const BlobSetMetadataHeaders = {
+    serializedName: "Blob_setMetadataHeaders",
+    type: {
+        name: "Composite",
+        className: "BlobSetMetadataHeaders",
+        modelProperties: {
+            etag: {
+                serializedName: "etag",
+                xmlName: "etag",
+                type: {
+                    name: "String",
+                },
+            },
+            lastModified: {
+                serializedName: "last-modified",
+                xmlName: "last-modified",
+                type: {
+                    name: "DateTimeRfc1123",
+                },
+            },
+            clientRequestId: {
+                serializedName: "x-ms-client-request-id",
+                xmlName: "x-ms-client-request-id",
+                type: {
+                    name: "String",
+                },
+            },
+            requestId: {
+                serializedName: "x-ms-request-id",
+                xmlName: "x-ms-request-id",
+                type: {
+                    name: "String",
+                },
+            },
+            version: {
+                serializedName: "x-ms-version",
+                xmlName: "x-ms-version",
+                type: {
+                    name: "String",
+                },
+            },
+            versionId: {
+                serializedName: "x-ms-version-id",
+                xmlName: "x-ms-version-id",
+                type: {
+                    name: "String",
+                },
+            },
+            date: {
+                serializedName: "date",
+                xmlName: "date",
+                type: {
+                    name: "DateTimeRfc1123",
+                },
+            },
+            isServerEncrypted: {
+                serializedName: "x-ms-request-server-encrypted",
+                xmlName: "x-ms-request-server-encrypted",
+                type: {
+                    name: "Boolean",
+                },
+            },
+            encryptionKeySha256: {
+                serializedName: "x-ms-encryption-key-sha256",
+                xmlName: "x-ms-encryption-key-sha256",
+                type: {
+                    name: "String",
+                },
+            },
+            encryptionScope: {
+                serializedName: "x-ms-encryption-scope",
+                xmlName: "x-ms-encryption-scope",
+                type: {
+                    name: "String",
+                },
+            },
+            errorCode: {
+                serializedName: "x-ms-error-code",
+                xmlName: "x-ms-error-code",
+                type: {
+                    name: "String",
+                },
+            },
+        },
+    },
+};
+const BlobSetMetadataExceptionHeaders = {
+    serializedName: "Blob_setMetadataExceptionHeaders",
+    type: {
+        name: "Composite",
+        className: "BlobSetMetadataExceptionHeaders",
+        modelProperties: {
+            errorCode: {
+                serializedName: "x-ms-error-code",
+                xmlName: "x-ms-error-code",
+                type: {
+                    name: "String",
+                },
+            },
+        },
+    },
+};
+const BlobAcquireLeaseHeaders = {
+    serializedName: "Blob_acquireLeaseHeaders",
+    type: {
+        name: "Composite",
+        className: "BlobAcquireLeaseHeaders",
+        modelProperties: {
+            etag: {
+                serializedName: "etag",
+                xmlName: "etag",
+                type: {
+                    name: "String",
+                },
+            },
+            lastModified: {
+                serializedName: "last-modified",
+                xmlName: "last-modified",
+                type: {
+                    name: "DateTimeRfc1123",
+                },
+            },
+            leaseId: {
+                serializedName: "x-ms-lease-id",
+                xmlName: "x-ms-lease-id",
+                type: {
+                    name: "String",
+                },
+            },
+            clientRequestId: {
+                serializedName: "x-ms-client-request-id",
+                xmlName: "x-ms-client-request-id",
+                type: {
+                    name: "String",
+                },
+            },
+            requestId: {
+                serializedName: "x-ms-request-id",
+                xmlName: "x-ms-request-id",
+                type: {
+                    name: "String",
+                },
+            },
+            version: {
+                serializedName: "x-ms-version",
+                xmlName: "x-ms-version",
+                type: {
+                    name: "String",
+                },
+            },
+            date: {
+                serializedName: "date",
+                xmlName: "date",
+                type: {
+                    name: "DateTimeRfc1123",
+                },
+            },
+        },
+    },
+};
+const BlobAcquireLeaseExceptionHeaders = {
+    serializedName: "Blob_acquireLeaseExceptionHeaders",
+    type: {
+        name: "Composite",
+        className: "BlobAcquireLeaseExceptionHeaders",
+        modelProperties: {
+            errorCode: {
+                serializedName: "x-ms-error-code",
+                xmlName: "x-ms-error-code",
+                type: {
+                    name: "String",
+                },
+            },
+        },
+    },
+};
+const BlobReleaseLeaseHeaders = {
+    serializedName: "Blob_releaseLeaseHeaders",
+    type: {
+        name: "Composite",
+        className: "BlobReleaseLeaseHeaders",
+        modelProperties: {
+            etag: {
+                serializedName: "etag",
+                xmlName: "etag",
+                type: {
+                    name: "String",
+                },
+            },
+            lastModified: {
+                serializedName: "last-modified",
+                xmlName: "last-modified",
+                type: {
+                    name: "DateTimeRfc1123",
+                },
+            },
+            clientRequestId: {
+                serializedName: "x-ms-client-request-id",
+                xmlName: "x-ms-client-request-id",
+                type: {
+                    name: "String",
+                },
+            },
+            requestId: {
+                serializedName: "x-ms-request-id",
+                xmlName: "x-ms-request-id",
+                type: {
+                    name: "String",
+                },
+            },
+            version: {
+                serializedName: "x-ms-version",
+                xmlName: "x-ms-version",
+                type: {
+                    name: "String",
+                },
+            },
+            date: {
+                serializedName: "date",
+                xmlName: "date",
+                type: {
+                    name: "DateTimeRfc1123",
+                },
+            },
+        },
+    },
+};
+const BlobReleaseLeaseExceptionHeaders = {
+    serializedName: "Blob_releaseLeaseExceptionHeaders",
+    type: {
+        name: "Composite",
+        className: "BlobReleaseLeaseExceptionHeaders",
+        modelProperties: {
+            errorCode: {
+                serializedName: "x-ms-error-code",
+                xmlName: "x-ms-error-code",
+                type: {
+                    name: "String",
+                },
+            },
+        },
+    },
+};
+const BlobRenewLeaseHeaders = {
+    serializedName: "Blob_renewLeaseHeaders",
+    type: {
+        name: "Composite",
+        className: "BlobRenewLeaseHeaders",
+        modelProperties: {
+            etag: {
+                serializedName: "etag",
+                xmlName: "etag",
+                type: {
+                    name: "String",
+                },
+            },
+            lastModified: {
+                serializedName: "last-modified",
+                xmlName: "last-modified",
+                type: {
+                    name: "DateTimeRfc1123",
+                },
+            },
+            leaseId: {
+                serializedName: "x-ms-lease-id",
+                xmlName: "x-ms-lease-id",
+                type: {
+                    name: "String",
+                },
+            },
+            clientRequestId: {
+                serializedName: "x-ms-client-request-id",
+                xmlName: "x-ms-client-request-id",
+                type: {
+                    name: "String",
+                },
+            },
+            requestId: {
+                serializedName: "x-ms-request-id",
+                xmlName: "x-ms-request-id",
+                type: {
+                    name: "String",
+                },
+            },
+            version: {
+                serializedName: "x-ms-version",
+                xmlName: "x-ms-version",
+                type: {
+                    name: "String",
+                },
+            },
+            date: {
+                serializedName: "date",
+                xmlName: "date",
+                type: {
+                    name: "DateTimeRfc1123",
+                },
+            },
+        },
+    },
+};
+const BlobRenewLeaseExceptionHeaders = {
+    serializedName: "Blob_renewLeaseExceptionHeaders",
+    type: {
+        name: "Composite",
+        className: "BlobRenewLeaseExceptionHeaders",
+        modelProperties: {
+            errorCode: {
+                serializedName: "x-ms-error-code",
+                xmlName: "x-ms-error-code",
+                type: {
+                    name: "String",
+                },
+            },
+        },
+    },
+};
+const BlobChangeLeaseHeaders = {
+    serializedName: "Blob_changeLeaseHeaders",
+    type: {
+        name: "Composite",
+        className: "BlobChangeLeaseHeaders",
+        modelProperties: {
+            etag: {
+                serializedName: "etag",
+                xmlName: "etag",
+                type: {
+                    name: "String",
+                },
+            },
+            lastModified: {
+                serializedName: "last-modified",
+                xmlName: "last-modified",
+                type: {
+                    name: "DateTimeRfc1123",
+                },
+            },
+            clientRequestId: {
+                serializedName: "x-ms-client-request-id",
+                xmlName: "x-ms-client-request-id",
+                type: {
+                    name: "String",
+                },
+            },
+            requestId: {
+                serializedName: "x-ms-request-id",
+                xmlName: "x-ms-request-id",
+                type: {
+                    name: "String",
+                },
+            },
+            leaseId: {
+                serializedName: "x-ms-lease-id",
+                xmlName: "x-ms-lease-id",
+                type: {
+                    name: "String",
+                },
+            },
+            version: {
+                serializedName: "x-ms-version",
+                xmlName: "x-ms-version",
+                type: {
+                    name: "String",
+                },
+            },
+            date: {
+                serializedName: "date",
+                xmlName: "date",
+                type: {
+                    name: "DateTimeRfc1123",
+                },
+            },
+        },
+    },
+};
+const BlobChangeLeaseExceptionHeaders = {
+    serializedName: "Blob_changeLeaseExceptionHeaders",
+    type: {
+        name: "Composite",
+        className: "BlobChangeLeaseExceptionHeaders",
+        modelProperties: {
+            errorCode: {
+                serializedName: "x-ms-error-code",
+                xmlName: "x-ms-error-code",
+                type: {
+                    name: "String",
+                },
+            },
+        },
+    },
+};
+const BlobBreakLeaseHeaders = {
+    serializedName: "Blob_breakLeaseHeaders",
+    type: {
+        name: "Composite",
+        className: "BlobBreakLeaseHeaders",
+        modelProperties: {
+            etag: {
+                serializedName: "etag",
+                xmlName: "etag",
+                type: {
+                    name: "String",
+                },
+            },
+            lastModified: {
+                serializedName: "last-modified",
+                xmlName: "last-modified",
+                type: {
+                    name: "DateTimeRfc1123",
+                },
+            },
+            leaseTime: {
+                serializedName: "x-ms-lease-time",
+                xmlName: "x-ms-lease-time",
+                type: {
+                    name: "Number",
+                },
+            },
+            clientRequestId: {
+                serializedName: "x-ms-client-request-id",
+                xmlName: "x-ms-client-request-id",
+                type: {
+                    name: "String",
+                },
+            },
+            requestId: {
+                serializedName: "x-ms-request-id",
+                xmlName: "x-ms-request-id",
+                type: {
+                    name: "String",
+                },
+            },
+            version: {
+                serializedName: "x-ms-version",
+                xmlName: "x-ms-version",
+                type: {
+                    name: "String",
+                },
+            },
+            date: {
+                serializedName: "date",
+                xmlName: "date",
+                type: {
+                    name: "DateTimeRfc1123",
+                },
+            },
+        },
+    },
+};
+const BlobBreakLeaseExceptionHeaders = {
+    serializedName: "Blob_breakLeaseExceptionHeaders",
+    type: {
+        name: "Composite",
+        className: "BlobBreakLeaseExceptionHeaders",
+        modelProperties: {
+            errorCode: {
+                serializedName: "x-ms-error-code",
+                xmlName: "x-ms-error-code",
+                type: {
+                    name: "String",
+                },
+            },
+        },
+    },
+};
+const BlobCreateSnapshotHeaders = {
+    serializedName: "Blob_createSnapshotHeaders",
+    type: {
+        name: "Composite",
+        className: "BlobCreateSnapshotHeaders",
+        modelProperties: {
+            snapshot: {
+                serializedName: "x-ms-snapshot",
+                xmlName: "x-ms-snapshot",
+                type: {
+                    name: "String",
+                },
+            },
+            etag: {
+                serializedName: "etag",
+                xmlName: "etag",
+                type: {
+                    name: "String",
+                },
+            },
+            lastModified: {
+                serializedName: "last-modified",
+                xmlName: "last-modified",
+                type: {
+                    name: "DateTimeRfc1123",
+                },
+            },
+            clientRequestId: {
+                serializedName: "x-ms-client-request-id",
+                xmlName: "x-ms-client-request-id",
+                type: {
+                    name: "String",
+                },
+            },
+            requestId: {
+                serializedName: "x-ms-request-id",
+                xmlName: "x-ms-request-id",
+                type: {
+                    name: "String",
+                },
+            },
+            version: {
+                serializedName: "x-ms-version",
+                xmlName: "x-ms-version",
+                type: {
+                    name: "String",
+                },
+            },
+            versionId: {
+                serializedName: "x-ms-version-id",
+                xmlName: "x-ms-version-id",
+                type: {
+                    name: "String",
+                },
+            },
+            date: {
+                serializedName: "date",
+                xmlName: "date",
+                type: {
+                    name: "DateTimeRfc1123",
+                },
+            },
+            isServerEncrypted: {
+                serializedName: "x-ms-request-server-encrypted",
+                xmlName: "x-ms-request-server-encrypted",
+                type: {
+                    name: "Boolean",
+                },
+            },
+            errorCode: {
+                serializedName: "x-ms-error-code",
+                xmlName: "x-ms-error-code",
+                type: {
+                    name: "String",
+                },
+            },
+        },
+    },
+};
+const BlobCreateSnapshotExceptionHeaders = {
+    serializedName: "Blob_createSnapshotExceptionHeaders",
+    type: {
+        name: "Composite",
+        className: "BlobCreateSnapshotExceptionHeaders",
+        modelProperties: {
+            errorCode: {
+                serializedName: "x-ms-error-code",
+                xmlName: "x-ms-error-code",
+                type: {
+                    name: "String",
+                },
+            },
+        },
+    },
+};
+const BlobStartCopyFromURLHeaders = {
+    serializedName: "Blob_startCopyFromURLHeaders",
+    type: {
+        name: "Composite",
+        className: "BlobStartCopyFromURLHeaders",
+        modelProperties: {
+            etag: {
+                serializedName: "etag",
+                xmlName: "etag",
+                type: {
+                    name: "String",
+                },
+            },
+            lastModified: {
+                serializedName: "last-modified",
+                xmlName: "last-modified",
+                type: {
+                    name: "DateTimeRfc1123",
+                },
+            },
+            clientRequestId: {
+                serializedName: "x-ms-client-request-id",
+                xmlName: "x-ms-client-request-id",
+                type: {
+                    name: "String",
+                },
+            },
+            requestId: {
+                serializedName: "x-ms-request-id",
+                xmlName: "x-ms-request-id",
+                type: {
+                    name: "String",
+                },
+            },
+            version: {
+                serializedName: "x-ms-version",
+                xmlName: "x-ms-version",
+                type: {
+                    name: "String",
+                },
+            },
+            versionId: {
+                serializedName: "x-ms-version-id",
+                xmlName: "x-ms-version-id",
+                type: {
+                    name: "String",
+                },
+            },
+            date: {
+                serializedName: "date",
+                xmlName: "date",
+                type: {
+                    name: "DateTimeRfc1123",
+                },
+            },
+            copyId: {
+                serializedName: "x-ms-copy-id",
+                xmlName: "x-ms-copy-id",
+                type: {
+                    name: "String",
+                },
+            },
+            copyStatus: {
+                serializedName: "x-ms-copy-status",
+                xmlName: "x-ms-copy-status",
+                type: {
+                    name: "Enum",
+                    allowedValues: ["pending", "success", "aborted", "failed"],
+                },
+            },
+            errorCode: {
+                serializedName: "x-ms-error-code",
+                xmlName: "x-ms-error-code",
+                type: {
+                    name: "String",
+                },
+            },
+        },
+    },
+};
+const BlobStartCopyFromURLExceptionHeaders = {
+    serializedName: "Blob_startCopyFromURLExceptionHeaders",
+    type: {
+        name: "Composite",
+        className: "BlobStartCopyFromURLExceptionHeaders",
+        modelProperties: {
+            errorCode: {
+                serializedName: "x-ms-error-code",
+                xmlName: "x-ms-error-code",
+                type: {
+                    name: "String",
+                },
+            },
+        },
+    },
+};
+const BlobCopyFromURLHeaders = {
+    serializedName: "Blob_copyFromURLHeaders",
+    type: {
+        name: "Composite",
+        className: "BlobCopyFromURLHeaders",
+        modelProperties: {
+            etag: {
+                serializedName: "etag",
+                xmlName: "etag",
+                type: {
+                    name: "String",
+                },
+            },
+            lastModified: {
+                serializedName: "last-modified",
+                xmlName: "last-modified",
+                type: {
+                    name: "DateTimeRfc1123",
+                },
+            },
+            clientRequestId: {
+                serializedName: "x-ms-client-request-id",
+                xmlName: "x-ms-client-request-id",
+                type: {
+                    name: "String",
+                },
+            },
+            requestId: {
+                serializedName: "x-ms-request-id",
+                xmlName: "x-ms-request-id",
+                type: {
+                    name: "String",
+                },
+            },
+            version: {
+                serializedName: "x-ms-version",
+                xmlName: "x-ms-version",
+                type: {
+                    name: "String",
+                },
+            },
+            versionId: {
+                serializedName: "x-ms-version-id",
+                xmlName: "x-ms-version-id",
+                type: {
+                    name: "String",
+                },
+            },
+            date: {
+                serializedName: "date",
+                xmlName: "date",
+                type: {
+                    name: "DateTimeRfc1123",
+                },
+            },
+            copyId: {
+                serializedName: "x-ms-copy-id",
+                xmlName: "x-ms-copy-id",
+                type: {
+                    name: "String",
+                },
+            },
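+            // copyStatus is modeled as the constant "success" (isConstant/defaultValue):
+            // the synchronous Copy From URL operation reports the copy as complete when it returns.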
+            copyStatus: {
+                defaultValue: "success",
+                isConstant: true,
+                serializedName: "x-ms-copy-status",
+                type: {
+                    name: "String",
+                },
+            },
+            contentMD5: {
+                serializedName: "content-md5",
+                xmlName: "content-md5",
+                type: {
+                    name: "ByteArray",
+                },
+            },
+            xMsContentCrc64: {
+                serializedName: "x-ms-content-crc64",
+                xmlName: "x-ms-content-crc64",
+                type: {
+                    name: "ByteArray",
+                },
+            },
+            encryptionScope: {
+                serializedName: "x-ms-encryption-scope",
+                xmlName: "x-ms-encryption-scope",
+                type: {
+                    name: "String",
+                },
+            },
+            errorCode: {
+                serializedName: "x-ms-error-code",
+                xmlName: "x-ms-error-code",
+                type: {
+                    name: "String",
+                },
+            },
+        },
+    },
+};
+const BlobCopyFromURLExceptionHeaders = {
+    serializedName: "Blob_copyFromURLExceptionHeaders",
+    type: {
+        name: "Composite",
+        className: "BlobCopyFromURLExceptionHeaders",
+        modelProperties: {
+            errorCode: {
+                serializedName: "x-ms-error-code",
+                xmlName: "x-ms-error-code",
+                type: {
+                    name: "String",
+                },
+            },
+        },
+    },
+};
+const BlobAbortCopyFromURLHeaders = {
+    serializedName: "Blob_abortCopyFromURLHeaders",
+    type: {
+        name: "Composite",
+        className: "BlobAbortCopyFromURLHeaders",
+        modelProperties: {
+            clientRequestId: {
+                serializedName: "x-ms-client-request-id",
+                xmlName: "x-ms-client-request-id",
+                type: {
+                    name: "String",
+                },
+            },
+            requestId: {
+                serializedName: "x-ms-request-id",
+                xmlName: "x-ms-request-id",
+                type: {
+                    name: "String",
+                },
+            },
+            version: {
+                serializedName: "x-ms-version",
+                xmlName: "x-ms-version",
+                type: {
+                    name: "String",
+                },
+            },
+            date: {
+                serializedName: "date",
+                xmlName: "date",
+                type: {
+                    name: "DateTimeRfc1123",
+                },
+            },
+            errorCode: {
+                serializedName: "x-ms-error-code",
+                xmlName: "x-ms-error-code",
+                type: {
+                    name: "String",
+                },
+            },
+        },
+    },
+};
+const BlobAbortCopyFromURLExceptionHeaders = {
+    serializedName: "Blob_abortCopyFromURLExceptionHeaders",
+    type: {
+        name: "Composite",
+        className: "BlobAbortCopyFromURLExceptionHeaders",
+        modelProperties: {
+            errorCode: {
+                serializedName: "x-ms-error-code",
+                xmlName: "x-ms-error-code",
+                type: {
+                    name: "String",
+                },
+            },
+        },
+    },
+};
+const BlobSetTierHeaders = {
+    serializedName: "Blob_setTierHeaders",
+    type: {
+        name: "Composite",
+        className: "BlobSetTierHeaders",
+        modelProperties: {
+            clientRequestId: {
+                serializedName: "x-ms-client-request-id",
+                xmlName: "x-ms-client-request-id",
+                type: {
+                    name: "String",
+                },
+            },
+            requestId: {
+                serializedName: "x-ms-request-id",
+                xmlName: "x-ms-request-id",
+                type: {
+                    name: "String",
+                },
+            },
+            version: {
+                serializedName: "x-ms-version",
+                xmlName: "x-ms-version",
+                type: {
+                    name: "String",
+                },
+            },
+            errorCode: {
+                serializedName: "x-ms-error-code",
+                xmlName: "x-ms-error-code",
+                type: {
+                    name: "String",
+                },
+            },
+        },
+    },
+};
+const BlobSetTierExceptionHeaders = {
+    serializedName: "Blob_setTierExceptionHeaders",
+    type: {
+        name: "Composite",
+        className: "BlobSetTierExceptionHeaders",
+        modelProperties: {
+            errorCode: {
+                serializedName: "x-ms-error-code",
+                xmlName: "x-ms-error-code",
+                type: {
+                    name: "String",
+                },
+            },
+        },
+    },
+};
+const BlobGetAccountInfoHeaders = {
+    serializedName: "Blob_getAccountInfoHeaders",
+    type: {
+        name: "Composite",
+        className: "BlobGetAccountInfoHeaders",
+        modelProperties: {
+            clientRequestId: {
+                serializedName: "x-ms-client-request-id",
+                xmlName: "x-ms-client-request-id",
+                type: {
+                    name: "String",
+                },
+            },
+            requestId: {
+                serializedName: "x-ms-request-id",
+                xmlName: "x-ms-request-id",
+                type: {
+                    name: "String",
+                },
+            },
+            version: {
+                serializedName: "x-ms-version",
+                xmlName: "x-ms-version",
+                type: {
+                    name: "String",
+                },
+            },
+            date: {
+                serializedName: "date",
+                xmlName: "date",
+                type: {
+                    name: "DateTimeRfc1123",
+                },
+            },
+            skuName: {
+                serializedName: "x-ms-sku-name",
+                xmlName: "x-ms-sku-name",
+                type: {
+                    name: "Enum",
+                    allowedValues: [
+                        "Standard_LRS",
+                        "Standard_GRS",
+                        "Standard_RAGRS",
+                        "Standard_ZRS",
+                        "Premium_LRS",
+                    ],
+                },
+            },
+            accountKind: {
+                serializedName: "x-ms-account-kind",
+                xmlName: "x-ms-account-kind",
+                type: {
+                    name: "Enum",
+                    allowedValues: [
+                        "Storage",
+                        "BlobStorage",
+                        "StorageV2",
+                        "FileStorage",
+                        "BlockBlobStorage",
+                    ],
+                },
+            },
+            isHierarchicalNamespaceEnabled: {
+                serializedName: "x-ms-is-hns-enabled",
+                xmlName: "x-ms-is-hns-enabled",
+                type: {
+                    name: "Boolean",
+                },
+            },
+        },
+    },
+};
+const BlobGetAccountInfoExceptionHeaders = {
+    serializedName: "Blob_getAccountInfoExceptionHeaders",
+    type: {
+        name: "Composite",
+        className: "BlobGetAccountInfoExceptionHeaders",
+        modelProperties: {
+            errorCode: {
+                serializedName: "x-ms-error-code",
+                xmlName: "x-ms-error-code",
+                type: {
+                    name: "String",
+                },
+            },
+        },
+    },
+};
+const BlobQueryHeaders = {
+    serializedName: "Blob_queryHeaders",
+    type: {
+        name: "Composite",
+        className: "BlobQueryHeaders",
+        modelProperties: {
+            lastModified: {
+                serializedName: "last-modified",
+                xmlName: "last-modified",
+                type: {
+                    name: "DateTimeRfc1123",
+                },
+            },
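+            // headerCollectionPrefix gathers every x-ms-meta-* response header into this
+            // dictionary, with the prefix stripped from the resulting keys.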
+            metadata: {
+                serializedName: "x-ms-meta",
+                headerCollectionPrefix: "x-ms-meta-",
+                xmlName: "x-ms-meta",
+                type: {
+                    name: "Dictionary",
+                    value: { type: { name: "String" } },
+                },
+            },
+            contentLength: {
+                serializedName: "content-length",
+                xmlName: "content-length",
+                type: {
+                    name: "Number",
+                },
+            },
+            contentType: {
+                serializedName: "content-type",
+                xmlName: "content-type",
+                type: {
+                    name: "String",
+                },
+            },
+            contentRange: {
+                serializedName: "content-range",
+                xmlName: "content-range",
+                type: {
+                    name: "String",
+                },
+            },
+            etag: {
+                serializedName: "etag",
+                xmlName: "etag",
+                type: {
+                    name: "String",
+                },
+            },
+            contentMD5: {
+                serializedName: "content-md5",
+                xmlName: "content-md5",
+                type: {
+                    name: "ByteArray",
+                },
+            },
+            contentEncoding: {
+                serializedName: "content-encoding",
+                xmlName: "content-encoding",
+                type: {
+                    name: "String",
+                },
+            },
+            cacheControl: {
+                serializedName: "cache-control",
+                xmlName: "cache-control",
+                type: {
+                    name: "String",
+                },
+            },
+            contentDisposition: {
+                serializedName: "content-disposition",
+                xmlName: "content-disposition",
+                type: {
+                    name: "String",
+                },
+            },
+            contentLanguage: {
+                serializedName: "content-language",
+                xmlName: "content-language",
+                type: {
+                    name: "String",
+                },
+            },
+            blobSequenceNumber: {
+                serializedName: "x-ms-blob-sequence-number",
+                xmlName: "x-ms-blob-sequence-number",
+                type: {
+                    name: "Number",
+                },
+            },
+            blobType: {
+                serializedName: "x-ms-blob-type",
+                xmlName: "x-ms-blob-type",
+                type: {
+                    name: "Enum",
+                    allowedValues: ["BlockBlob", "PageBlob", "AppendBlob"],
+                },
+            },
+            copyCompletionTime: {
+                serializedName: "x-ms-copy-completion-time",
+                xmlName: "x-ms-copy-completion-time",
+                type: {
+                    name: "DateTimeRfc1123",
+                },
+            },
+            copyStatusDescription: {
+                serializedName: "x-ms-copy-status-description",
+                xmlName: "x-ms-copy-status-description",
+                type: {
+                    name: "String",
+                },
+            },
+            copyId: {
+                serializedName: "x-ms-copy-id",
+                xmlName: "x-ms-copy-id",
+                type: {
+                    name: "String",
+                },
+            },
+            copyProgress: {
+                serializedName: "x-ms-copy-progress",
+                xmlName: "x-ms-copy-progress",
+                type: {
+                    name: "String",
+                },
+            },
+            copySource: {
+                serializedName: "x-ms-copy-source",
+                xmlName: "x-ms-copy-source",
+                type: {
+                    name: "String",
+                },
+            },
+            copyStatus: {
+                serializedName: "x-ms-copy-status",
+                xmlName: "x-ms-copy-status",
+                type: {
+                    name: "Enum",
+                    allowedValues: ["pending", "success", "aborted", "failed"],
+                },
+            },
+            leaseDuration: {
+                serializedName: "x-ms-lease-duration",
+                xmlName: "x-ms-lease-duration",
+                type: {
+                    name: "Enum",
+                    allowedValues: ["infinite", "fixed"],
+                },
+            },
+            leaseState: {
+                serializedName: "x-ms-lease-state",
+                xmlName: "x-ms-lease-state",
+                type: {
+                    name: "Enum",
+                    allowedValues: [
+                        "available",
+                        "leased",
+                        "expired",
+                        "breaking",
+                        "broken",
+                    ],
+                },
+            },
+            leaseStatus: {
+                serializedName: "x-ms-lease-status",
+                xmlName: "x-ms-lease-status",
+                type: {
+                    name: "Enum",
+                    allowedValues: ["locked", "unlocked"],
+                },
+            },
+            clientRequestId: {
+                serializedName: "x-ms-client-request-id",
+                xmlName: "x-ms-client-request-id",
+                type: {
+                    name: "String",
+                },
+            },
+            requestId: {
+                serializedName: "x-ms-request-id",
+                xmlName: "x-ms-request-id",
+                type: {
+                    name: "String",
+                },
+            },
+            version: {
+                serializedName: "x-ms-version",
+                xmlName: "x-ms-version",
+                type: {
+                    name: "String",
+                },
+            },
+            acceptRanges: {
+                serializedName: "accept-ranges",
+                xmlName: "accept-ranges",
+                type: {
+                    name: "String",
+                },
+            },
+            date: {
+                serializedName: "date",
+                xmlName: "date",
+                type: {
+                    name: "DateTimeRfc1123",
+                },
+            },
+            blobCommittedBlockCount: {
+                serializedName: "x-ms-blob-committed-block-count",
+                xmlName: "x-ms-blob-committed-block-count",
+                type: {
+                    name: "Number",
+                },
+            },
+            isServerEncrypted: {
+                serializedName: "x-ms-server-encrypted",
+                xmlName: "x-ms-server-encrypted",
+                type: {
+                    name: "Boolean",
+                },
+            },
+            encryptionKeySha256: {
+                serializedName: "x-ms-encryption-key-sha256",
+                xmlName: "x-ms-encryption-key-sha256",
+                type: {
+                    name: "String",
+                },
+            },
+            encryptionScope: {
+                serializedName: "x-ms-encryption-scope",
+                xmlName: "x-ms-encryption-scope",
+                type: {
+                    name: "String",
+                },
+            },
+            blobContentMD5: {
+                serializedName: "x-ms-blob-content-md5",
+                xmlName: "x-ms-blob-content-md5",
+                type: {
+                    name: "ByteArray",
+                },
+            },
+            errorCode: {
+                serializedName: "x-ms-error-code",
+                xmlName: "x-ms-error-code",
+                type: {
+                    name: "String",
+                },
+            },
+            contentCrc64: {
+                serializedName: "x-ms-content-crc64",
+                xmlName: "x-ms-content-crc64",
+                type: {
+                    name: "ByteArray",
+                },
+            },
+        },
+    },
+};
+const BlobQueryExceptionHeaders = {
+    serializedName: "Blob_queryExceptionHeaders",
+    type: {
+        name: "Composite",
+        className: "BlobQueryExceptionHeaders",
+        modelProperties: {
+            errorCode: {
+                serializedName: "x-ms-error-code",
+                xmlName: "x-ms-error-code",
+                type: {
+                    name: "String",
+                },
+            },
+        },
+    },
+};
+const BlobGetTagsHeaders = {
+    serializedName: "Blob_getTagsHeaders",
+    type: {
+        name: "Composite",
+        className: "BlobGetTagsHeaders",
+        modelProperties: {
+            clientRequestId: {
+                serializedName: "x-ms-client-request-id",
+                xmlName: "x-ms-client-request-id",
+                type: {
+                    name: "String",
+                },
+            },
+            requestId: {
+                serializedName: "x-ms-request-id",
+                xmlName: "x-ms-request-id",
+                type: {
+                    name: "String",
+                },
+            },
+            version: {
+                serializedName: "x-ms-version",
+                xmlName: "x-ms-version",
+                type: {
+                    name: "String",
+                },
+            },
+            date: {
+                serializedName: "date",
+                xmlName: "date",
+                type: {
+                    name: "DateTimeRfc1123",
+                },
+            },
+            errorCode: {
+                serializedName: "x-ms-error-code",
+                xmlName: "x-ms-error-code",
+                type: {
+                    name: "String",
+                },
+            },
+        },
+    },
+};
+const BlobGetTagsExceptionHeaders = {
+    serializedName: "Blob_getTagsExceptionHeaders",
+    type: {
+        name: "Composite",
+        className: "BlobGetTagsExceptionHeaders",
+        modelProperties: {
+            errorCode: {
+                serializedName: "x-ms-error-code",
+                xmlName: "x-ms-error-code",
+                type: {
+                    name: "String",
+                },
+            },
+        },
+    },
+};
+const BlobSetTagsHeaders = {
+    serializedName: "Blob_setTagsHeaders",
+    type: {
+        name: "Composite",
+        className: "BlobSetTagsHeaders",
+        modelProperties: {
+            clientRequestId: {
+                serializedName: "x-ms-client-request-id",
+                xmlName: "x-ms-client-request-id",
+                type: {
+                    name: "String",
+                },
+            },
+            requestId: {
+                serializedName: "x-ms-request-id",
+                xmlName: "x-ms-request-id",
+                type: {
+                    name: "String",
+                },
+            },
+            version: {
+                serializedName: "x-ms-version",
+                xmlName: "x-ms-version",
+                type: {
+                    name: "String",
+                },
+            },
+            date: {
+                serializedName: "date",
+                xmlName: "date",
+                type: {
+                    name: "DateTimeRfc1123",
+                },
+            },
+            errorCode: {
+                serializedName: "x-ms-error-code",
+                xmlName: "x-ms-error-code",
+                type: {
+                    name: "String",
+                },
+            },
+        },
+    },
+};
+const BlobSetTagsExceptionHeaders = {
+    serializedName: "Blob_setTagsExceptionHeaders",
+    type: {
+        name: "Composite",
+        className: "BlobSetTagsExceptionHeaders",
+        modelProperties: {
+            errorCode: {
+                serializedName: "x-ms-error-code",
+                xmlName: "x-ms-error-code",
+                type: {
+                    name: "String",
+                },
+            },
+        },
+    },
+};
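+// The mappers below describe response headers for PageBlob operations
+// (create, uploadPages, uploadPagesFromURL, clearPages, getPageRanges,
+// getPageRangesDiff, resize, updateSequenceNumber, copyIncremental);
+// each is paired with an *ExceptionHeaders mapper that carries only x-ms-error-code.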
+const PageBlobCreateHeaders = {
+    serializedName: "PageBlob_createHeaders",
+    type: {
+        name: "Composite",
+        className: "PageBlobCreateHeaders",
+        modelProperties: {
+            etag: {
+                serializedName: "etag",
+                xmlName: "etag",
+                type: {
+                    name: "String",
+                },
+            },
+            lastModified: {
+                serializedName: "last-modified",
+                xmlName: "last-modified",
+                type: {
+                    name: "DateTimeRfc1123",
+                },
+            },
+            contentMD5: {
+                serializedName: "content-md5",
+                xmlName: "content-md5",
+                type: {
+                    name: "ByteArray",
+                },
+            },
+            clientRequestId: {
+                serializedName: "x-ms-client-request-id",
+                xmlName: "x-ms-client-request-id",
+                type: {
+                    name: "String",
+                },
+            },
+            requestId: {
+                serializedName: "x-ms-request-id",
+                xmlName: "x-ms-request-id",
+                type: {
+                    name: "String",
+                },
+            },
+            version: {
+                serializedName: "x-ms-version",
+                xmlName: "x-ms-version",
+                type: {
+                    name: "String",
+                },
+            },
+            versionId: {
+                serializedName: "x-ms-version-id",
+                xmlName: "x-ms-version-id",
+                type: {
+                    name: "String",
+                },
+            },
+            date: {
+                serializedName: "date",
+                xmlName: "date",
+                type: {
+                    name: "DateTimeRfc1123",
+                },
+            },
+            isServerEncrypted: {
+                serializedName: "x-ms-request-server-encrypted",
+                xmlName: "x-ms-request-server-encrypted",
+                type: {
+                    name: "Boolean",
+                },
+            },
+            encryptionKeySha256: {
+                serializedName: "x-ms-encryption-key-sha256",
+                xmlName: "x-ms-encryption-key-sha256",
+                type: {
+                    name: "String",
+                },
+            },
+            encryptionScope: {
+                serializedName: "x-ms-encryption-scope",
+                xmlName: "x-ms-encryption-scope",
+                type: {
+                    name: "String",
+                },
+            },
+            errorCode: {
+                serializedName: "x-ms-error-code",
+                xmlName: "x-ms-error-code",
+                type: {
+                    name: "String",
+                },
+            },
+        },
+    },
+};
+const PageBlobCreateExceptionHeaders = {
+    serializedName: "PageBlob_createExceptionHeaders",
+    type: {
+        name: "Composite",
+        className: "PageBlobCreateExceptionHeaders",
+        modelProperties: {
+            errorCode: {
+                serializedName: "x-ms-error-code",
+                xmlName: "x-ms-error-code",
+                type: {
+                    name: "String",
+                },
+            },
+        },
+    },
+};
+const PageBlobUploadPagesHeaders = {
+    serializedName: "PageBlob_uploadPagesHeaders",
+    type: {
+        name: "Composite",
+        className: "PageBlobUploadPagesHeaders",
+        modelProperties: {
+            etag: {
+                serializedName: "etag",
+                xmlName: "etag",
+                type: {
+                    name: "String",
+                },
+            },
+            lastModified: {
+                serializedName: "last-modified",
+                xmlName: "last-modified",
+                type: {
+                    name: "DateTimeRfc1123",
+                },
+            },
+            contentMD5: {
+                serializedName: "content-md5",
+                xmlName: "content-md5",
+                type: {
+                    name: "ByteArray",
+                },
+            },
+            xMsContentCrc64: {
+                serializedName: "x-ms-content-crc64",
+                xmlName: "x-ms-content-crc64",
+                type: {
+                    name: "ByteArray",
+                },
+            },
+            blobSequenceNumber: {
+                serializedName: "x-ms-blob-sequence-number",
+                xmlName: "x-ms-blob-sequence-number",
+                type: {
+                    name: "Number",
+                },
+            },
+            clientRequestId: {
+                serializedName: "x-ms-client-request-id",
+                xmlName: "x-ms-client-request-id",
+                type: {
+                    name: "String",
+                },
+            },
+            requestId: {
+                serializedName: "x-ms-request-id",
+                xmlName: "x-ms-request-id",
+                type: {
+                    name: "String",
+                },
+            },
+            version: {
+                serializedName: "x-ms-version",
+                xmlName: "x-ms-version",
+                type: {
+                    name: "String",
+                },
+            },
+            date: {
+                serializedName: "date",
+                xmlName: "date",
+                type: {
+                    name: "DateTimeRfc1123",
+                },
+            },
+            isServerEncrypted: {
+                serializedName: "x-ms-request-server-encrypted",
+                xmlName: "x-ms-request-server-encrypted",
+                type: {
+                    name: "Boolean",
+                },
+            },
+            encryptionKeySha256: {
+                serializedName: "x-ms-encryption-key-sha256",
+                xmlName: "x-ms-encryption-key-sha256",
+                type: {
+                    name: "String",
+                },
+            },
+            encryptionScope: {
+                serializedName: "x-ms-encryption-scope",
+                xmlName: "x-ms-encryption-scope",
+                type: {
+                    name: "String",
+                },
+            },
+            errorCode: {
+                serializedName: "x-ms-error-code",
+                xmlName: "x-ms-error-code",
+                type: {
+                    name: "String",
+                },
+            },
+        },
+    },
+};
+const PageBlobUploadPagesExceptionHeaders = {
+    serializedName: "PageBlob_uploadPagesExceptionHeaders",
+    type: {
+        name: "Composite",
+        className: "PageBlobUploadPagesExceptionHeaders",
+        modelProperties: {
+            errorCode: {
+                serializedName: "x-ms-error-code",
+                xmlName: "x-ms-error-code",
+                type: {
+                    name: "String",
+                },
+            },
+        },
+    },
+};
+const PageBlobClearPagesHeaders = {
+    serializedName: "PageBlob_clearPagesHeaders",
+    type: {
+        name: "Composite",
+        className: "PageBlobClearPagesHeaders",
+        modelProperties: {
+            etag: {
+                serializedName: "etag",
+                xmlName: "etag",
+                type: {
+                    name: "String",
+                },
+            },
+            lastModified: {
+                serializedName: "last-modified",
+                xmlName: "last-modified",
+                type: {
+                    name: "DateTimeRfc1123",
+                },
+            },
+            contentMD5: {
+                serializedName: "content-md5",
+                xmlName: "content-md5",
+                type: {
+                    name: "ByteArray",
+                },
+            },
+            xMsContentCrc64: {
+                serializedName: "x-ms-content-crc64",
+                xmlName: "x-ms-content-crc64",
+                type: {
+                    name: "ByteArray",
+                },
+            },
+            blobSequenceNumber: {
+                serializedName: "x-ms-blob-sequence-number",
+                xmlName: "x-ms-blob-sequence-number",
+                type: {
+                    name: "Number",
+                },
+            },
+            clientRequestId: {
+                serializedName: "x-ms-client-request-id",
+                xmlName: "x-ms-client-request-id",
+                type: {
+                    name: "String",
+                },
+            },
+            requestId: {
+                serializedName: "x-ms-request-id",
+                xmlName: "x-ms-request-id",
+                type: {
+                    name: "String",
+                },
+            },
+            version: {
+                serializedName: "x-ms-version",
+                xmlName: "x-ms-version",
+                type: {
+                    name: "String",
+                },
+            },
+            date: {
+                serializedName: "date",
+                xmlName: "date",
+                type: {
+                    name: "DateTimeRfc1123",
+                },
+            },
+            errorCode: {
+                serializedName: "x-ms-error-code",
+                xmlName: "x-ms-error-code",
+                type: {
+                    name: "String",
+                },
+            },
+        },
+    },
+};
+const PageBlobClearPagesExceptionHeaders = {
+    serializedName: "PageBlob_clearPagesExceptionHeaders",
+    type: {
+        name: "Composite",
+        className: "PageBlobClearPagesExceptionHeaders",
+        modelProperties: {
+            errorCode: {
+                serializedName: "x-ms-error-code",
+                xmlName: "x-ms-error-code",
+                type: {
+                    name: "String",
+                },
+            },
+        },
+    },
+};
+const PageBlobUploadPagesFromURLHeaders = {
+    serializedName: "PageBlob_uploadPagesFromURLHeaders",
+    type: {
+        name: "Composite",
+        className: "PageBlobUploadPagesFromURLHeaders",
+        modelProperties: {
+            etag: {
+                serializedName: "etag",
+                xmlName: "etag",
+                type: {
+                    name: "String",
+                },
+            },
+            lastModified: {
+                serializedName: "last-modified",
+                xmlName: "last-modified",
+                type: {
+                    name: "DateTimeRfc1123",
+                },
+            },
+            contentMD5: {
+                serializedName: "content-md5",
+                xmlName: "content-md5",
+                type: {
+                    name: "ByteArray",
+                },
+            },
+            xMsContentCrc64: {
+                serializedName: "x-ms-content-crc64",
+                xmlName: "x-ms-content-crc64",
+                type: {
+                    name: "ByteArray",
+                },
+            },
+            blobSequenceNumber: {
+                serializedName: "x-ms-blob-sequence-number",
+                xmlName: "x-ms-blob-sequence-number",
+                type: {
+                    name: "Number",
+                },
+            },
+            requestId: {
+                serializedName: "x-ms-request-id",
+                xmlName: "x-ms-request-id",
+                type: {
+                    name: "String",
+                },
+            },
+            version: {
+                serializedName: "x-ms-version",
+                xmlName: "x-ms-version",
+                type: {
+                    name: "String",
+                },
+            },
+            date: {
+                serializedName: "date",
+                xmlName: "date",
+                type: {
+                    name: "DateTimeRfc1123",
+                },
+            },
+            isServerEncrypted: {
+                serializedName: "x-ms-request-server-encrypted",
+                xmlName: "x-ms-request-server-encrypted",
+                type: {
+                    name: "Boolean",
+                },
+            },
+            encryptionKeySha256: {
+                serializedName: "x-ms-encryption-key-sha256",
+                xmlName: "x-ms-encryption-key-sha256",
+                type: {
+                    name: "String",
+                },
+            },
+            encryptionScope: {
+                serializedName: "x-ms-encryption-scope",
+                xmlName: "x-ms-encryption-scope",
+                type: {
+                    name: "String",
+                },
+            },
+            errorCode: {
+                serializedName: "x-ms-error-code",
+                xmlName: "x-ms-error-code",
+                type: {
+                    name: "String",
+                },
+            },
+        },
+    },
+};
+const PageBlobUploadPagesFromURLExceptionHeaders = {
+    serializedName: "PageBlob_uploadPagesFromURLExceptionHeaders",
+    type: {
+        name: "Composite",
+        className: "PageBlobUploadPagesFromURLExceptionHeaders",
+        modelProperties: {
+            errorCode: {
+                serializedName: "x-ms-error-code",
+                xmlName: "x-ms-error-code",
+                type: {
+                    name: "String",
+                },
+            },
+        },
+    },
+};
+const PageBlobGetPageRangesHeaders = {
+    serializedName: "PageBlob_getPageRangesHeaders",
+    type: {
+        name: "Composite",
+        className: "PageBlobGetPageRangesHeaders",
+        modelProperties: {
+            lastModified: {
+                serializedName: "last-modified",
+                xmlName: "last-modified",
+                type: {
+                    name: "DateTimeRfc1123",
+                },
+            },
+            etag: {
+                serializedName: "etag",
+                xmlName: "etag",
+                type: {
+                    name: "String",
+                },
+            },
+            blobContentLength: {
+                serializedName: "x-ms-blob-content-length",
+                xmlName: "x-ms-blob-content-length",
+                type: {
+                    name: "Number",
+                },
+            },
+            clientRequestId: {
+                serializedName: "x-ms-client-request-id",
+                xmlName: "x-ms-client-request-id",
+                type: {
+                    name: "String",
+                },
+            },
+            requestId: {
+                serializedName: "x-ms-request-id",
+                xmlName: "x-ms-request-id",
+                type: {
+                    name: "String",
+                },
+            },
+            version: {
+                serializedName: "x-ms-version",
+                xmlName: "x-ms-version",
+                type: {
+                    name: "String",
+                },
+            },
+            date: {
+                serializedName: "date",
+                xmlName: "date",
+                type: {
+                    name: "DateTimeRfc1123",
+                },
+            },
+            errorCode: {
+                serializedName: "x-ms-error-code",
+                xmlName: "x-ms-error-code",
+                type: {
+                    name: "String",
+                },
+            },
+        },
+    },
+};
+const PageBlobGetPageRangesExceptionHeaders = {
+    serializedName: "PageBlob_getPageRangesExceptionHeaders",
+    type: {
+        name: "Composite",
+        className: "PageBlobGetPageRangesExceptionHeaders",
+        modelProperties: {
+            errorCode: {
+                serializedName: "x-ms-error-code",
+                xmlName: "x-ms-error-code",
+                type: {
+                    name: "String",
+                },
+            },
+        },
+    },
+};
+const PageBlobGetPageRangesDiffHeaders = {
+    serializedName: "PageBlob_getPageRangesDiffHeaders",
+    type: {
+        name: "Composite",
+        className: "PageBlobGetPageRangesDiffHeaders",
+        modelProperties: {
+            lastModified: {
+                serializedName: "last-modified",
+                xmlName: "last-modified",
+                type: {
+                    name: "DateTimeRfc1123",
+                },
+            },
+            etag: {
+                serializedName: "etag",
+                xmlName: "etag",
+                type: {
+                    name: "String",
+                },
+            },
+            blobContentLength: {
+                serializedName: "x-ms-blob-content-length",
+                xmlName: "x-ms-blob-content-length",
+                type: {
+                    name: "Number",
+                },
+            },
+            clientRequestId: {
+                serializedName: "x-ms-client-request-id",
+                xmlName: "x-ms-client-request-id",
+                type: {
+                    name: "String",
+                },
+            },
+            requestId: {
+                serializedName: "x-ms-request-id",
+                xmlName: "x-ms-request-id",
+                type: {
+                    name: "String",
+                },
+            },
+            version: {
+                serializedName: "x-ms-version",
+                xmlName: "x-ms-version",
+                type: {
+                    name: "String",
+                },
+            },
+            date: {
+                serializedName: "date",
+                xmlName: "date",
+                type: {
+                    name: "DateTimeRfc1123",
+                },
+            },
+            errorCode: {
+                serializedName: "x-ms-error-code",
+                xmlName: "x-ms-error-code",
+                type: {
+                    name: "String",
+                },
+            },
+        },
+    },
+};
+const PageBlobGetPageRangesDiffExceptionHeaders = {
+    serializedName: "PageBlob_getPageRangesDiffExceptionHeaders",
+    type: {
+        name: "Composite",
+        className: "PageBlobGetPageRangesDiffExceptionHeaders",
+        modelProperties: {
+            errorCode: {
+                serializedName: "x-ms-error-code",
+                xmlName: "x-ms-error-code",
+                type: {
+                    name: "String",
+                },
+            },
+        },
+    },
+};
+const PageBlobResizeHeaders = {
+    serializedName: "PageBlob_resizeHeaders",
+    type: {
+        name: "Composite",
+        className: "PageBlobResizeHeaders",
+        modelProperties: {
+            etag: {
+                serializedName: "etag",
+                xmlName: "etag",
+                type: {
+                    name: "String",
+                },
+            },
+            lastModified: {
+                serializedName: "last-modified",
+                xmlName: "last-modified",
+                type: {
+                    name: "DateTimeRfc1123",
+                },
+            },
+            blobSequenceNumber: {
+                serializedName: "x-ms-blob-sequence-number",
+                xmlName: "x-ms-blob-sequence-number",
+                type: {
+                    name: "Number",
+                },
+            },
+            clientRequestId: {
+                serializedName: "x-ms-client-request-id",
+                xmlName: "x-ms-client-request-id",
+                type: {
+                    name: "String",
+                },
+            },
+            requestId: {
+                serializedName: "x-ms-request-id",
+                xmlName: "x-ms-request-id",
+                type: {
+                    name: "String",
+                },
+            },
+            version: {
+                serializedName: "x-ms-version",
+                xmlName: "x-ms-version",
+                type: {
+                    name: "String",
+                },
+            },
+            date: {
+                serializedName: "date",
+                xmlName: "date",
+                type: {
+                    name: "DateTimeRfc1123",
+                },
+            },
+            errorCode: {
+                serializedName: "x-ms-error-code",
+                xmlName: "x-ms-error-code",
+                type: {
+                    name: "String",
+                },
+            },
+        },
+    },
+};
+const PageBlobResizeExceptionHeaders = {
+    serializedName: "PageBlob_resizeExceptionHeaders",
+    type: {
+        name: "Composite",
+        className: "PageBlobResizeExceptionHeaders",
+        modelProperties: {
+            errorCode: {
+                serializedName: "x-ms-error-code",
+                xmlName: "x-ms-error-code",
+                type: {
+                    name: "String",
+                },
+            },
+        },
+    },
+};
+const PageBlobUpdateSequenceNumberHeaders = {
+    serializedName: "PageBlob_updateSequenceNumberHeaders",
+    type: {
+        name: "Composite",
+        className: "PageBlobUpdateSequenceNumberHeaders",
+        modelProperties: {
+            etag: {
+                serializedName: "etag",
+                xmlName: "etag",
+                type: {
+                    name: "String",
+                },
+            },
+            lastModified: {
+                serializedName: "last-modified",
+                xmlName: "last-modified",
+                type: {
+                    name: "DateTimeRfc1123",
+                },
+            },
+            blobSequenceNumber: {
+                serializedName: "x-ms-blob-sequence-number",
+                xmlName: "x-ms-blob-sequence-number",
+                type: {
+                    name: "Number",
+                },
+            },
+            clientRequestId: {
+                serializedName: "x-ms-client-request-id",
+                xmlName: "x-ms-client-request-id",
+                type: {
+                    name: "String",
+                },
+            },
+            requestId: {
+                serializedName: "x-ms-request-id",
+                xmlName: "x-ms-request-id",
+                type: {
+                    name: "String",
+                },
+            },
+            version: {
+                serializedName: "x-ms-version",
+                xmlName: "x-ms-version",
+                type: {
+                    name: "String",
+                },
+            },
+            date: {
+                serializedName: "date",
+                xmlName: "date",
+                type: {
+                    name: "DateTimeRfc1123",
+                },
+            },
+            errorCode: {
+                serializedName: "x-ms-error-code",
+                xmlName: "x-ms-error-code",
+                type: {
+                    name: "String",
+                },
+            },
+        },
+    },
+};
+const PageBlobUpdateSequenceNumberExceptionHeaders = {
+    serializedName: "PageBlob_updateSequenceNumberExceptionHeaders",
+    type: {
+        name: "Composite",
+        className: "PageBlobUpdateSequenceNumberExceptionHeaders",
+        modelProperties: {
+            errorCode: {
+                serializedName: "x-ms-error-code",
+                xmlName: "x-ms-error-code",
+                type: {
+                    name: "String",
+                },
+            },
+        },
+    },
+};
+const PageBlobCopyIncrementalHeaders = {
+    serializedName: "PageBlob_copyIncrementalHeaders",
+    type: {
+        name: "Composite",
+        className: "PageBlobCopyIncrementalHeaders",
+        modelProperties: {
+            etag: {
+                serializedName: "etag",
+                xmlName: "etag",
+                type: {
+                    name: "String",
+                },
+            },
+            lastModified: {
+                serializedName: "last-modified",
+                xmlName: "last-modified",
+                type: {
+                    name: "DateTimeRfc1123",
+                },
+            },
+            clientRequestId: {
+                serializedName: "x-ms-client-request-id",
+                xmlName: "x-ms-client-request-id",
+                type: {
+                    name: "String",
+                },
+            },
+            requestId: {
+                serializedName: "x-ms-request-id",
+                xmlName: "x-ms-request-id",
+                type: {
+                    name: "String",
+                },
+            },
+            version: {
+                serializedName: "x-ms-version",
+                xmlName: "x-ms-version",
+                type: {
+                    name: "String",
+                },
+            },
+            date: {
+                serializedName: "date",
+                xmlName: "date",
+                type: {
+                    name: "DateTimeRfc1123",
+                },
+            },
+            copyId: {
+                serializedName: "x-ms-copy-id",
+                xmlName: "x-ms-copy-id",
+                type: {
+                    name: "String",
+                },
+            },
+            copyStatus: {
+                serializedName: "x-ms-copy-status",
+                xmlName: "x-ms-copy-status",
+                type: {
+                    name: "Enum",
+                    allowedValues: ["pending", "success", "aborted", "failed"],
+                },
+            },
+            errorCode: {
+                serializedName: "x-ms-error-code",
+                xmlName: "x-ms-error-code",
+                type: {
+                    name: "String",
+                },
+            },
+        },
+    },
+};
+const PageBlobCopyIncrementalExceptionHeaders = {
+    serializedName: "PageBlob_copyIncrementalExceptionHeaders",
+    type: {
+        name: "Composite",
+        className: "PageBlobCopyIncrementalExceptionHeaders",
+        modelProperties: {
+            errorCode: {
+                serializedName: "x-ms-error-code",
+                xmlName: "x-ms-error-code",
+                type: {
+                    name: "String",
+                },
+            },
+        },
+    },
+};
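+// Response header mappers for AppendBlob operations (create, appendBlock,
+// appendBlockFromUrl, seal), following the same Headers / ExceptionHeaders pairing.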
+const AppendBlobCreateHeaders = {
+    serializedName: "AppendBlob_createHeaders",
+    type: {
+        name: "Composite",
+        className: "AppendBlobCreateHeaders",
+        modelProperties: {
+            etag: {
+                serializedName: "etag",
+                xmlName: "etag",
+                type: {
+                    name: "String",
+                },
+            },
+            lastModified: {
+                serializedName: "last-modified",
+                xmlName: "last-modified",
+                type: {
+                    name: "DateTimeRfc1123",
+                },
+            },
+            contentMD5: {
+                serializedName: "content-md5",
+                xmlName: "content-md5",
+                type: {
+                    name: "ByteArray",
+                },
+            },
+            clientRequestId: {
+                serializedName: "x-ms-client-request-id",
+                xmlName: "x-ms-client-request-id",
+                type: {
+                    name: "String",
+                },
+            },
+            requestId: {
+                serializedName: "x-ms-request-id",
+                xmlName: "x-ms-request-id",
+                type: {
+                    name: "String",
+                },
+            },
+            version: {
+                serializedName: "x-ms-version",
+                xmlName: "x-ms-version",
+                type: {
+                    name: "String",
+                },
+            },
+            versionId: {
+                serializedName: "x-ms-version-id",
+                xmlName: "x-ms-version-id",
+                type: {
+                    name: "String",
+                },
+            },
+            date: {
+                serializedName: "date",
+                xmlName: "date",
+                type: {
+                    name: "DateTimeRfc1123",
+                },
+            },
+            isServerEncrypted: {
+                serializedName: "x-ms-request-server-encrypted",
+                xmlName: "x-ms-request-server-encrypted",
+                type: {
+                    name: "Boolean",
+                },
+            },
+            encryptionKeySha256: {
+                serializedName: "x-ms-encryption-key-sha256",
+                xmlName: "x-ms-encryption-key-sha256",
+                type: {
+                    name: "String",
+                },
+            },
+            encryptionScope: {
+                serializedName: "x-ms-encryption-scope",
+                xmlName: "x-ms-encryption-scope",
+                type: {
+                    name: "String",
+                },
+            },
+            errorCode: {
+                serializedName: "x-ms-error-code",
+                xmlName: "x-ms-error-code",
+                type: {
+                    name: "String",
+                },
+            },
+        },
+    },
+};
+const AppendBlobCreateExceptionHeaders = {
+    serializedName: "AppendBlob_createExceptionHeaders",
+    type: {
+        name: "Composite",
+        className: "AppendBlobCreateExceptionHeaders",
+        modelProperties: {
+            errorCode: {
+                serializedName: "x-ms-error-code",
+                xmlName: "x-ms-error-code",
+                type: {
+                    name: "String",
+                },
+            },
+        },
+    },
+};
+const AppendBlobAppendBlockHeaders = {
+    serializedName: "AppendBlob_appendBlockHeaders",
+    type: {
+        name: "Composite",
+        className: "AppendBlobAppendBlockHeaders",
+        modelProperties: {
+            etag: {
+                serializedName: "etag",
+                xmlName: "etag",
+                type: {
+                    name: "String",
+                },
+            },
+            lastModified: {
+                serializedName: "last-modified",
+                xmlName: "last-modified",
+                type: {
+                    name: "DateTimeRfc1123",
+                },
+            },
+            contentMD5: {
+                serializedName: "content-md5",
+                xmlName: "content-md5",
+                type: {
+                    name: "ByteArray",
+                },
+            },
+            xMsContentCrc64: {
+                serializedName: "x-ms-content-crc64",
+                xmlName: "x-ms-content-crc64",
+                type: {
+                    name: "ByteArray",
+                },
+            },
+            clientRequestId: {
+                serializedName: "x-ms-client-request-id",
+                xmlName: "x-ms-client-request-id",
+                type: {
+                    name: "String",
+                },
+            },
+            requestId: {
+                serializedName: "x-ms-request-id",
+                xmlName: "x-ms-request-id",
+                type: {
+                    name: "String",
+                },
+            },
+            version: {
+                serializedName: "x-ms-version",
+                xmlName: "x-ms-version",
+                type: {
+                    name: "String",
+                },
+            },
+            date: {
+                serializedName: "date",
+                xmlName: "date",
+                type: {
+                    name: "DateTimeRfc1123",
+                },
+            },
+            blobAppendOffset: {
+                serializedName: "x-ms-blob-append-offset",
+                xmlName: "x-ms-blob-append-offset",
+                type: {
+                    name: "String",
+                },
+            },
+            blobCommittedBlockCount: {
+                serializedName: "x-ms-blob-committed-block-count",
+                xmlName: "x-ms-blob-committed-block-count",
+                type: {
+                    name: "Number",
+                },
+            },
+            isServerEncrypted: {
+                serializedName: "x-ms-request-server-encrypted",
+                xmlName: "x-ms-request-server-encrypted",
+                type: {
+                    name: "Boolean",
+                },
+            },
+            encryptionKeySha256: {
+                serializedName: "x-ms-encryption-key-sha256",
+                xmlName: "x-ms-encryption-key-sha256",
+                type: {
+                    name: "String",
+                },
+            },
+            encryptionScope: {
+                serializedName: "x-ms-encryption-scope",
+                xmlName: "x-ms-encryption-scope",
+                type: {
+                    name: "String",
+                },
+            },
+            errorCode: {
+                serializedName: "x-ms-error-code",
+                xmlName: "x-ms-error-code",
+                type: {
+                    name: "String",
+                },
+            },
+        },
+    },
+};
+const AppendBlobAppendBlockExceptionHeaders = {
+    serializedName: "AppendBlob_appendBlockExceptionHeaders",
+    type: {
+        name: "Composite",
+        className: "AppendBlobAppendBlockExceptionHeaders",
+        modelProperties: {
+            errorCode: {
+                serializedName: "x-ms-error-code",
+                xmlName: "x-ms-error-code",
+                type: {
+                    name: "String",
+                },
+            },
+        },
+    },
+};
+const AppendBlobAppendBlockFromUrlHeaders = {
+    serializedName: "AppendBlob_appendBlockFromUrlHeaders",
+    type: {
+        name: "Composite",
+        className: "AppendBlobAppendBlockFromUrlHeaders",
+        modelProperties: {
+            etag: {
+                serializedName: "etag",
+                xmlName: "etag",
+                type: {
+                    name: "String",
+                },
+            },
+            lastModified: {
+                serializedName: "last-modified",
+                xmlName: "last-modified",
+                type: {
+                    name: "DateTimeRfc1123",
+                },
+            },
+            contentMD5: {
+                serializedName: "content-md5",
+                xmlName: "content-md5",
+                type: {
+                    name: "ByteArray",
+                },
+            },
+            xMsContentCrc64: {
+                serializedName: "x-ms-content-crc64",
+                xmlName: "x-ms-content-crc64",
+                type: {
+                    name: "ByteArray",
+                },
+            },
+            requestId: {
+                serializedName: "x-ms-request-id",
+                xmlName: "x-ms-request-id",
+                type: {
+                    name: "String",
+                },
+            },
+            version: {
+                serializedName: "x-ms-version",
+                xmlName: "x-ms-version",
+                type: {
+                    name: "String",
+                },
+            },
+            date: {
+                serializedName: "date",
+                xmlName: "date",
+                type: {
+                    name: "DateTimeRfc1123",
+                },
+            },
+            blobAppendOffset: {
+                serializedName: "x-ms-blob-append-offset",
+                xmlName: "x-ms-blob-append-offset",
+                type: {
+                    name: "String",
+                },
+            },
+            blobCommittedBlockCount: {
+                serializedName: "x-ms-blob-committed-block-count",
+                xmlName: "x-ms-blob-committed-block-count",
+                type: {
+                    name: "Number",
+                },
+            },
+            encryptionKeySha256: {
+                serializedName: "x-ms-encryption-key-sha256",
+                xmlName: "x-ms-encryption-key-sha256",
+                type: {
+                    name: "String",
+                },
+            },
+            encryptionScope: {
+                serializedName: "x-ms-encryption-scope",
+                xmlName: "x-ms-encryption-scope",
+                type: {
+                    name: "String",
+                },
+            },
+            isServerEncrypted: {
+                serializedName: "x-ms-request-server-encrypted",
+                xmlName: "x-ms-request-server-encrypted",
+                type: {
+                    name: "Boolean",
+                },
+            },
+            errorCode: {
+                serializedName: "x-ms-error-code",
+                xmlName: "x-ms-error-code",
+                type: {
+                    name: "String",
+                },
+            },
+        },
+    },
+};
+const AppendBlobAppendBlockFromUrlExceptionHeaders = {
+    serializedName: "AppendBlob_appendBlockFromUrlExceptionHeaders",
+    type: {
+        name: "Composite",
+        className: "AppendBlobAppendBlockFromUrlExceptionHeaders",
+        modelProperties: {
+            errorCode: {
+                serializedName: "x-ms-error-code",
+                xmlName: "x-ms-error-code",
+                type: {
+                    name: "String",
+                },
+            },
+        },
+    },
+};
+const AppendBlobSealHeaders = {
+    serializedName: "AppendBlob_sealHeaders",
+    type: {
+        name: "Composite",
+        className: "AppendBlobSealHeaders",
+        modelProperties: {
+            etag: {
+                serializedName: "etag",
+                xmlName: "etag",
+                type: {
+                    name: "String",
+                },
+            },
+            lastModified: {
+                serializedName: "last-modified",
+                xmlName: "last-modified",
+                type: {
+                    name: "DateTimeRfc1123",
+                },
+            },
+            clientRequestId: {
+                serializedName: "x-ms-client-request-id",
+                xmlName: "x-ms-client-request-id",
+                type: {
+                    name: "String",
+                },
+            },
+            requestId: {
+                serializedName: "x-ms-request-id",
+                xmlName: "x-ms-request-id",
+                type: {
+                    name: "String",
+                },
+            },
+            version: {
+                serializedName: "x-ms-version",
+                xmlName: "x-ms-version",
+                type: {
+                    name: "String",
+                },
+            },
+            date: {
+                serializedName: "date",
+                xmlName: "date",
+                type: {
+                    name: "DateTimeRfc1123",
+                },
+            },
+            isSealed: {
+                serializedName: "x-ms-blob-sealed",
+                xmlName: "x-ms-blob-sealed",
+                type: {
+                    name: "Boolean",
+                },
+            },
+        },
+    },
+};
+const AppendBlobSealExceptionHeaders = {
+    serializedName: "AppendBlob_sealExceptionHeaders",
+    type: {
+        name: "Composite",
+        className: "AppendBlobSealExceptionHeaders",
+        modelProperties: {
+            errorCode: {
+                serializedName: "x-ms-error-code",
+                xmlName: "x-ms-error-code",
+                type: {
+                    name: "String",
+                },
+            },
+        },
+    },
+};
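+// Response header mappers for BlockBlob operations (upload, putBlobFromUrl,
+// stageBlock, stageBlockFromURL, commitBlockList), again paired with
+// *ExceptionHeaders mappers for error responses.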
+const BlockBlobUploadHeaders = {
+    serializedName: "BlockBlob_uploadHeaders",
+    type: {
+        name: "Composite",
+        className: "BlockBlobUploadHeaders",
+        modelProperties: {
+            etag: {
+                serializedName: "etag",
+                xmlName: "etag",
+                type: {
+                    name: "String",
+                },
+            },
+            lastModified: {
+                serializedName: "last-modified",
+                xmlName: "last-modified",
+                type: {
+                    name: "DateTimeRfc1123",
+                },
+            },
+            contentMD5: {
+                serializedName: "content-md5",
+                xmlName: "content-md5",
+                type: {
+                    name: "ByteArray",
+                },
+            },
+            clientRequestId: {
+                serializedName: "x-ms-client-request-id",
+                xmlName: "x-ms-client-request-id",
+                type: {
+                    name: "String",
+                },
+            },
+            requestId: {
+                serializedName: "x-ms-request-id",
+                xmlName: "x-ms-request-id",
+                type: {
+                    name: "String",
+                },
+            },
+            version: {
+                serializedName: "x-ms-version",
+                xmlName: "x-ms-version",
+                type: {
+                    name: "String",
+                },
+            },
+            versionId: {
+                serializedName: "x-ms-version-id",
+                xmlName: "x-ms-version-id",
+                type: {
+                    name: "String",
+                },
+            },
+            date: {
+                serializedName: "date",
+                xmlName: "date",
+                type: {
+                    name: "DateTimeRfc1123",
+                },
+            },
+            isServerEncrypted: {
+                serializedName: "x-ms-request-server-encrypted",
+                xmlName: "x-ms-request-server-encrypted",
+                type: {
+                    name: "Boolean",
+                },
+            },
+            encryptionKeySha256: {
+                serializedName: "x-ms-encryption-key-sha256",
+                xmlName: "x-ms-encryption-key-sha256",
+                type: {
+                    name: "String",
+                },
+            },
+            encryptionScope: {
+                serializedName: "x-ms-encryption-scope",
+                xmlName: "x-ms-encryption-scope",
+                type: {
+                    name: "String",
+                },
+            },
+            errorCode: {
+                serializedName: "x-ms-error-code",
+                xmlName: "x-ms-error-code",
+                type: {
+                    name: "String",
+                },
+            },
+        },
+    },
+};
+const BlockBlobUploadExceptionHeaders = {
+    serializedName: "BlockBlob_uploadExceptionHeaders",
+    type: {
+        name: "Composite",
+        className: "BlockBlobUploadExceptionHeaders",
+        modelProperties: {
+            errorCode: {
+                serializedName: "x-ms-error-code",
+                xmlName: "x-ms-error-code",
+                type: {
+                    name: "String",
+                },
+            },
+        },
+    },
+};
+const BlockBlobPutBlobFromUrlHeaders = {
+    serializedName: "BlockBlob_putBlobFromUrlHeaders",
+    type: {
+        name: "Composite",
+        className: "BlockBlobPutBlobFromUrlHeaders",
+        modelProperties: {
+            etag: {
+                serializedName: "etag",
+                xmlName: "etag",
+                type: {
+                    name: "String",
+                },
+            },
+            lastModified: {
+                serializedName: "last-modified",
+                xmlName: "last-modified",
+                type: {
+                    name: "DateTimeRfc1123",
+                },
+            },
+            contentMD5: {
+                serializedName: "content-md5",
+                xmlName: "content-md5",
+                type: {
+                    name: "ByteArray",
+                },
+            },
+            clientRequestId: {
+                serializedName: "x-ms-client-request-id",
+                xmlName: "x-ms-client-request-id",
+                type: {
+                    name: "String",
+                },
+            },
+            requestId: {
+                serializedName: "x-ms-request-id",
+                xmlName: "x-ms-request-id",
+                type: {
+                    name: "String",
+                },
+            },
+            version: {
+                serializedName: "x-ms-version",
+                xmlName: "x-ms-version",
+                type: {
+                    name: "String",
+                },
+            },
+            versionId: {
+                serializedName: "x-ms-version-id",
+                xmlName: "x-ms-version-id",
+                type: {
+                    name: "String",
+                },
+            },
+            date: {
+                serializedName: "date",
+                xmlName: "date",
+                type: {
+                    name: "DateTimeRfc1123",
+                },
+            },
+            isServerEncrypted: {
+                serializedName: "x-ms-request-server-encrypted",
+                xmlName: "x-ms-request-server-encrypted",
+                type: {
+                    name: "Boolean",
+                },
+            },
+            encryptionKeySha256: {
+                serializedName: "x-ms-encryption-key-sha256",
+                xmlName: "x-ms-encryption-key-sha256",
+                type: {
+                    name: "String",
+                },
+            },
+            encryptionScope: {
+                serializedName: "x-ms-encryption-scope",
+                xmlName: "x-ms-encryption-scope",
+                type: {
+                    name: "String",
+                },
+            },
+            errorCode: {
+                serializedName: "x-ms-error-code",
+                xmlName: "x-ms-error-code",
+                type: {
+                    name: "String",
+                },
+            },
+        },
+    },
+};
+const BlockBlobPutBlobFromUrlExceptionHeaders = {
+    serializedName: "BlockBlob_putBlobFromUrlExceptionHeaders",
+    type: {
+        name: "Composite",
+        className: "BlockBlobPutBlobFromUrlExceptionHeaders",
+        modelProperties: {
+            errorCode: {
+                serializedName: "x-ms-error-code",
+                xmlName: "x-ms-error-code",
+                type: {
+                    name: "String",
+                },
+            },
+        },
+    },
+};
+const BlockBlobStageBlockHeaders = {
+    serializedName: "BlockBlob_stageBlockHeaders",
+    type: {
+        name: "Composite",
+        className: "BlockBlobStageBlockHeaders",
+        modelProperties: {
+            contentMD5: {
+                serializedName: "content-md5",
+                xmlName: "content-md5",
+                type: {
+                    name: "ByteArray",
+                },
+            },
+            clientRequestId: {
+                serializedName: "x-ms-client-request-id",
+                xmlName: "x-ms-client-request-id",
+                type: {
+                    name: "String",
+                },
+            },
+            requestId: {
+                serializedName: "x-ms-request-id",
+                xmlName: "x-ms-request-id",
+                type: {
+                    name: "String",
+                },
+            },
+            version: {
+                serializedName: "x-ms-version",
+                xmlName: "x-ms-version",
+                type: {
+                    name: "String",
+                },
+            },
+            date: {
+                serializedName: "date",
+                xmlName: "date",
+                type: {
+                    name: "DateTimeRfc1123",
+                },
+            },
+            xMsContentCrc64: {
+                serializedName: "x-ms-content-crc64",
+                xmlName: "x-ms-content-crc64",
+                type: {
+                    name: "ByteArray",
+                },
+            },
+            isServerEncrypted: {
+                serializedName: "x-ms-request-server-encrypted",
+                xmlName: "x-ms-request-server-encrypted",
+                type: {
+                    name: "Boolean",
+                },
+            },
+            encryptionKeySha256: {
+                serializedName: "x-ms-encryption-key-sha256",
+                xmlName: "x-ms-encryption-key-sha256",
+                type: {
+                    name: "String",
+                },
+            },
+            encryptionScope: {
+                serializedName: "x-ms-encryption-scope",
+                xmlName: "x-ms-encryption-scope",
+                type: {
+                    name: "String",
+                },
+            },
+            errorCode: {
+                serializedName: "x-ms-error-code",
+                xmlName: "x-ms-error-code",
+                type: {
+                    name: "String",
+                },
+            },
+        },
+    },
+};
+const BlockBlobStageBlockExceptionHeaders = {
+    serializedName: "BlockBlob_stageBlockExceptionHeaders",
+    type: {
+        name: "Composite",
+        className: "BlockBlobStageBlockExceptionHeaders",
+        modelProperties: {
+            errorCode: {
+                serializedName: "x-ms-error-code",
+                xmlName: "x-ms-error-code",
+                type: {
+                    name: "String",
+                },
+            },
+        },
+    },
+};
+const BlockBlobStageBlockFromURLHeaders = {
+    serializedName: "BlockBlob_stageBlockFromURLHeaders",
+    type: {
+        name: "Composite",
+        className: "BlockBlobStageBlockFromURLHeaders",
+        modelProperties: {
+            contentMD5: {
+                serializedName: "content-md5",
+                xmlName: "content-md5",
+                type: {
+                    name: "ByteArray",
+                },
+            },
+            xMsContentCrc64: {
+                serializedName: "x-ms-content-crc64",
+                xmlName: "x-ms-content-crc64",
+                type: {
+                    name: "ByteArray",
+                },
+            },
+            clientRequestId: {
+                serializedName: "x-ms-client-request-id",
+                xmlName: "x-ms-client-request-id",
+                type: {
+                    name: "String",
+                },
+            },
+            requestId: {
+                serializedName: "x-ms-request-id",
+                xmlName: "x-ms-request-id",
+                type: {
+                    name: "String",
+                },
+            },
+            version: {
+                serializedName: "x-ms-version",
+                xmlName: "x-ms-version",
+                type: {
+                    name: "String",
+                },
+            },
+            date: {
+                serializedName: "date",
+                xmlName: "date",
+                type: {
+                    name: "DateTimeRfc1123",
+                },
+            },
+            isServerEncrypted: {
+                serializedName: "x-ms-request-server-encrypted",
+                xmlName: "x-ms-request-server-encrypted",
+                type: {
+                    name: "Boolean",
+                },
+            },
+            encryptionKeySha256: {
+                serializedName: "x-ms-encryption-key-sha256",
+                xmlName: "x-ms-encryption-key-sha256",
+                type: {
+                    name: "String",
+                },
+            },
+            encryptionScope: {
+                serializedName: "x-ms-encryption-scope",
+                xmlName: "x-ms-encryption-scope",
+                type: {
+                    name: "String",
+                },
+            },
+            errorCode: {
+                serializedName: "x-ms-error-code",
+                xmlName: "x-ms-error-code",
+                type: {
+                    name: "String",
+                },
+            },
+        },
+    },
+};
+const BlockBlobStageBlockFromURLExceptionHeaders = {
+    serializedName: "BlockBlob_stageBlockFromURLExceptionHeaders",
+    type: {
+        name: "Composite",
+        className: "BlockBlobStageBlockFromURLExceptionHeaders",
+        modelProperties: {
+            errorCode: {
+                serializedName: "x-ms-error-code",
+                xmlName: "x-ms-error-code",
+                type: {
+                    name: "String",
+                },
+            },
+        },
+    },
+};
+const BlockBlobCommitBlockListHeaders = {
+    serializedName: "BlockBlob_commitBlockListHeaders",
+    type: {
+        name: "Composite",
+        className: "BlockBlobCommitBlockListHeaders",
+        modelProperties: {
+            etag: {
+                serializedName: "etag",
+                xmlName: "etag",
+                type: {
+                    name: "String",
+                },
+            },
+            lastModified: {
+                serializedName: "last-modified",
+                xmlName: "last-modified",
+                type: {
+                    name: "DateTimeRfc1123",
+                },
+            },
+            contentMD5: {
+                serializedName: "content-md5",
+                xmlName: "content-md5",
+                type: {
+                    name: "ByteArray",
+                },
+            },
+            xMsContentCrc64: {
+                serializedName: "x-ms-content-crc64",
+                xmlName: "x-ms-content-crc64",
+                type: {
+                    name: "ByteArray",
+                },
+            },
+            clientRequestId: {
+                serializedName: "x-ms-client-request-id",
+                xmlName: "x-ms-client-request-id",
+                type: {
+                    name: "String",
+                },
+            },
+            requestId: {
+                serializedName: "x-ms-request-id",
+                xmlName: "x-ms-request-id",
+                type: {
+                    name: "String",
+                },
+            },
+            version: {
+                serializedName: "x-ms-version",
+                xmlName: "x-ms-version",
+                type: {
+                    name: "String",
+                },
+            },
+            versionId: {
+                serializedName: "x-ms-version-id",
+                xmlName: "x-ms-version-id",
+                type: {
+                    name: "String",
+                },
+            },
+            date: {
+                serializedName: "date",
+                xmlName: "date",
+                type: {
+                    name: "DateTimeRfc1123",
+                },
+            },
+            isServerEncrypted: {
+                serializedName: "x-ms-request-server-encrypted",
+                xmlName: "x-ms-request-server-encrypted",
+                type: {
+                    name: "Boolean",
+                },
+            },
+            encryptionKeySha256: {
+                serializedName: "x-ms-encryption-key-sha256",
+                xmlName: "x-ms-encryption-key-sha256",
+                type: {
+                    name: "String",
+                },
+            },
+            encryptionScope: {
+                serializedName: "x-ms-encryption-scope",
+                xmlName: "x-ms-encryption-scope",
+                type: {
+                    name: "String",
+                },
+            },
+            errorCode: {
+                serializedName: "x-ms-error-code",
+                xmlName: "x-ms-error-code",
+                type: {
+                    name: "String",
+                },
+            },
+        },
+    },
+};
+const BlockBlobCommitBlockListExceptionHeaders = {
+    serializedName: "BlockBlob_commitBlockListExceptionHeaders",
+    type: {
+        name: "Composite",
+        className: "BlockBlobCommitBlockListExceptionHeaders",
+        modelProperties: {
+            errorCode: {
+                serializedName: "x-ms-error-code",
+                xmlName: "x-ms-error-code",
+                type: {
+                    name: "String",
+                },
+            },
+        },
+    },
+};
+const BlockBlobGetBlockListHeaders = {
+    serializedName: "BlockBlob_getBlockListHeaders",
+    type: {
+        name: "Composite",
+        className: "BlockBlobGetBlockListHeaders",
+        modelProperties: {
+            lastModified: {
+                serializedName: "last-modified",
+                xmlName: "last-modified",
+                type: {
+                    name: "DateTimeRfc1123",
+                },
+            },
+            etag: {
+                serializedName: "etag",
+                xmlName: "etag",
+                type: {
+                    name: "String",
+                },
+            },
+            contentType: {
+                serializedName: "content-type",
+                xmlName: "content-type",
+                type: {
+                    name: "String",
+                },
+            },
+            blobContentLength: {
+                serializedName: "x-ms-blob-content-length",
+                xmlName: "x-ms-blob-content-length",
+                type: {
+                    name: "Number",
+                },
+            },
+            clientRequestId: {
+                serializedName: "x-ms-client-request-id",
+                xmlName: "x-ms-client-request-id",
+                type: {
+                    name: "String",
+                },
+            },
+            requestId: {
+                serializedName: "x-ms-request-id",
+                xmlName: "x-ms-request-id",
+                type: {
+                    name: "String",
+                },
+            },
+            version: {
+                serializedName: "x-ms-version",
+                xmlName: "x-ms-version",
+                type: {
+                    name: "String",
+                },
+            },
+            date: {
+                serializedName: "date",
+                xmlName: "date",
+                type: {
+                    name: "DateTimeRfc1123",
+                },
+            },
+            errorCode: {
+                serializedName: "x-ms-error-code",
+                xmlName: "x-ms-error-code",
+                type: {
+                    name: "String",
+                },
+            },
+        },
+    },
+};
+const BlockBlobGetBlockListExceptionHeaders = {
+    serializedName: "BlockBlob_getBlockListExceptionHeaders",
+    type: {
+        name: "Composite",
+        className: "BlockBlobGetBlockListExceptionHeaders",
+        modelProperties: {
+            errorCode: {
+                serializedName: "x-ms-error-code",
+                xmlName: "x-ms-error-code",
+                type: {
+                    name: "String",
+                },
+            },
+        },
+    },
+};
+
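+// Rollup namespace re-export: the frozen, prototype-less object below stands in
+// for a namespace import of the generated mappers module (hence the
+// /*#__PURE__*/ annotation, which lets bundlers drop it if unused), so the
+// operation specs later in this bundle can reference mappers by name.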
+var Mappers = /*#__PURE__*/Object.freeze({
+    __proto__: null,
+    AccessPolicy: AccessPolicy,
+    AppendBlobAppendBlockExceptionHeaders: AppendBlobAppendBlockExceptionHeaders,
+    AppendBlobAppendBlockFromUrlExceptionHeaders: AppendBlobAppendBlockFromUrlExceptionHeaders,
+    AppendBlobAppendBlockFromUrlHeaders: AppendBlobAppendBlockFromUrlHeaders,
+    AppendBlobAppendBlockHeaders: AppendBlobAppendBlockHeaders,
+    AppendBlobCreateExceptionHeaders: AppendBlobCreateExceptionHeaders,
+    AppendBlobCreateHeaders: AppendBlobCreateHeaders,
+    AppendBlobSealExceptionHeaders: AppendBlobSealExceptionHeaders,
+    AppendBlobSealHeaders: AppendBlobSealHeaders,
+    ArrowConfiguration: ArrowConfiguration,
+    ArrowField: ArrowField,
+    BlobAbortCopyFromURLExceptionHeaders: BlobAbortCopyFromURLExceptionHeaders,
+    BlobAbortCopyFromURLHeaders: BlobAbortCopyFromURLHeaders,
+    BlobAcquireLeaseExceptionHeaders: BlobAcquireLeaseExceptionHeaders,
+    BlobAcquireLeaseHeaders: BlobAcquireLeaseHeaders,
+    BlobBreakLeaseExceptionHeaders: BlobBreakLeaseExceptionHeaders,
+    BlobBreakLeaseHeaders: BlobBreakLeaseHeaders,
+    BlobChangeLeaseExceptionHeaders: BlobChangeLeaseExceptionHeaders,
+    BlobChangeLeaseHeaders: BlobChangeLeaseHeaders,
+    BlobCopyFromURLExceptionHeaders: BlobCopyFromURLExceptionHeaders,
+    BlobCopyFromURLHeaders: BlobCopyFromURLHeaders,
+    BlobCreateSnapshotExceptionHeaders: BlobCreateSnapshotExceptionHeaders,
+    BlobCreateSnapshotHeaders: BlobCreateSnapshotHeaders,
+    BlobDeleteExceptionHeaders: BlobDeleteExceptionHeaders,
+    BlobDeleteHeaders: BlobDeleteHeaders,
+    BlobDeleteImmutabilityPolicyExceptionHeaders: BlobDeleteImmutabilityPolicyExceptionHeaders,
+    BlobDeleteImmutabilityPolicyHeaders: BlobDeleteImmutabilityPolicyHeaders,
+    BlobDownloadExceptionHeaders: BlobDownloadExceptionHeaders,
+    BlobDownloadHeaders: BlobDownloadHeaders,
+    BlobFlatListSegment: BlobFlatListSegment,
+    BlobGetAccountInfoExceptionHeaders: BlobGetAccountInfoExceptionHeaders,
+    BlobGetAccountInfoHeaders: BlobGetAccountInfoHeaders,
+    BlobGetPropertiesExceptionHeaders: BlobGetPropertiesExceptionHeaders,
+    BlobGetPropertiesHeaders: BlobGetPropertiesHeaders,
+    BlobGetTagsExceptionHeaders: BlobGetTagsExceptionHeaders,
+    BlobGetTagsHeaders: BlobGetTagsHeaders,
+    BlobHierarchyListSegment: BlobHierarchyListSegment,
+    BlobItemInternal: BlobItemInternal,
+    BlobName: BlobName,
+    BlobPrefix: BlobPrefix,
+    BlobPropertiesInternal: BlobPropertiesInternal,
+    BlobQueryExceptionHeaders: BlobQueryExceptionHeaders,
+    BlobQueryHeaders: BlobQueryHeaders,
+    BlobReleaseLeaseExceptionHeaders: BlobReleaseLeaseExceptionHeaders,
+    BlobReleaseLeaseHeaders: BlobReleaseLeaseHeaders,
+    BlobRenewLeaseExceptionHeaders: BlobRenewLeaseExceptionHeaders,
+    BlobRenewLeaseHeaders: BlobRenewLeaseHeaders,
+    BlobServiceProperties: BlobServiceProperties,
+    BlobServiceStatistics: BlobServiceStatistics,
+    BlobSetExpiryExceptionHeaders: BlobSetExpiryExceptionHeaders,
+    BlobSetExpiryHeaders: BlobSetExpiryHeaders,
+    BlobSetHttpHeadersExceptionHeaders: BlobSetHttpHeadersExceptionHeaders,
+    BlobSetHttpHeadersHeaders: BlobSetHttpHeadersHeaders,
+    BlobSetImmutabilityPolicyExceptionHeaders: BlobSetImmutabilityPolicyExceptionHeaders,
+    BlobSetImmutabilityPolicyHeaders: BlobSetImmutabilityPolicyHeaders,
+    BlobSetLegalHoldExceptionHeaders: BlobSetLegalHoldExceptionHeaders,
+    BlobSetLegalHoldHeaders: BlobSetLegalHoldHeaders,
+    BlobSetMetadataExceptionHeaders: BlobSetMetadataExceptionHeaders,
+    BlobSetMetadataHeaders: BlobSetMetadataHeaders,
+    BlobSetTagsExceptionHeaders: BlobSetTagsExceptionHeaders,
+    BlobSetTagsHeaders: BlobSetTagsHeaders,
+    BlobSetTierExceptionHeaders: BlobSetTierExceptionHeaders,
+    BlobSetTierHeaders: BlobSetTierHeaders,
+    BlobStartCopyFromURLExceptionHeaders: BlobStartCopyFromURLExceptionHeaders,
+    BlobStartCopyFromURLHeaders: BlobStartCopyFromURLHeaders,
+    BlobTag: BlobTag,
+    BlobTags: BlobTags,
+    BlobUndeleteExceptionHeaders: BlobUndeleteExceptionHeaders,
+    BlobUndeleteHeaders: BlobUndeleteHeaders,
+    Block: Block,
+    BlockBlobCommitBlockListExceptionHeaders: BlockBlobCommitBlockListExceptionHeaders,
+    BlockBlobCommitBlockListHeaders: BlockBlobCommitBlockListHeaders,
+    BlockBlobGetBlockListExceptionHeaders: BlockBlobGetBlockListExceptionHeaders,
+    BlockBlobGetBlockListHeaders: BlockBlobGetBlockListHeaders,
+    BlockBlobPutBlobFromUrlExceptionHeaders: BlockBlobPutBlobFromUrlExceptionHeaders,
+    BlockBlobPutBlobFromUrlHeaders: BlockBlobPutBlobFromUrlHeaders,
+    BlockBlobStageBlockExceptionHeaders: BlockBlobStageBlockExceptionHeaders,
+    BlockBlobStageBlockFromURLExceptionHeaders: BlockBlobStageBlockFromURLExceptionHeaders,
+    BlockBlobStageBlockFromURLHeaders: BlockBlobStageBlockFromURLHeaders,
+    BlockBlobStageBlockHeaders: BlockBlobStageBlockHeaders,
+    BlockBlobUploadExceptionHeaders: BlockBlobUploadExceptionHeaders,
+    BlockBlobUploadHeaders: BlockBlobUploadHeaders,
+    BlockList: BlockList,
+    BlockLookupList: BlockLookupList,
+    ClearRange: ClearRange,
+    ContainerAcquireLeaseExceptionHeaders: ContainerAcquireLeaseExceptionHeaders,
+    ContainerAcquireLeaseHeaders: ContainerAcquireLeaseHeaders,
+    ContainerBreakLeaseExceptionHeaders: ContainerBreakLeaseExceptionHeaders,
+    ContainerBreakLeaseHeaders: ContainerBreakLeaseHeaders,
+    ContainerChangeLeaseExceptionHeaders: ContainerChangeLeaseExceptionHeaders,
+    ContainerChangeLeaseHeaders: ContainerChangeLeaseHeaders,
+    ContainerCreateExceptionHeaders: ContainerCreateExceptionHeaders,
+    ContainerCreateHeaders: ContainerCreateHeaders,
+    ContainerDeleteExceptionHeaders: ContainerDeleteExceptionHeaders,
+    ContainerDeleteHeaders: ContainerDeleteHeaders,
+    ContainerFilterBlobsExceptionHeaders: ContainerFilterBlobsExceptionHeaders,
+    ContainerFilterBlobsHeaders: ContainerFilterBlobsHeaders,
+    ContainerGetAccessPolicyExceptionHeaders: ContainerGetAccessPolicyExceptionHeaders,
+    ContainerGetAccessPolicyHeaders: ContainerGetAccessPolicyHeaders,
+    ContainerGetAccountInfoExceptionHeaders: ContainerGetAccountInfoExceptionHeaders,
+    ContainerGetAccountInfoHeaders: ContainerGetAccountInfoHeaders,
+    ContainerGetPropertiesExceptionHeaders: ContainerGetPropertiesExceptionHeaders,
+    ContainerGetPropertiesHeaders: ContainerGetPropertiesHeaders,
+    ContainerItem: ContainerItem,
+    ContainerListBlobFlatSegmentExceptionHeaders: ContainerListBlobFlatSegmentExceptionHeaders,
+    ContainerListBlobFlatSegmentHeaders: ContainerListBlobFlatSegmentHeaders,
+    ContainerListBlobHierarchySegmentExceptionHeaders: ContainerListBlobHierarchySegmentExceptionHeaders,
+    ContainerListBlobHierarchySegmentHeaders: ContainerListBlobHierarchySegmentHeaders,
+    ContainerProperties: ContainerProperties,
+    ContainerReleaseLeaseExceptionHeaders: ContainerReleaseLeaseExceptionHeaders,
+    ContainerReleaseLeaseHeaders: ContainerReleaseLeaseHeaders,
+    ContainerRenameExceptionHeaders: ContainerRenameExceptionHeaders,
+    ContainerRenameHeaders: ContainerRenameHeaders,
+    ContainerRenewLeaseExceptionHeaders: ContainerRenewLeaseExceptionHeaders,
+    ContainerRenewLeaseHeaders: ContainerRenewLeaseHeaders,
+    ContainerRestoreExceptionHeaders: ContainerRestoreExceptionHeaders,
+    ContainerRestoreHeaders: ContainerRestoreHeaders,
+    ContainerSetAccessPolicyExceptionHeaders: ContainerSetAccessPolicyExceptionHeaders,
+    ContainerSetAccessPolicyHeaders: ContainerSetAccessPolicyHeaders,
+    ContainerSetMetadataExceptionHeaders: ContainerSetMetadataExceptionHeaders,
+    ContainerSetMetadataHeaders: ContainerSetMetadataHeaders,
+    ContainerSubmitBatchExceptionHeaders: ContainerSubmitBatchExceptionHeaders,
+    ContainerSubmitBatchHeaders: ContainerSubmitBatchHeaders,
+    CorsRule: CorsRule,
+    DelimitedTextConfiguration: DelimitedTextConfiguration,
+    FilterBlobItem: FilterBlobItem,
+    FilterBlobSegment: FilterBlobSegment,
+    GeoReplication: GeoReplication,
+    JsonTextConfiguration: JsonTextConfiguration,
+    KeyInfo: KeyInfo,
+    ListBlobsFlatSegmentResponse: ListBlobsFlatSegmentResponse,
+    ListBlobsHierarchySegmentResponse: ListBlobsHierarchySegmentResponse,
+    ListContainersSegmentResponse: ListContainersSegmentResponse,
+    Logging: Logging,
+    Metrics: Metrics,
+    PageBlobClearPagesExceptionHeaders: PageBlobClearPagesExceptionHeaders,
+    PageBlobClearPagesHeaders: PageBlobClearPagesHeaders,
+    PageBlobCopyIncrementalExceptionHeaders: PageBlobCopyIncrementalExceptionHeaders,
+    PageBlobCopyIncrementalHeaders: PageBlobCopyIncrementalHeaders,
+    PageBlobCreateExceptionHeaders: PageBlobCreateExceptionHeaders,
+    PageBlobCreateHeaders: PageBlobCreateHeaders,
+    PageBlobGetPageRangesDiffExceptionHeaders: PageBlobGetPageRangesDiffExceptionHeaders,
+    PageBlobGetPageRangesDiffHeaders: PageBlobGetPageRangesDiffHeaders,
+    PageBlobGetPageRangesExceptionHeaders: PageBlobGetPageRangesExceptionHeaders,
+    PageBlobGetPageRangesHeaders: PageBlobGetPageRangesHeaders,
+    PageBlobResizeExceptionHeaders: PageBlobResizeExceptionHeaders,
+    PageBlobResizeHeaders: PageBlobResizeHeaders,
+    PageBlobUpdateSequenceNumberExceptionHeaders: PageBlobUpdateSequenceNumberExceptionHeaders,
+    PageBlobUpdateSequenceNumberHeaders: PageBlobUpdateSequenceNumberHeaders,
+    PageBlobUploadPagesExceptionHeaders: PageBlobUploadPagesExceptionHeaders,
+    PageBlobUploadPagesFromURLExceptionHeaders: PageBlobUploadPagesFromURLExceptionHeaders,
+    PageBlobUploadPagesFromURLHeaders: PageBlobUploadPagesFromURLHeaders,
+    PageBlobUploadPagesHeaders: PageBlobUploadPagesHeaders,
+    PageList: PageList,
+    PageRange: PageRange,
+    QueryFormat: QueryFormat,
+    QueryRequest: QueryRequest,
+    QuerySerialization: QuerySerialization,
+    RetentionPolicy: RetentionPolicy,
+    ServiceFilterBlobsExceptionHeaders: ServiceFilterBlobsExceptionHeaders,
+    ServiceFilterBlobsHeaders: ServiceFilterBlobsHeaders,
+    ServiceGetAccountInfoExceptionHeaders: ServiceGetAccountInfoExceptionHeaders,
+    ServiceGetAccountInfoHeaders: ServiceGetAccountInfoHeaders,
+    ServiceGetPropertiesExceptionHeaders: ServiceGetPropertiesExceptionHeaders,
+    ServiceGetPropertiesHeaders: ServiceGetPropertiesHeaders,
+    ServiceGetStatisticsExceptionHeaders: ServiceGetStatisticsExceptionHeaders,
+    ServiceGetStatisticsHeaders: ServiceGetStatisticsHeaders,
+    ServiceGetUserDelegationKeyExceptionHeaders: ServiceGetUserDelegationKeyExceptionHeaders,
+    ServiceGetUserDelegationKeyHeaders: ServiceGetUserDelegationKeyHeaders,
+    ServiceListContainersSegmentExceptionHeaders: ServiceListContainersSegmentExceptionHeaders,
+    ServiceListContainersSegmentHeaders: ServiceListContainersSegmentHeaders,
+    ServiceSetPropertiesExceptionHeaders: ServiceSetPropertiesExceptionHeaders,
+    ServiceSetPropertiesHeaders: ServiceSetPropertiesHeaders,
+    ServiceSubmitBatchExceptionHeaders: ServiceSubmitBatchExceptionHeaders,
+    ServiceSubmitBatchHeaders: ServiceSubmitBatchHeaders,
+    SignedIdentifier: SignedIdentifier,
+    StaticWebsite: StaticWebsite,
+    StorageError: StorageError,
+    UserDelegationKey: UserDelegationKey
+});
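+
+// Illustrative sketch (assumption, not part of the generated client): each
+// composite *Headers mapper above pairs a client-side property name (e.g.
+// requestId) with the raw wire name it is (de)serialized from (serializedName,
+// e.g. "x-ms-request-id") and a wire type such as "String", "ByteArray" or
+// "DateTimeRfc1123". A hypothetical helper that inverts that mapping might look
+// like the following; it is not called anywhere in the generated code.
+function headerToPropertyName(compositeMapper, headerName) {
+    const props = (compositeMapper.type && compositeMapper.type.modelProperties) || {};
+    for (const [propertyName, propertyMapper] of Object.entries(props)) {
+        if (propertyMapper.serializedName &&
+            propertyMapper.serializedName.toLowerCase() === headerName.toLowerCase()) {
+            return propertyName;
+        }
+    }
+    return undefined;
+}
+// e.g. headerToPropertyName(BlockBlobGetBlockListHeaders, "x-ms-request-id") === "requestId"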
+
+/*
+ * Copyright (c) Microsoft Corporation.
+ * Licensed under the MIT License.
+ *
+ * Code generated by Microsoft (R) AutoRest Code Generator.
+ * Changes may cause incorrect behavior and will be lost if the code is regenerated.
+ */
+const contentType = {
+    parameterPath: ["options", "contentType"],
+    mapper: {
+        defaultValue: "application/xml",
+        isConstant: true,
+        serializedName: "Content-Type",
+        type: {
+            name: "String",
+        },
+    },
+};
+const blobServiceProperties = {
+    parameterPath: "blobServiceProperties",
+    mapper: BlobServiceProperties,
+};
+const accept = {
+    parameterPath: "accept",
+    mapper: {
+        defaultValue: "application/xml",
+        isConstant: true,
+        serializedName: "Accept",
+        type: {
+            name: "String",
+        },
+    },
+};
+const url = {
+    parameterPath: "url",
+    mapper: {
+        serializedName: "url",
+        required: true,
+        xmlName: "url",
+        type: {
+            name: "String",
+        },
+    },
+    skipEncoding: true,
+};
+const restype = {
+    parameterPath: "restype",
+    mapper: {
+        defaultValue: "service",
+        isConstant: true,
+        serializedName: "restype",
+        type: {
+            name: "String",
+        },
+    },
+};
+const comp = {
+    parameterPath: "comp",
+    mapper: {
+        defaultValue: "properties",
+        isConstant: true,
+        serializedName: "comp",
+        type: {
+            name: "String",
+        },
+    },
+};
+const timeoutInSeconds = {
+    parameterPath: ["options", "timeoutInSeconds"],
+    mapper: {
+        constraints: {
+            InclusiveMinimum: 0,
+        },
+        serializedName: "timeout",
+        xmlName: "timeout",
+        type: {
+            name: "Number",
+        },
+    },
+};
+const version = {
+    parameterPath: "version",
+    mapper: {
+        defaultValue: "2025-01-05",
+        isConstant: true,
+        serializedName: "x-ms-version",
+        type: {
+            name: "String",
+        },
+    },
+};
+const requestId = {
+    parameterPath: ["options", "requestId"],
+    mapper: {
+        serializedName: "x-ms-client-request-id",
+        xmlName: "x-ms-client-request-id",
+        type: {
+            name: "String",
+        },
+    },
+};
+const accept1 = {
+    parameterPath: "accept",
+    mapper: {
+        defaultValue: "application/xml",
+        isConstant: true,
+        serializedName: "Accept",
+        type: {
+            name: "String",
+        },
+    },
+};
+const comp1 = {
+    parameterPath: "comp",
+    mapper: {
+        defaultValue: "stats",
+        isConstant: true,
+        serializedName: "comp",
+        type: {
+            name: "String",
+        },
+    },
+};
+const comp2 = {
+    parameterPath: "comp",
+    mapper: {
+        defaultValue: "list",
+        isConstant: true,
+        serializedName: "comp",
+        type: {
+            name: "String",
+        },
+    },
+};
+const prefix = {
+    parameterPath: ["options", "prefix"],
+    mapper: {
+        serializedName: "prefix",
+        xmlName: "prefix",
+        type: {
+            name: "String",
+        },
+    },
+};
+const marker = {
+    parameterPath: ["options", "marker"],
+    mapper: {
+        serializedName: "marker",
+        xmlName: "marker",
+        type: {
+            name: "String",
+        },
+    },
+};
+const maxPageSize = {
+    parameterPath: ["options", "maxPageSize"],
+    mapper: {
+        constraints: {
+            InclusiveMinimum: 1,
+        },
+        serializedName: "maxresults",
+        xmlName: "maxresults",
+        type: {
+            name: "Number",
+        },
+    },
+};
+const include = {
+    parameterPath: ["options", "include"],
+    mapper: {
+        serializedName: "include",
+        xmlName: "include",
+        xmlElementName: "ListContainersIncludeType",
+        type: {
+            name: "Sequence",
+            element: {
+                type: {
+                    name: "Enum",
+                    allowedValues: ["metadata", "deleted", "system"],
+                },
+            },
+        },
+    },
+    collectionFormat: "CSV",
+};
+const keyInfo = {
+    parameterPath: "keyInfo",
+    mapper: KeyInfo,
+};
+const comp3 = {
+    parameterPath: "comp",
+    mapper: {
+        defaultValue: "userdelegationkey",
+        isConstant: true,
+        serializedName: "comp",
+        type: {
+            name: "String",
+        },
+    },
+};
+const restype1 = {
+    parameterPath: "restype",
+    mapper: {
+        defaultValue: "account",
+        isConstant: true,
+        serializedName: "restype",
+        type: {
+            name: "String",
+        },
+    },
+};
+const body = {
+    parameterPath: "body",
+    mapper: {
+        serializedName: "body",
+        required: true,
+        xmlName: "body",
+        type: {
+            name: "Stream",
+        },
+    },
+};
+const comp4 = {
+    parameterPath: "comp",
+    mapper: {
+        defaultValue: "batch",
+        isConstant: true,
+        serializedName: "comp",
+        type: {
+            name: "String",
+        },
+    },
+};
+const contentLength = {
+    parameterPath: "contentLength",
+    mapper: {
+        serializedName: "Content-Length",
+        required: true,
+        xmlName: "Content-Length",
+        type: {
+            name: "Number",
+        },
+    },
+};
+const multipartContentType = {
+    parameterPath: "multipartContentType",
+    mapper: {
+        serializedName: "Content-Type",
+        required: true,
+        xmlName: "Content-Type",
+        type: {
+            name: "String",
+        },
+    },
+};
+const comp5 = {
+    parameterPath: "comp",
+    mapper: {
+        defaultValue: "blobs",
+        isConstant: true,
+        serializedName: "comp",
+        type: {
+            name: "String",
+        },
+    },
+};
+const where = {
+    parameterPath: ["options", "where"],
+    mapper: {
+        serializedName: "where",
+        xmlName: "where",
+        type: {
+            name: "String",
+        },
+    },
+};
+const restype2 = {
+    parameterPath: "restype",
+    mapper: {
+        defaultValue: "container",
+        isConstant: true,
+        serializedName: "restype",
+        type: {
+            name: "String",
+        },
+    },
+};
+const metadata = {
+    parameterPath: ["options", "metadata"],
+    mapper: {
+        serializedName: "x-ms-meta",
+        xmlName: "x-ms-meta",
+        headerCollectionPrefix: "x-ms-meta-",
+        type: {
+            name: "Dictionary",
+            value: { type: { name: "String" } },
+        },
+    },
+};
+const access = {
+    parameterPath: ["options", "access"],
+    mapper: {
+        serializedName: "x-ms-blob-public-access",
+        xmlName: "x-ms-blob-public-access",
+        type: {
+            name: "Enum",
+            allowedValues: ["container", "blob"],
+        },
+    },
+};
+const defaultEncryptionScope = {
+    parameterPath: [
+        "options",
+        "containerEncryptionScope",
+        "defaultEncryptionScope",
+    ],
+    mapper: {
+        serializedName: "x-ms-default-encryption-scope",
+        xmlName: "x-ms-default-encryption-scope",
+        type: {
+            name: "String",
+        },
+    },
+};
+const preventEncryptionScopeOverride = {
+    parameterPath: [
+        "options",
+        "containerEncryptionScope",
+        "preventEncryptionScopeOverride",
+    ],
+    mapper: {
+        serializedName: "x-ms-deny-encryption-scope-override",
+        xmlName: "x-ms-deny-encryption-scope-override",
+        type: {
+            name: "Boolean",
+        },
+    },
+};
+const leaseId = {
+    parameterPath: ["options", "leaseAccessConditions", "leaseId"],
+    mapper: {
+        serializedName: "x-ms-lease-id",
+        xmlName: "x-ms-lease-id",
+        type: {
+            name: "String",
+        },
+    },
+};
+const ifModifiedSince = {
+    parameterPath: ["options", "modifiedAccessConditions", "ifModifiedSince"],
+    mapper: {
+        serializedName: "If-Modified-Since",
+        xmlName: "If-Modified-Since",
+        type: {
+            name: "DateTimeRfc1123",
+        },
+    },
+};
+const ifUnmodifiedSince = {
+    parameterPath: ["options", "modifiedAccessConditions", "ifUnmodifiedSince"],
+    mapper: {
+        serializedName: "If-Unmodified-Since",
+        xmlName: "If-Unmodified-Since",
+        type: {
+            name: "DateTimeRfc1123",
+        },
+    },
+};
+const comp6 = {
+    parameterPath: "comp",
+    mapper: {
+        defaultValue: "metadata",
+        isConstant: true,
+        serializedName: "comp",
+        type: {
+            name: "String",
+        },
+    },
+};
+const comp7 = {
+    parameterPath: "comp",
+    mapper: {
+        defaultValue: "acl",
+        isConstant: true,
+        serializedName: "comp",
+        type: {
+            name: "String",
+        },
+    },
+};
+const containerAcl = {
+    parameterPath: ["options", "containerAcl"],
+    mapper: {
+        serializedName: "containerAcl",
+        xmlName: "SignedIdentifiers",
+        xmlIsWrapped: true,
+        xmlElementName: "SignedIdentifier",
+        type: {
+            name: "Sequence",
+            element: {
+                type: {
+                    name: "Composite",
+                    className: "SignedIdentifier",
+                },
+            },
+        },
+    },
+};
+const comp8 = {
+    parameterPath: "comp",
+    mapper: {
+        defaultValue: "undelete",
+        isConstant: true,
+        serializedName: "comp",
+        type: {
+            name: "String",
+        },
+    },
+};
+const deletedContainerName = {
+    parameterPath: ["options", "deletedContainerName"],
+    mapper: {
+        serializedName: "x-ms-deleted-container-name",
+        xmlName: "x-ms-deleted-container-name",
+        type: {
+            name: "String",
+        },
+    },
+};
+const deletedContainerVersion = {
+    parameterPath: ["options", "deletedContainerVersion"],
+    mapper: {
+        serializedName: "x-ms-deleted-container-version",
+        xmlName: "x-ms-deleted-container-version",
+        type: {
+            name: "String",
+        },
+    },
+};
+const comp9 = {
+    parameterPath: "comp",
+    mapper: {
+        defaultValue: "rename",
+        isConstant: true,
+        serializedName: "comp",
+        type: {
+            name: "String",
+        },
+    },
+};
+const sourceContainerName = {
+    parameterPath: "sourceContainerName",
+    mapper: {
+        serializedName: "x-ms-source-container-name",
+        required: true,
+        xmlName: "x-ms-source-container-name",
+        type: {
+            name: "String",
+        },
+    },
+};
+const sourceLeaseId = {
+    parameterPath: ["options", "sourceLeaseId"],
+    mapper: {
+        serializedName: "x-ms-source-lease-id",
+        xmlName: "x-ms-source-lease-id",
+        type: {
+            name: "String",
+        },
+    },
+};
+const comp10 = {
+    parameterPath: "comp",
+    mapper: {
+        defaultValue: "lease",
+        isConstant: true,
+        serializedName: "comp",
+        type: {
+            name: "String",
+        },
+    },
+};
+const action = {
+    parameterPath: "action",
+    mapper: {
+        defaultValue: "acquire",
+        isConstant: true,
+        serializedName: "x-ms-lease-action",
+        type: {
+            name: "String",
+        },
+    },
+};
+const duration = {
+    parameterPath: ["options", "duration"],
+    mapper: {
+        serializedName: "x-ms-lease-duration",
+        xmlName: "x-ms-lease-duration",
+        type: {
+            name: "Number",
+        },
+    },
+};
+const proposedLeaseId = {
+    parameterPath: ["options", "proposedLeaseId"],
+    mapper: {
+        serializedName: "x-ms-proposed-lease-id",
+        xmlName: "x-ms-proposed-lease-id",
+        type: {
+            name: "String",
+        },
+    },
+};
+const action1 = {
+    parameterPath: "action",
+    mapper: {
+        defaultValue: "release",
+        isConstant: true,
+        serializedName: "x-ms-lease-action",
+        type: {
+            name: "String",
+        },
+    },
+};
+const leaseId1 = {
+    parameterPath: "leaseId",
+    mapper: {
+        serializedName: "x-ms-lease-id",
+        required: true,
+        xmlName: "x-ms-lease-id",
+        type: {
+            name: "String",
+        },
+    },
+};
+const action2 = {
+    parameterPath: "action",
+    mapper: {
+        defaultValue: "renew",
+        isConstant: true,
+        serializedName: "x-ms-lease-action",
+        type: {
+            name: "String",
+        },
+    },
+};
+const action3 = {
+    parameterPath: "action",
+    mapper: {
+        defaultValue: "break",
+        isConstant: true,
+        serializedName: "x-ms-lease-action",
+        type: {
+            name: "String",
+        },
+    },
+};
+const breakPeriod = {
+    parameterPath: ["options", "breakPeriod"],
+    mapper: {
+        serializedName: "x-ms-lease-break-period",
+        xmlName: "x-ms-lease-break-period",
+        type: {
+            name: "Number",
+        },
+    },
+};
+const action4 = {
+    parameterPath: "action",
+    mapper: {
+        defaultValue: "change",
+        isConstant: true,
+        serializedName: "x-ms-lease-action",
+        type: {
+            name: "String",
+        },
+    },
+};
+const proposedLeaseId1 = {
+    parameterPath: "proposedLeaseId",
+    mapper: {
+        serializedName: "x-ms-proposed-lease-id",
+        required: true,
+        xmlName: "x-ms-proposed-lease-id",
+        type: {
+            name: "String",
+        },
+    },
+};
+const include1 = {
+    parameterPath: ["options", "include"],
+    mapper: {
+        serializedName: "include",
+        xmlName: "include",
+        xmlElementName: "ListBlobsIncludeItem",
+        type: {
+            name: "Sequence",
+            element: {
+                type: {
+                    name: "Enum",
+                    allowedValues: [
+                        "copy",
+                        "deleted",
+                        "metadata",
+                        "snapshots",
+                        "uncommittedblobs",
+                        "versions",
+                        "tags",
+                        "immutabilitypolicy",
+                        "legalhold",
+                        "deletedwithversions",
+                    ],
+                },
+            },
+        },
+    },
+    collectionFormat: "CSV",
+};
+const delimiter = {
+    parameterPath: "delimiter",
+    mapper: {
+        serializedName: "delimiter",
+        required: true,
+        xmlName: "delimiter",
+        type: {
+            name: "String",
+        },
+    },
+};
+const snapshot = {
+    parameterPath: ["options", "snapshot"],
+    mapper: {
+        serializedName: "snapshot",
+        xmlName: "snapshot",
+        type: {
+            name: "String",
+        },
+    },
+};
+const versionId = {
+    parameterPath: ["options", "versionId"],
+    mapper: {
+        serializedName: "versionid",
+        xmlName: "versionid",
+        type: {
+            name: "String",
+        },
+    },
+};
+const range = {
+    parameterPath: ["options", "range"],
+    mapper: {
+        serializedName: "x-ms-range",
+        xmlName: "x-ms-range",
+        type: {
+            name: "String",
+        },
+    },
+};
+const rangeGetContentMD5 = {
+    parameterPath: ["options", "rangeGetContentMD5"],
+    mapper: {
+        serializedName: "x-ms-range-get-content-md5",
+        xmlName: "x-ms-range-get-content-md5",
+        type: {
+            name: "Boolean",
+        },
+    },
+};
+const rangeGetContentCRC64 = {
+    parameterPath: ["options", "rangeGetContentCRC64"],
+    mapper: {
+        serializedName: "x-ms-range-get-content-crc64",
+        xmlName: "x-ms-range-get-content-crc64",
+        type: {
+            name: "Boolean",
+        },
+    },
+};
+const encryptionKey = {
+    parameterPath: ["options", "cpkInfo", "encryptionKey"],
+    mapper: {
+        serializedName: "x-ms-encryption-key",
+        xmlName: "x-ms-encryption-key",
+        type: {
+            name: "String",
+        },
+    },
+};
+const encryptionKeySha256 = {
+    parameterPath: ["options", "cpkInfo", "encryptionKeySha256"],
+    mapper: {
+        serializedName: "x-ms-encryption-key-sha256",
+        xmlName: "x-ms-encryption-key-sha256",
+        type: {
+            name: "String",
+        },
+    },
+};
+const encryptionAlgorithm = {
+    parameterPath: ["options", "cpkInfo", "encryptionAlgorithm"],
+    mapper: {
+        serializedName: "x-ms-encryption-algorithm",
+        xmlName: "x-ms-encryption-algorithm",
+        type: {
+            name: "String",
+        },
+    },
+};
+const ifMatch = {
+    parameterPath: ["options", "modifiedAccessConditions", "ifMatch"],
+    mapper: {
+        serializedName: "If-Match",
+        xmlName: "If-Match",
+        type: {
+            name: "String",
+        },
+    },
+};
+const ifNoneMatch = {
+    parameterPath: ["options", "modifiedAccessConditions", "ifNoneMatch"],
+    mapper: {
+        serializedName: "If-None-Match",
+        xmlName: "If-None-Match",
+        type: {
+            name: "String",
+        },
+    },
+};
+const ifTags = {
+    parameterPath: ["options", "modifiedAccessConditions", "ifTags"],
+    mapper: {
+        serializedName: "x-ms-if-tags",
+        xmlName: "x-ms-if-tags",
+        type: {
+            name: "String",
+        },
+    },
+};
+const deleteSnapshots = {
+    parameterPath: ["options", "deleteSnapshots"],
+    mapper: {
+        serializedName: "x-ms-delete-snapshots",
+        xmlName: "x-ms-delete-snapshots",
+        type: {
+            name: "Enum",
+            allowedValues: ["include", "only"],
+        },
+    },
+};
+const blobDeleteType = {
+    parameterPath: ["options", "blobDeleteType"],
+    mapper: {
+        serializedName: "deletetype",
+        xmlName: "deletetype",
+        type: {
+            name: "String",
+        },
+    },
+};
+const comp11 = {
+    parameterPath: "comp",
+    mapper: {
+        defaultValue: "expiry",
+        isConstant: true,
+        serializedName: "comp",
+        type: {
+            name: "String",
+        },
+    },
+};
+const expiryOptions = {
+    parameterPath: "expiryOptions",
+    mapper: {
+        serializedName: "x-ms-expiry-option",
+        required: true,
+        xmlName: "x-ms-expiry-option",
+        type: {
+            name: "String",
+        },
+    },
+};
+const expiresOn = {
+    parameterPath: ["options", "expiresOn"],
+    mapper: {
+        serializedName: "x-ms-expiry-time",
+        xmlName: "x-ms-expiry-time",
+        type: {
+            name: "String",
+        },
+    },
+};
+const blobCacheControl = {
+    parameterPath: ["options", "blobHttpHeaders", "blobCacheControl"],
+    mapper: {
+        serializedName: "x-ms-blob-cache-control",
+        xmlName: "x-ms-blob-cache-control",
+        type: {
+            name: "String",
+        },
+    },
+};
+const blobContentType = {
+    parameterPath: ["options", "blobHttpHeaders", "blobContentType"],
+    mapper: {
+        serializedName: "x-ms-blob-content-type",
+        xmlName: "x-ms-blob-content-type",
+        type: {
+            name: "String",
+        },
+    },
+};
+const blobContentMD5 = {
+    parameterPath: ["options", "blobHttpHeaders", "blobContentMD5"],
+    mapper: {
+        serializedName: "x-ms-blob-content-md5",
+        xmlName: "x-ms-blob-content-md5",
+        type: {
+            name: "ByteArray",
+        },
+    },
+};
+const blobContentEncoding = {
+    parameterPath: ["options", "blobHttpHeaders", "blobContentEncoding"],
+    mapper: {
+        serializedName: "x-ms-blob-content-encoding",
+        xmlName: "x-ms-blob-content-encoding",
+        type: {
+            name: "String",
+        },
+    },
+};
+const blobContentLanguage = {
+    parameterPath: ["options", "blobHttpHeaders", "blobContentLanguage"],
+    mapper: {
+        serializedName: "x-ms-blob-content-language",
+        xmlName: "x-ms-blob-content-language",
+        type: {
+            name: "String",
+        },
+    },
+};
+const blobContentDisposition = {
+    parameterPath: ["options", "blobHttpHeaders", "blobContentDisposition"],
+    mapper: {
+        serializedName: "x-ms-blob-content-disposition",
+        xmlName: "x-ms-blob-content-disposition",
+        type: {
+            name: "String",
+        },
+    },
+};
+const comp12 = {
+    parameterPath: "comp",
+    mapper: {
+        defaultValue: "immutabilityPolicies",
+        isConstant: true,
+        serializedName: "comp",
+        type: {
+            name: "String",
+        },
+    },
+};
+const immutabilityPolicyExpiry = {
+    parameterPath: ["options", "immutabilityPolicyExpiry"],
+    mapper: {
+        serializedName: "x-ms-immutability-policy-until-date",
+        xmlName: "x-ms-immutability-policy-until-date",
+        type: {
+            name: "DateTimeRfc1123",
+        },
+    },
+};
+const immutabilityPolicyMode = {
+    parameterPath: ["options", "immutabilityPolicyMode"],
+    mapper: {
+        serializedName: "x-ms-immutability-policy-mode",
+        xmlName: "x-ms-immutability-policy-mode",
+        type: {
+            name: "Enum",
+            allowedValues: ["Mutable", "Unlocked", "Locked"],
+        },
+    },
+};
+const comp13 = {
+    parameterPath: "comp",
+    mapper: {
+        defaultValue: "legalhold",
+        isConstant: true,
+        serializedName: "comp",
+        type: {
+            name: "String",
+        },
+    },
+};
+const legalHold = {
+    parameterPath: "legalHold",
+    mapper: {
+        serializedName: "x-ms-legal-hold",
+        required: true,
+        xmlName: "x-ms-legal-hold",
+        type: {
+            name: "Boolean",
+        },
+    },
+};
+const encryptionScope = {
+    parameterPath: ["options", "encryptionScope"],
+    mapper: {
+        serializedName: "x-ms-encryption-scope",
+        xmlName: "x-ms-encryption-scope",
+        type: {
+            name: "String",
+        },
+    },
+};
+const comp14 = {
+    parameterPath: "comp",
+    mapper: {
+        defaultValue: "snapshot",
+        isConstant: true,
+        serializedName: "comp",
+        type: {
+            name: "String",
+        },
+    },
+};
+const tier = {
+    parameterPath: ["options", "tier"],
+    mapper: {
+        serializedName: "x-ms-access-tier",
+        xmlName: "x-ms-access-tier",
+        type: {
+            name: "Enum",
+            allowedValues: [
+                "P4",
+                "P6",
+                "P10",
+                "P15",
+                "P20",
+                "P30",
+                "P40",
+                "P50",
+                "P60",
+                "P70",
+                "P80",
+                "Hot",
+                "Cool",
+                "Archive",
+                "Cold",
+            ],
+        },
+    },
+};
+const rehydratePriority = {
+    parameterPath: ["options", "rehydratePriority"],
+    mapper: {
+        serializedName: "x-ms-rehydrate-priority",
+        xmlName: "x-ms-rehydrate-priority",
+        type: {
+            name: "Enum",
+            allowedValues: ["High", "Standard"],
+        },
+    },
+};
+const sourceIfModifiedSince = {
+    parameterPath: [
+        "options",
+        "sourceModifiedAccessConditions",
+        "sourceIfModifiedSince",
+    ],
+    mapper: {
+        serializedName: "x-ms-source-if-modified-since",
+        xmlName: "x-ms-source-if-modified-since",
+        type: {
+            name: "DateTimeRfc1123",
+        },
+    },
+};
+const sourceIfUnmodifiedSince = {
+    parameterPath: [
+        "options",
+        "sourceModifiedAccessConditions",
+        "sourceIfUnmodifiedSince",
+    ],
+    mapper: {
+        serializedName: "x-ms-source-if-unmodified-since",
+        xmlName: "x-ms-source-if-unmodified-since",
+        type: {
+            name: "DateTimeRfc1123",
+        },
+    },
+};
+const sourceIfMatch = {
+    parameterPath: ["options", "sourceModifiedAccessConditions", "sourceIfMatch"],
+    mapper: {
+        serializedName: "x-ms-source-if-match",
+        xmlName: "x-ms-source-if-match",
+        type: {
+            name: "String",
+        },
+    },
+};
+const sourceIfNoneMatch = {
+    parameterPath: [
+        "options",
+        "sourceModifiedAccessConditions",
+        "sourceIfNoneMatch",
+    ],
+    mapper: {
+        serializedName: "x-ms-source-if-none-match",
+        xmlName: "x-ms-source-if-none-match",
+        type: {
+            name: "String",
+        },
+    },
+};
+const sourceIfTags = {
+    parameterPath: ["options", "sourceModifiedAccessConditions", "sourceIfTags"],
+    mapper: {
+        serializedName: "x-ms-source-if-tags",
+        xmlName: "x-ms-source-if-tags",
+        type: {
+            name: "String",
+        },
+    },
+};
+const copySource = {
+    parameterPath: "copySource",
+    mapper: {
+        serializedName: "x-ms-copy-source",
+        required: true,
+        xmlName: "x-ms-copy-source",
+        type: {
+            name: "String",
+        },
+    },
+};
+const blobTagsString = {
+    parameterPath: ["options", "blobTagsString"],
+    mapper: {
+        serializedName: "x-ms-tags",
+        xmlName: "x-ms-tags",
+        type: {
+            name: "String",
+        },
+    },
+};
+const sealBlob = {
+    parameterPath: ["options", "sealBlob"],
+    mapper: {
+        serializedName: "x-ms-seal-blob",
+        xmlName: "x-ms-seal-blob",
+        type: {
+            name: "Boolean",
+        },
+    },
+};
+const legalHold1 = {
+    parameterPath: ["options", "legalHold"],
+    mapper: {
+        serializedName: "x-ms-legal-hold",
+        xmlName: "x-ms-legal-hold",
+        type: {
+            name: "Boolean",
+        },
+    },
+};
+const xMsRequiresSync = {
+    parameterPath: "xMsRequiresSync",
+    mapper: {
+        defaultValue: "true",
+        isConstant: true,
+        serializedName: "x-ms-requires-sync",
+        type: {
+            name: "String",
+        },
+    },
+};
+const sourceContentMD5 = {
+    parameterPath: ["options", "sourceContentMD5"],
+    mapper: {
+        serializedName: "x-ms-source-content-md5",
+        xmlName: "x-ms-source-content-md5",
+        type: {
+            name: "ByteArray",
+        },
+    },
+};
+const copySourceAuthorization = {
+    parameterPath: ["options", "copySourceAuthorization"],
+    mapper: {
+        serializedName: "x-ms-copy-source-authorization",
+        xmlName: "x-ms-copy-source-authorization",
+        type: {
+            name: "String",
+        },
+    },
+};
+const copySourceTags = {
+    parameterPath: ["options", "copySourceTags"],
+    mapper: {
+        serializedName: "x-ms-copy-source-tag-option",
+        xmlName: "x-ms-copy-source-tag-option",
+        type: {
+            name: "Enum",
+            allowedValues: ["REPLACE", "COPY"],
+        },
+    },
+};
+const comp15 = {
+    parameterPath: "comp",
+    mapper: {
+        defaultValue: "copy",
+        isConstant: true,
+        serializedName: "comp",
+        type: {
+            name: "String",
+        },
+    },
+};
+const copyActionAbortConstant = {
+    parameterPath: "copyActionAbortConstant",
+    mapper: {
+        defaultValue: "abort",
+        isConstant: true,
+        serializedName: "x-ms-copy-action",
+        type: {
+            name: "String",
+        },
+    },
+};
+const copyId = {
+    parameterPath: "copyId",
+    mapper: {
+        serializedName: "copyid",
+        required: true,
+        xmlName: "copyid",
+        type: {
+            name: "String",
+        },
+    },
+};
+const comp16 = {
+    parameterPath: "comp",
+    mapper: {
+        defaultValue: "tier",
+        isConstant: true,
+        serializedName: "comp",
+        type: {
+            name: "String",
+        },
+    },
+};
+const tier1 = {
+    parameterPath: "tier",
+    mapper: {
+        serializedName: "x-ms-access-tier",
+        required: true,
+        xmlName: "x-ms-access-tier",
+        type: {
+            name: "Enum",
+            allowedValues: [
+                "P4",
+                "P6",
+                "P10",
+                "P15",
+                "P20",
+                "P30",
+                "P40",
+                "P50",
+                "P60",
+                "P70",
+                "P80",
+                "Hot",
+                "Cool",
+                "Archive",
+                "Cold",
+            ],
+        },
+    },
+};
+const queryRequest = {
+    parameterPath: ["options", "queryRequest"],
+    mapper: QueryRequest,
+};
+const comp17 = {
+    parameterPath: "comp",
+    mapper: {
+        defaultValue: "query",
+        isConstant: true,
+        serializedName: "comp",
+        type: {
+            name: "String",
+        },
+    },
+};
+const comp18 = {
+    parameterPath: "comp",
+    mapper: {
+        defaultValue: "tags",
+        isConstant: true,
+        serializedName: "comp",
+        type: {
+            name: "String",
+        },
+    },
+};
+const tags = {
+    parameterPath: ["options", "tags"],
+    mapper: BlobTags,
+};
+const transactionalContentMD5 = {
+    parameterPath: ["options", "transactionalContentMD5"],
+    mapper: {
+        serializedName: "Content-MD5",
+        xmlName: "Content-MD5",
+        type: {
+            name: "ByteArray",
+        },
+    },
+};
+const transactionalContentCrc64 = {
+    parameterPath: ["options", "transactionalContentCrc64"],
+    mapper: {
+        serializedName: "x-ms-content-crc64",
+        xmlName: "x-ms-content-crc64",
+        type: {
+            name: "ByteArray",
+        },
+    },
+};
+const blobType = {
+    parameterPath: "blobType",
+    mapper: {
+        defaultValue: "PageBlob",
+        isConstant: true,
+        serializedName: "x-ms-blob-type",
+        type: {
+            name: "String",
+        },
+    },
+};
+const blobContentLength = {
+    parameterPath: "blobContentLength",
+    mapper: {
+        serializedName: "x-ms-blob-content-length",
+        required: true,
+        xmlName: "x-ms-blob-content-length",
+        type: {
+            name: "Number",
+        },
+    },
+};
+const blobSequenceNumber = {
+    parameterPath: ["options", "blobSequenceNumber"],
+    mapper: {
+        defaultValue: 0,
+        serializedName: "x-ms-blob-sequence-number",
+        xmlName: "x-ms-blob-sequence-number",
+        type: {
+            name: "Number",
+        },
+    },
+};
+const contentType1 = {
+    parameterPath: ["options", "contentType"],
+    mapper: {
+        defaultValue: "application/octet-stream",
+        isConstant: true,
+        serializedName: "Content-Type",
+        type: {
+            name: "String",
+        },
+    },
+};
+const body1 = {
+    parameterPath: "body",
+    mapper: {
+        serializedName: "body",
+        required: true,
+        xmlName: "body",
+        type: {
+            name: "Stream",
+        },
+    },
+};
+const accept2 = {
+    parameterPath: "accept",
+    mapper: {
+        defaultValue: "application/xml",
+        isConstant: true,
+        serializedName: "Accept",
+        type: {
+            name: "String",
+        },
+    },
+};
+const comp19 = {
+    parameterPath: "comp",
+    mapper: {
+        defaultValue: "page",
+        isConstant: true,
+        serializedName: "comp",
+        type: {
+            name: "String",
+        },
+    },
+};
+const pageWrite = {
+    parameterPath: "pageWrite",
+    mapper: {
+        defaultValue: "update",
+        isConstant: true,
+        serializedName: "x-ms-page-write",
+        type: {
+            name: "String",
+        },
+    },
+};
+const ifSequenceNumberLessThanOrEqualTo = {
+    parameterPath: [
+        "options",
+        "sequenceNumberAccessConditions",
+        "ifSequenceNumberLessThanOrEqualTo",
+    ],
+    mapper: {
+        serializedName: "x-ms-if-sequence-number-le",
+        xmlName: "x-ms-if-sequence-number-le",
+        type: {
+            name: "Number",
+        },
+    },
+};
+const ifSequenceNumberLessThan = {
+    parameterPath: [
+        "options",
+        "sequenceNumberAccessConditions",
+        "ifSequenceNumberLessThan",
+    ],
+    mapper: {
+        serializedName: "x-ms-if-sequence-number-lt",
+        xmlName: "x-ms-if-sequence-number-lt",
+        type: {
+            name: "Number",
+        },
+    },
+};
+const ifSequenceNumberEqualTo = {
+    parameterPath: [
+        "options",
+        "sequenceNumberAccessConditions",
+        "ifSequenceNumberEqualTo",
+    ],
+    mapper: {
+        serializedName: "x-ms-if-sequence-number-eq",
+        xmlName: "x-ms-if-sequence-number-eq",
+        type: {
+            name: "Number",
+        },
+    },
+};
+const pageWrite1 = {
+    parameterPath: "pageWrite",
+    mapper: {
+        defaultValue: "clear",
+        isConstant: true,
+        serializedName: "x-ms-page-write",
+        type: {
+            name: "String",
+        },
+    },
+};
+const sourceUrl = {
+    parameterPath: "sourceUrl",
+    mapper: {
+        serializedName: "x-ms-copy-source",
+        required: true,
+        xmlName: "x-ms-copy-source",
+        type: {
+            name: "String",
+        },
+    },
+};
+const sourceRange = {
+    parameterPath: "sourceRange",
+    mapper: {
+        serializedName: "x-ms-source-range",
+        required: true,
+        xmlName: "x-ms-source-range",
+        type: {
+            name: "String",
+        },
+    },
+};
+const sourceContentCrc64 = {
+    parameterPath: ["options", "sourceContentCrc64"],
+    mapper: {
+        serializedName: "x-ms-source-content-crc64",
+        xmlName: "x-ms-source-content-crc64",
+        type: {
+            name: "ByteArray",
+        },
+    },
+};
+const range1 = {
+    parameterPath: "range",
+    mapper: {
+        serializedName: "x-ms-range",
+        required: true,
+        xmlName: "x-ms-range",
+        type: {
+            name: "String",
+        },
+    },
+};
+const comp20 = {
+    parameterPath: "comp",
+    mapper: {
+        defaultValue: "pagelist",
+        isConstant: true,
+        serializedName: "comp",
+        type: {
+            name: "String",
+        },
+    },
+};
+const prevsnapshot = {
+    parameterPath: ["options", "prevsnapshot"],
+    mapper: {
+        serializedName: "prevsnapshot",
+        xmlName: "prevsnapshot",
+        type: {
+            name: "String",
+        },
+    },
+};
+const prevSnapshotUrl = {
+    parameterPath: ["options", "prevSnapshotUrl"],
+    mapper: {
+        serializedName: "x-ms-previous-snapshot-url",
+        xmlName: "x-ms-previous-snapshot-url",
+        type: {
+            name: "String",
+        },
+    },
+};
+const sequenceNumberAction = {
+    parameterPath: "sequenceNumberAction",
+    mapper: {
+        serializedName: "x-ms-sequence-number-action",
+        required: true,
+        xmlName: "x-ms-sequence-number-action",
+        type: {
+            name: "Enum",
+            allowedValues: ["max", "update", "increment"],
+        },
+    },
+};
+const comp21 = {
+    parameterPath: "comp",
+    mapper: {
+        defaultValue: "incrementalcopy",
+        isConstant: true,
+        serializedName: "comp",
+        type: {
+            name: "String",
+        },
+    },
+};
+const blobType1 = {
+    parameterPath: "blobType",
+    mapper: {
+        defaultValue: "AppendBlob",
+        isConstant: true,
+        serializedName: "x-ms-blob-type",
+        type: {
+            name: "String",
+        },
+    },
+};
+const comp22 = {
+    parameterPath: "comp",
+    mapper: {
+        defaultValue: "appendblock",
+        isConstant: true,
+        serializedName: "comp",
+        type: {
+            name: "String",
+        },
+    },
+};
+const maxSize = {
+    parameterPath: ["options", "appendPositionAccessConditions", "maxSize"],
+    mapper: {
+        serializedName: "x-ms-blob-condition-maxsize",
+        xmlName: "x-ms-blob-condition-maxsize",
+        type: {
+            name: "Number",
+        },
+    },
+};
+const appendPosition = {
+    parameterPath: [
+        "options",
+        "appendPositionAccessConditions",
+        "appendPosition",
+    ],
+    mapper: {
+        serializedName: "x-ms-blob-condition-appendpos",
+        xmlName: "x-ms-blob-condition-appendpos",
+        type: {
+            name: "Number",
+        },
+    },
+};
+const sourceRange1 = {
+    parameterPath: ["options", "sourceRange"],
+    mapper: {
+        serializedName: "x-ms-source-range",
+        xmlName: "x-ms-source-range",
+        type: {
+            name: "String",
+        },
+    },
+};
+const comp23 = {
+    parameterPath: "comp",
+    mapper: {
+        defaultValue: "seal",
+        isConstant: true,
+        serializedName: "comp",
+        type: {
+            name: "String",
+        },
+    },
+};
+const blobType2 = {
+    parameterPath: "blobType",
+    mapper: {
+        defaultValue: "BlockBlob",
+        isConstant: true,
+        serializedName: "x-ms-blob-type",
+        type: {
+            name: "String",
+        },
+    },
+};
+const copySourceBlobProperties = {
+    parameterPath: ["options", "copySourceBlobProperties"],
+    mapper: {
+        serializedName: "x-ms-copy-source-blob-properties",
+        xmlName: "x-ms-copy-source-blob-properties",
+        type: {
+            name: "Boolean",
+        },
+    },
+};
+const comp24 = {
+    parameterPath: "comp",
+    mapper: {
+        defaultValue: "block",
+        isConstant: true,
+        serializedName: "comp",
+        type: {
+            name: "String",
+        },
+    },
+};
+const blockId = {
+    parameterPath: "blockId",
+    mapper: {
+        serializedName: "blockid",
+        required: true,
+        xmlName: "blockid",
+        type: {
+            name: "String",
+        },
+    },
+};
+const blocks = {
+    parameterPath: "blocks",
+    mapper: BlockLookupList,
+};
+const comp25 = {
+    parameterPath: "comp",
+    mapper: {
+        defaultValue: "blocklist",
+        isConstant: true,
+        serializedName: "comp",
+        type: {
+            name: "String",
+        },
+    },
+};
+const listType = {
+    parameterPath: "listType",
+    mapper: {
+        defaultValue: "committed",
+        serializedName: "blocklisttype",
+        required: true,
+        xmlName: "blocklisttype",
+        type: {
+            name: "Enum",
+            allowedValues: ["committed", "uncommitted", "all"],
+        },
+    },
+};
+
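+/*
+ * Descriptive note on the parameter mappers above: each constant pairs a
+ * `parameterPath` (where the value is read from the operation's argument bag)
+ * with a mapper describing how the core-client serializer puts it on the wire
+ * (query parameter, header, or request body). Entries with `isConstant: true`
+ * always emit their `defaultValue`. A hedged sketch of how an operation spec
+ * combines them (the spec name below is illustrative, not one of the generated
+ * specs in this file):
+ *
+ *   const stageBlockSpecSketch = {
+ *       path: "/{containerName}/{blob}",
+ *       httpMethod: "PUT",
+ *       queryParameters: [comp24, blockId], // -> "?comp=block&blockid=<id>"
+ *   };
+ */
+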
+/*
+ * Copyright (c) Microsoft Corporation.
+ * Licensed under the MIT License.
+ *
+ * Code generated by Microsoft (R) AutoRest Code Generator.
+ * Changes may cause incorrect behavior and will be lost if the code is regenerated.
+ */
+/** Class containing Service operations. */
+class ServiceImpl {
+    /**
+     * Initialize a new instance of the Service class.
+     * @param client Reference to the service client
+     */
+    constructor(client) {
+        this.client = client;
+    }
+    /**
+     * Sets properties for a storage account's Blob service endpoint, including properties for Storage
+     * Analytics and CORS (Cross-Origin Resource Sharing) rules
+     * @param blobServiceProperties The StorageService properties.
+     * @param options The options parameters.
+     */
+    setProperties(blobServiceProperties, options) {
+        return this.client.sendOperationRequest({ blobServiceProperties, options }, setPropertiesOperationSpec);
+    }
+    /**
+     * Gets the properties of a storage account's Blob service, including properties for Storage Analytics
+     * and CORS (Cross-Origin Resource Sharing) rules.
+     * @param options The options parameters.
+     */
+    getProperties(options) {
+        return this.client.sendOperationRequest({ options }, getPropertiesOperationSpec$2);
+    }
+    /**
+     * Retrieves statistics related to replication for the Blob service. It is only available on the
+     * secondary location endpoint when read-access geo-redundant replication is enabled for the storage
+     * account.
+     * @param options The options parameters.
+     */
+    getStatistics(options) {
+        return this.client.sendOperationRequest({ options }, getStatisticsOperationSpec);
+    }
+    /**
+     * The List Containers Segment operation returns a list of the containers under the specified account
+     * @param options The options parameters.
+     */
+    listContainersSegment(options) {
+        return this.client.sendOperationRequest({ options }, listContainersSegmentOperationSpec);
+    }
+    /**
+     * Retrieves a user delegation key for the Blob service. This is only a valid operation when using
+     * bearer token authentication.
+     * @param keyInfo Key information
+     * @param options The options parameters.
+     */
+    getUserDelegationKey(keyInfo, options) {
+        return this.client.sendOperationRequest({ keyInfo, options }, getUserDelegationKeyOperationSpec);
+    }
+    /**
+     * Returns the SKU name and account kind
+     * @param options The options parameters.
+     */
+    getAccountInfo(options) {
+        return this.client.sendOperationRequest({ options }, getAccountInfoOperationSpec$2);
+    }
+    /**
+     * The Batch operation allows multiple API calls to be embedded into a single HTTP request.
+     * @param contentLength The length of the request.
+     * @param multipartContentType Required. The value of this header must be multipart/mixed with a batch
+     *                             boundary. Example header value: multipart/mixed; boundary=batch_<GUID>
+     * @param body Initial data
+     * @param options The options parameters.
+     */
+    submitBatch(contentLength, multipartContentType, body, options) {
+        return this.client.sendOperationRequest({ contentLength, multipartContentType, body, options }, submitBatchOperationSpec$1);
+    }
+    /**
+     * The Filter Blobs operation enables callers to list blobs across all containers whose tags match a
+     * given search expression.  Filter blobs searches across all containers within a storage account but
+     * can be scoped within the expression to a single container.
+     * @param options The options parameters.
+     */
+    filterBlobs(options) {
+        return this.client.sendOperationRequest({ options }, filterBlobsOperationSpec$1);
+    }
+}
+// Operation Specifications
+const xmlSerializer$5 = coreClient__namespace.createSerializer(Mappers, /* isXml */ true);
+const setPropertiesOperationSpec = {
+    path: "/",
+    httpMethod: "PUT",
+    responses: {
+        202: {
+            headersMapper: ServiceSetPropertiesHeaders,
+        },
+        default: {
+            bodyMapper: StorageError,
+            headersMapper: ServiceSetPropertiesExceptionHeaders,
+        },
+    },
+    requestBody: blobServiceProperties,
+    queryParameters: [
+        restype,
+        comp,
+        timeoutInSeconds,
+    ],
+    urlParameters: [url],
+    headerParameters: [
+        contentType,
+        accept,
+        version,
+        requestId,
+    ],
+    isXML: true,
+    contentType: "application/xml; charset=utf-8",
+    mediaType: "xml",
+    serializer: xmlSerializer$5,
+};
+const getPropertiesOperationSpec$2 = {
+    path: "/",
+    httpMethod: "GET",
+    responses: {
+        200: {
+            bodyMapper: BlobServiceProperties,
+            headersMapper: ServiceGetPropertiesHeaders,
+        },
+        default: {
+            bodyMapper: StorageError,
+            headersMapper: ServiceGetPropertiesExceptionHeaders,
+        },
+    },
+    queryParameters: [
+        restype,
+        comp,
+        timeoutInSeconds,
+    ],
+    urlParameters: [url],
+    headerParameters: [
+        version,
+        requestId,
+        accept1,
+    ],
+    isXML: true,
+    serializer: xmlSerializer$5,
+};
+const getStatisticsOperationSpec = {
+    path: "/",
+    httpMethod: "GET",
+    responses: {
+        200: {
+            bodyMapper: BlobServiceStatistics,
+            headersMapper: ServiceGetStatisticsHeaders,
+        },
+        default: {
+            bodyMapper: StorageError,
+            headersMapper: ServiceGetStatisticsExceptionHeaders,
+        },
+    },
+    queryParameters: [
+        restype,
+        timeoutInSeconds,
+        comp1,
+    ],
+    urlParameters: [url],
+    headerParameters: [
+        version,
+        requestId,
+        accept1,
+    ],
+    isXML: true,
+    serializer: xmlSerializer$5,
+};
+const listContainersSegmentOperationSpec = {
+    path: "/",
+    httpMethod: "GET",
+    responses: {
+        200: {
+            bodyMapper: ListContainersSegmentResponse,
+            headersMapper: ServiceListContainersSegmentHeaders,
+        },
+        default: {
+            bodyMapper: StorageError,
+            headersMapper: ServiceListContainersSegmentExceptionHeaders,
+        },
+    },
+    queryParameters: [
+        timeoutInSeconds,
+        comp2,
+        prefix,
+        marker,
+        maxPageSize,
+        include,
+    ],
+    urlParameters: [url],
+    headerParameters: [
+        version,
+        requestId,
+        accept1,
+    ],
+    isXML: true,
+    serializer: xmlSerializer$5,
+};
+const getUserDelegationKeyOperationSpec = {
+    path: "/",
+    httpMethod: "POST",
+    responses: {
+        200: {
+            bodyMapper: UserDelegationKey,
+            headersMapper: ServiceGetUserDelegationKeyHeaders,
+        },
+        default: {
+            bodyMapper: StorageError,
+            headersMapper: ServiceGetUserDelegationKeyExceptionHeaders,
+        },
+    },
+    requestBody: keyInfo,
+    queryParameters: [
+        restype,
+        timeoutInSeconds,
+        comp3,
+    ],
+    urlParameters: [url],
+    headerParameters: [
+        contentType,
+        accept,
+        version,
+        requestId,
+    ],
+    isXML: true,
+    contentType: "application/xml; charset=utf-8",
+    mediaType: "xml",
+    serializer: xmlSerializer$5,
+};
+const getAccountInfoOperationSpec$2 = {
+    path: "/",
+    httpMethod: "GET",
+    responses: {
+        200: {
+            headersMapper: ServiceGetAccountInfoHeaders,
+        },
+        default: {
+            bodyMapper: StorageError,
+            headersMapper: ServiceGetAccountInfoExceptionHeaders,
+        },
+    },
+    queryParameters: [
+        comp,
+        timeoutInSeconds,
+        restype1,
+    ],
+    urlParameters: [url],
+    headerParameters: [
+        version,
+        requestId,
+        accept1,
+    ],
+    isXML: true,
+    serializer: xmlSerializer$5,
+};
+const submitBatchOperationSpec$1 = {
+    path: "/",
+    httpMethod: "POST",
+    responses: {
+        202: {
+            bodyMapper: {
+                type: { name: "Stream" },
+                serializedName: "parsedResponse",
+            },
+            headersMapper: ServiceSubmitBatchHeaders,
+        },
+        default: {
+            bodyMapper: StorageError,
+            headersMapper: ServiceSubmitBatchExceptionHeaders,
+        },
+    },
+    requestBody: body,
+    queryParameters: [timeoutInSeconds, comp4],
+    urlParameters: [url],
+    headerParameters: [
+        accept,
+        version,
+        requestId,
+        contentLength,
+        multipartContentType,
+    ],
+    isXML: true,
+    contentType: "application/xml; charset=utf-8",
+    mediaType: "xml",
+    serializer: xmlSerializer$5,
+};
+const filterBlobsOperationSpec$1 = {
+    path: "/",
+    httpMethod: "GET",
+    responses: {
+        200: {
+            bodyMapper: FilterBlobSegment,
+            headersMapper: ServiceFilterBlobsHeaders,
+        },
+        default: {
+            bodyMapper: StorageError,
+            headersMapper: ServiceFilterBlobsExceptionHeaders,
+        },
+    },
+    queryParameters: [
+        timeoutInSeconds,
+        marker,
+        maxPageSize,
+        comp5,
+        where,
+    ],
+    urlParameters: [url],
+    headerParameters: [
+        version,
+        requestId,
+        accept1,
+    ],
+    isXML: true,
+    serializer: xmlSerializer$5,
+};
+
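+/*
+ * Usage sketch (illustrative only): `storageClient` below stands for an already
+ * configured core-client ServiceClient and is not defined in this file.
+ *
+ *   const service = new ServiceImpl(storageClient);
+ *   // sendOperationRequest pairs the argument bag with the matching spec above
+ *   // (getProperties -> getPropertiesOperationSpec$2) and deserializes the XML
+ *   // response through the BlobServiceProperties mapper.
+ *   const props = await service.getProperties({});
+ */
+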
+/*
+ * Copyright (c) Microsoft Corporation.
+ * Licensed under the MIT License.
+ *
+ * Code generated by Microsoft (R) AutoRest Code Generator.
+ * Changes may cause incorrect behavior and will be lost if the code is regenerated.
+ */
+/** Class containing Container operations. */
+class ContainerImpl {
+    /**
+     * Initialize a new instance of the Container class.
+     * @param client Reference to the service client
+     */
+    constructor(client) {
+        this.client = client;
+    }
+    /**
+     * Creates a new container under the specified account. If the container with the same name already
+     * exists, the operation fails
+     * @param options The options parameters.
+     */
+    create(options) {
+        return this.client.sendOperationRequest({ options }, createOperationSpec$2);
+    }
+    /**
+     * Returns all user-defined metadata and system properties for the specified container. The data
+     * returned does not include the container's list of blobs
+     * @param options The options parameters.
+     */
+    getProperties(options) {
+        return this.client.sendOperationRequest({ options }, getPropertiesOperationSpec$1);
+    }
+    /**
+     * Marks the specified container for deletion. The container and any blobs contained within it are
+     * later deleted during garbage collection.
+     * @param options The options parameters.
+     */
+    delete(options) {
+        return this.client.sendOperationRequest({ options }, deleteOperationSpec$1);
+    }
+    /**
+     * Sets one or more user-defined name-value pairs for the specified container.
+     * @param options The options parameters.
+     */
+    setMetadata(options) {
+        return this.client.sendOperationRequest({ options }, setMetadataOperationSpec$1);
+    }
+    /**
+     * Gets the permissions for the specified container. The permissions indicate whether container data
+     * may be accessed publicly.
+     * @param options The options parameters.
+     */
+    getAccessPolicy(options) {
+        return this.client.sendOperationRequest({ options }, getAccessPolicyOperationSpec);
+    }
+    /**
+     * Sets the permissions for the specified container. The permissions indicate whether blobs in a
+     * container may be accessed publicly.
+     * @param options The options parameters.
+     */
+    setAccessPolicy(options) {
+        return this.client.sendOperationRequest({ options }, setAccessPolicyOperationSpec);
+    }
+    /**
+     * Restores a previously-deleted container.
+     * @param options The options parameters.
+     */
+    restore(options) {
+        return this.client.sendOperationRequest({ options }, restoreOperationSpec);
+    }
+    /**
+     * Renames an existing container.
+     * @param sourceContainerName Required.  Specifies the name of the container to rename.
+     * @param options The options parameters.
+     */
+    rename(sourceContainerName, options) {
+        return this.client.sendOperationRequest({ sourceContainerName, options }, renameOperationSpec);
+    }
+    /**
+     * The Batch operation allows multiple API calls to be embedded into a single HTTP request.
+     * @param contentLength The length of the request.
+     * @param multipartContentType Required. The value of this header must be multipart/mixed with a batch
+     *                             boundary. Example header value: multipart/mixed; boundary=batch_<GUID>
+     * @param body Initial data
+     * @param options The options parameters.
+     */
+    submitBatch(contentLength, multipartContentType, body, options) {
+        return this.client.sendOperationRequest({ contentLength, multipartContentType, body, options }, submitBatchOperationSpec);
+    }
+    /**
+     * The Filter Blobs operation enables callers to list blobs in a container whose tags match a given
+     * search expression.  Filter blobs searches within the given container.
+     * @param options The options parameters.
+     */
+    filterBlobs(options) {
+        return this.client.sendOperationRequest({ options }, filterBlobsOperationSpec);
+    }
+    /**
+     * [Update] establishes and manages a lock on a container for delete operations. The lock duration can
+     * be 15 to 60 seconds, or can be infinite
+     * @param options The options parameters.
+     */
+    acquireLease(options) {
+        return this.client.sendOperationRequest({ options }, acquireLeaseOperationSpec$1);
+    }
+    /**
+     * [Update] establishes and manages a lock on a container for delete operations. The lock duration can
+     * be 15 to 60 seconds, or can be infinite
+     * @param leaseId Specifies the current lease ID on the resource.
+     * @param options The options parameters.
+     */
+    releaseLease(leaseId, options) {
+        return this.client.sendOperationRequest({ leaseId, options }, releaseLeaseOperationSpec$1);
+    }
+    /**
+     * [Update] establishes and manages a lock on a container for delete operations. The lock duration can
+     * be 15 to 60 seconds, or can be infinite
+     * @param leaseId Specifies the current lease ID on the resource.
+     * @param options The options parameters.
+     */
+    renewLease(leaseId, options) {
+        return this.client.sendOperationRequest({ leaseId, options }, renewLeaseOperationSpec$1);
+    }
+    /**
+     * [Update] establishes and manages a lock on a container for delete operations. The lock duration can
+     * be 15 to 60 seconds, or can be infinite
+     * @param options The options parameters.
+     */
+    breakLease(options) {
+        return this.client.sendOperationRequest({ options }, breakLeaseOperationSpec$1);
+    }
+    /**
+     * [Update] establishes and manages a lock on a container for delete operations. The lock duration can
+     * be 15 to 60 seconds, or can be infinite
+     * @param leaseId Specifies the current lease ID on the resource.
+     * @param proposedLeaseId Proposed lease ID, in a GUID string format. The Blob service returns 400
+     *                        (Invalid request) if the proposed lease ID is not in the correct format. See Guid Constructor
+     *                        (String) for a list of valid GUID string formats.
+     * @param options The options parameters.
+     */
+    changeLease(leaseId, proposedLeaseId, options) {
+        return this.client.sendOperationRequest({ leaseId, proposedLeaseId, options }, changeLeaseOperationSpec$1);
+    }
+    /**
+     * [Update] The List Blobs operation returns a list of the blobs under the specified container
+     * @param options The options parameters.
+     */
+    listBlobFlatSegment(options) {
+        return this.client.sendOperationRequest({ options }, listBlobFlatSegmentOperationSpec);
+    }
+    /**
+     * [Update] The List Blobs operation returns a list of the blobs under the specified container
+     * @param delimiter When the request includes this parameter, the operation returns a BlobPrefix
+     *                  element in the response body that acts as a placeholder for all blobs whose names begin with the
+     *                  same substring up to the appearance of the delimiter character. The delimiter may be a single
+     *                  character or a string.
+     * @param options The options parameters.
+     */
+    listBlobHierarchySegment(delimiter, options) {
+        return this.client.sendOperationRequest({ delimiter, options }, listBlobHierarchySegmentOperationSpec);
+    }
+    /**
+     * Returns the SKU name and account kind
+     * @param options The options parameters.
+     */
+    getAccountInfo(options) {
+        return this.client.sendOperationRequest({ options }, getAccountInfoOperationSpec$1);
+    }
+}
+// Operation Specifications
+const xmlSerializer$4 = coreClient__namespace.createSerializer(Mappers, /* isXml */ true);
+const createOperationSpec$2 = {
+    path: "/{containerName}",
+    httpMethod: "PUT",
+    responses: {
+        201: {
+            headersMapper: ContainerCreateHeaders,
+        },
+        default: {
+            bodyMapper: StorageError,
+            headersMapper: ContainerCreateExceptionHeaders,
+        },
+    },
+    queryParameters: [timeoutInSeconds, restype2],
+    urlParameters: [url],
+    headerParameters: [
+        version,
+        requestId,
+        accept1,
+        metadata,
+        access,
+        defaultEncryptionScope,
+        preventEncryptionScopeOverride,
+    ],
+    isXML: true,
+    serializer: xmlSerializer$4,
+};
+const getPropertiesOperationSpec$1 = {
+    path: "/{containerName}",
+    httpMethod: "GET",
+    responses: {
+        200: {
+            headersMapper: ContainerGetPropertiesHeaders,
+        },
+        default: {
+            bodyMapper: StorageError,
+            headersMapper: ContainerGetPropertiesExceptionHeaders,
+        },
+    },
+    queryParameters: [timeoutInSeconds, restype2],
+    urlParameters: [url],
+    headerParameters: [
+        version,
+        requestId,
+        accept1,
+        leaseId,
+    ],
+    isXML: true,
+    serializer: xmlSerializer$4,
+};
+const deleteOperationSpec$1 = {
+    path: "/{containerName}",
+    httpMethod: "DELETE",
+    responses: {
+        202: {
+            headersMapper: ContainerDeleteHeaders,
+        },
+        default: {
+            bodyMapper: StorageError,
+            headersMapper: ContainerDeleteExceptionHeaders,
+        },
+    },
+    queryParameters: [timeoutInSeconds, restype2],
+    urlParameters: [url],
+    headerParameters: [
+        version,
+        requestId,
+        accept1,
+        leaseId,
+        ifModifiedSince,
+        ifUnmodifiedSince,
+    ],
+    isXML: true,
+    serializer: xmlSerializer$4,
+};
+const setMetadataOperationSpec$1 = {
+    path: "/{containerName}",
+    httpMethod: "PUT",
+    responses: {
+        200: {
+            headersMapper: ContainerSetMetadataHeaders,
+        },
+        default: {
+            bodyMapper: StorageError,
+            headersMapper: ContainerSetMetadataExceptionHeaders,
+        },
+    },
+    queryParameters: [
+        timeoutInSeconds,
+        restype2,
+        comp6,
+    ],
+    urlParameters: [url],
+    headerParameters: [
+        version,
+        requestId,
+        accept1,
+        metadata,
+        leaseId,
+        ifModifiedSince,
+    ],
+    isXML: true,
+    serializer: xmlSerializer$4,
+};
+const getAccessPolicyOperationSpec = {
+    path: "/{containerName}",
+    httpMethod: "GET",
+    responses: {
+        200: {
+            bodyMapper: {
+                type: {
+                    name: "Sequence",
+                    element: {
+                        type: { name: "Composite", className: "SignedIdentifier" },
+                    },
+                },
+                serializedName: "SignedIdentifiers",
+                xmlName: "SignedIdentifiers",
+                xmlIsWrapped: true,
+                xmlElementName: "SignedIdentifier",
+            },
+            headersMapper: ContainerGetAccessPolicyHeaders,
+        },
+        default: {
+            bodyMapper: StorageError,
+            headersMapper: ContainerGetAccessPolicyExceptionHeaders,
+        },
+    },
+    queryParameters: [
+        timeoutInSeconds,
+        restype2,
+        comp7,
+    ],
+    urlParameters: [url],
+    headerParameters: [
+        version,
+        requestId,
+        accept1,
+        leaseId,
+    ],
+    isXML: true,
+    serializer: xmlSerializer$4,
+};
+const setAccessPolicyOperationSpec = {
+    path: "/{containerName}",
+    httpMethod: "PUT",
+    responses: {
+        200: {
+            headersMapper: ContainerSetAccessPolicyHeaders,
+        },
+        default: {
+            bodyMapper: StorageError,
+            headersMapper: ContainerSetAccessPolicyExceptionHeaders,
+        },
+    },
+    requestBody: containerAcl,
+    queryParameters: [
+        timeoutInSeconds,
+        restype2,
+        comp7,
+    ],
+    urlParameters: [url],
+    headerParameters: [
+        contentType,
+        accept,
+        version,
+        requestId,
+        access,
+        leaseId,
+        ifModifiedSince,
+        ifUnmodifiedSince,
+    ],
+    isXML: true,
+    contentType: "application/xml; charset=utf-8",
+    mediaType: "xml",
+    serializer: xmlSerializer$4,
+};
+const restoreOperationSpec = {
+    path: "/{containerName}",
+    httpMethod: "PUT",
+    responses: {
+        201: {
+            headersMapper: ContainerRestoreHeaders,
+        },
+        default: {
+            bodyMapper: StorageError,
+            headersMapper: ContainerRestoreExceptionHeaders,
+        },
+    },
+    queryParameters: [
+        timeoutInSeconds,
+        restype2,
+        comp8,
+    ],
+    urlParameters: [url],
+    headerParameters: [
+        version,
+        requestId,
+        accept1,
+        deletedContainerName,
+        deletedContainerVersion,
+    ],
+    isXML: true,
+    serializer: xmlSerializer$4,
+};
+const renameOperationSpec = {
+    path: "/{containerName}",
+    httpMethod: "PUT",
+    responses: {
+        200: {
+            headersMapper: ContainerRenameHeaders,
+        },
+        default: {
+            bodyMapper: StorageError,
+            headersMapper: ContainerRenameExceptionHeaders,
+        },
+    },
+    queryParameters: [
+        timeoutInSeconds,
+        restype2,
+        comp9,
+    ],
+    urlParameters: [url],
+    headerParameters: [
+        version,
+        requestId,
+        accept1,
+        sourceContainerName,
+        sourceLeaseId,
+    ],
+    isXML: true,
+    serializer: xmlSerializer$4,
+};
+const submitBatchOperationSpec = {
+    path: "/{containerName}",
+    httpMethod: "POST",
+    responses: {
+        202: {
+            bodyMapper: {
+                type: { name: "Stream" },
+                serializedName: "parsedResponse",
+            },
+            headersMapper: ContainerSubmitBatchHeaders,
+        },
+        default: {
+            bodyMapper: StorageError,
+            headersMapper: ContainerSubmitBatchExceptionHeaders,
+        },
+    },
+    requestBody: body,
+    queryParameters: [
+        timeoutInSeconds,
+        comp4,
+        restype2,
+    ],
+    urlParameters: [url],
+    headerParameters: [
+        accept,
+        version,
+        requestId,
+        contentLength,
+        multipartContentType,
+    ],
+    isXML: true,
+    contentType: "application/xml; charset=utf-8",
+    mediaType: "xml",
+    serializer: xmlSerializer$4,
+};
+const filterBlobsOperationSpec = {
+    path: "/{containerName}",
+    httpMethod: "GET",
+    responses: {
+        200: {
+            bodyMapper: FilterBlobSegment,
+            headersMapper: ContainerFilterBlobsHeaders,
+        },
+        default: {
+            bodyMapper: StorageError,
+            headersMapper: ContainerFilterBlobsExceptionHeaders,
+        },
+    },
+    queryParameters: [
+        timeoutInSeconds,
+        marker,
+        maxPageSize,
+        comp5,
+        where,
+        restype2,
+    ],
+    urlParameters: [url],
+    headerParameters: [
+        version,
+        requestId,
+        accept1,
+    ],
+    isXML: true,
+    serializer: xmlSerializer$4,
+};
+const acquireLeaseOperationSpec$1 = {
+    path: "/{containerName}",
+    httpMethod: "PUT",
+    responses: {
+        201: {
+            headersMapper: ContainerAcquireLeaseHeaders,
+        },
+        default: {
+            bodyMapper: StorageError,
+            headersMapper: ContainerAcquireLeaseExceptionHeaders,
+        },
+    },
+    queryParameters: [
+        timeoutInSeconds,
+        restype2,
+        comp10,
+    ],
+    urlParameters: [url],
+    headerParameters: [
+        version,
+        requestId,
+        accept1,
+        ifModifiedSince,
+        ifUnmodifiedSince,
+        action,
+        duration,
+        proposedLeaseId,
+    ],
+    isXML: true,
+    serializer: xmlSerializer$4,
+};
+const releaseLeaseOperationSpec$1 = {
+    path: "/{containerName}",
+    httpMethod: "PUT",
+    responses: {
+        200: {
+            headersMapper: ContainerReleaseLeaseHeaders,
+        },
+        default: {
+            bodyMapper: StorageError,
+            headersMapper: ContainerReleaseLeaseExceptionHeaders,
+        },
+    },
+    queryParameters: [
+        timeoutInSeconds,
+        restype2,
+        comp10,
+    ],
+    urlParameters: [url],
+    headerParameters: [
+        version,
+        requestId,
+        accept1,
+        ifModifiedSince,
+        ifUnmodifiedSince,
+        action1,
+        leaseId1,
+    ],
+    isXML: true,
+    serializer: xmlSerializer$4,
+};
+const renewLeaseOperationSpec$1 = {
+    path: "/{containerName}",
+    httpMethod: "PUT",
+    responses: {
+        200: {
+            headersMapper: ContainerRenewLeaseHeaders,
+        },
+        default: {
+            bodyMapper: StorageError,
+            headersMapper: ContainerRenewLeaseExceptionHeaders,
+        },
+    },
+    queryParameters: [
+        timeoutInSeconds,
+        restype2,
+        comp10,
+    ],
+    urlParameters: [url],
+    headerParameters: [
+        version,
+        requestId,
+        accept1,
+        ifModifiedSince,
+        ifUnmodifiedSince,
+        leaseId1,
+        action2,
+    ],
+    isXML: true,
+    serializer: xmlSerializer$4,
+};
+const breakLeaseOperationSpec$1 = {
+    path: "/{containerName}",
+    httpMethod: "PUT",
+    responses: {
+        202: {
+            headersMapper: ContainerBreakLeaseHeaders,
+        },
+        default: {
+            bodyMapper: StorageError,
+            headersMapper: ContainerBreakLeaseExceptionHeaders,
+        },
+    },
+    queryParameters: [
+        timeoutInSeconds,
+        restype2,
+        comp10,
+    ],
+    urlParameters: [url],
+    headerParameters: [
+        version,
+        requestId,
+        accept1,
+        ifModifiedSince,
+        ifUnmodifiedSince,
+        action3,
+        breakPeriod,
+    ],
+    isXML: true,
+    serializer: xmlSerializer$4,
+};
+const changeLeaseOperationSpec$1 = {
+    path: "/{containerName}",
+    httpMethod: "PUT",
+    responses: {
+        200: {
+            headersMapper: ContainerChangeLeaseHeaders,
+        },
+        default: {
+            bodyMapper: StorageError,
+            headersMapper: ContainerChangeLeaseExceptionHeaders,
+        },
+    },
+    queryParameters: [
+        timeoutInSeconds,
+        restype2,
+        comp10,
+    ],
+    urlParameters: [url],
+    headerParameters: [
+        version,
+        requestId,
+        accept1,
+        ifModifiedSince,
+        ifUnmodifiedSince,
+        leaseId1,
+        action4,
+        proposedLeaseId1,
+    ],
+    isXML: true,
+    serializer: xmlSerializer$4,
+};
+const listBlobFlatSegmentOperationSpec = {
+    path: "/{containerName}",
+    httpMethod: "GET",
+    responses: {
+        200: {
+            bodyMapper: ListBlobsFlatSegmentResponse,
+            headersMapper: ContainerListBlobFlatSegmentHeaders,
+        },
+        default: {
+            bodyMapper: StorageError,
+            headersMapper: ContainerListBlobFlatSegmentExceptionHeaders,
+        },
+    },
+    queryParameters: [
+        timeoutInSeconds,
+        comp2,
+        prefix,
+        marker,
+        maxPageSize,
+        restype2,
+        include1,
+    ],
+    urlParameters: [url],
+    headerParameters: [
+        version,
+        requestId,
+        accept1,
+    ],
+    isXML: true,
+    serializer: xmlSerializer$4,
+};
+const listBlobHierarchySegmentOperationSpec = {
+    path: "/{containerName}",
+    httpMethod: "GET",
+    responses: {
+        200: {
+            bodyMapper: ListBlobsHierarchySegmentResponse,
+            headersMapper: ContainerListBlobHierarchySegmentHeaders,
+        },
+        default: {
+            bodyMapper: StorageError,
+            headersMapper: ContainerListBlobHierarchySegmentExceptionHeaders,
+        },
+    },
+    queryParameters: [
+        timeoutInSeconds,
+        comp2,
+        prefix,
+        marker,
+        maxPageSize,
+        restype2,
+        include1,
+        delimiter,
+    ],
+    urlParameters: [url],
+    headerParameters: [
+        version,
+        requestId,
+        accept1,
+    ],
+    isXML: true,
+    serializer: xmlSerializer$4,
+};
+const getAccountInfoOperationSpec$1 = {
+    path: "/{containerName}",
+    httpMethod: "GET",
+    responses: {
+        200: {
+            headersMapper: ContainerGetAccountInfoHeaders,
+        },
+        default: {
+            bodyMapper: StorageError,
+            headersMapper: ContainerGetAccountInfoExceptionHeaders,
+        },
+    },
+    queryParameters: [
+        comp,
+        timeoutInSeconds,
+        restype1,
+    ],
+    urlParameters: [url],
+    headerParameters: [
+        version,
+        requestId,
+        accept1,
+    ],
+    isXML: true,
+    serializer: xmlSerializer$4,
+};
+
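+/*
+ * Lease sketch (illustrative only): `storageClient` again stands for a
+ * pre-configured core-client ServiceClient, and the `duration` option and
+ * `leaseId` response header are assumed to be exposed the way the generated
+ * mappers suggest. The container lease specs above all share the same path and
+ * lease query constants (restype2, comp10) and differ mainly in the lease
+ * action header constant (action, action1, ..., action4).
+ *
+ *   const container = new ContainerImpl(storageClient);
+ *   const acquired = await container.acquireLease({ duration: -1 });
+ *   await container.releaseLease(acquired.leaseId);
+ */
+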
+/*
+ * Copyright (c) Microsoft Corporation.
+ * Licensed under the MIT License.
+ *
+ * Code generated by Microsoft (R) AutoRest Code Generator.
+ * Changes may cause incorrect behavior and will be lost if the code is regenerated.
+ */
+/** Class containing Blob operations. */
+class BlobImpl {
+    /**
+     * Initialize a new instance of the Blob class.
+     * @param client Reference to the service client
+     */
+    constructor(client) {
+        this.client = client;
+    }
+    /**
+     * The Download operation reads or downloads a blob from the system, including its metadata and
+     * properties. You can also call Download to read a snapshot.
+     * @param options The options parameters.
+     */
+    download(options) {
+        return this.client.sendOperationRequest({ options }, downloadOperationSpec);
+    }
+    /**
+     * The Get Properties operation returns all user-defined metadata, standard HTTP properties, and system
+     * properties for the blob. It does not return the content of the blob.
+     * @param options The options parameters.
+     */
+    getProperties(options) {
+        return this.client.sendOperationRequest({ options }, getPropertiesOperationSpec);
+    }
+    /**
+     * If the storage account's soft delete feature is disabled, then when a blob is deleted, it is
+     * permanently removed from the storage account. If the storage account's soft delete feature is
+     * enabled, then when a blob is deleted, it is marked for deletion and becomes inaccessible
+     * immediately. However, the blob service retains the blob or snapshot for the number of days specified
+     * by the DeleteRetentionPolicy section of [Storage service properties]
+     * (Set-Blob-Service-Properties.md). After the specified number of days has passed, the blob's data is
+     * permanently removed from the storage account. Note that you continue to be charged for the
+     * soft-deleted blob's storage until it is permanently removed. Use the List Blobs API and specify the
+     * "include=deleted" query parameter to discover which blobs and snapshots have been soft deleted. You
+     * can then use the Undelete Blob API to restore a soft-deleted blob. All other operations on a
+     * soft-deleted blob or snapshot cause the service to return an HTTP status code of 404
+     * (ResourceNotFound).
+     * @param options The options parameters.
+     */
+    delete(options) {
+        return this.client.sendOperationRequest({ options }, deleteOperationSpec);
+    }
+    /**
+     * Undelete a blob that was previously soft deleted
+     * @param options The options parameters.
+     */
+    undelete(options) {
+        return this.client.sendOperationRequest({ options }, undeleteOperationSpec);
+    }
+    /**
+     * Sets the time a blob will expire and be deleted.
+     * @param expiryOptions Required. Indicates mode of the expiry time
+     * @param options The options parameters.
+     */
+    setExpiry(expiryOptions, options) {
+        return this.client.sendOperationRequest({ expiryOptions, options }, setExpiryOperationSpec);
+    }
+    /**
+     * The Set HTTP Headers operation sets system properties on the blob
+     * @param options The options parameters.
+     */
+    setHttpHeaders(options) {
+        return this.client.sendOperationRequest({ options }, setHttpHeadersOperationSpec);
+    }
+    /**
+     * The Set Immutability Policy operation sets the immutability policy on the blob
+     * @param options The options parameters.
+     */
+    setImmutabilityPolicy(options) {
+        return this.client.sendOperationRequest({ options }, setImmutabilityPolicyOperationSpec);
+    }
+    /**
+     * The Delete Immutability Policy operation deletes the immutability policy on the blob
+     * @param options The options parameters.
+     */
+    deleteImmutabilityPolicy(options) {
+        return this.client.sendOperationRequest({ options }, deleteImmutabilityPolicyOperationSpec);
+    }
+    /**
+     * The Set Legal Hold operation sets a legal hold on the blob.
+     * @param legalHold Specified if a legal hold should be set on the blob.
+     * @param options The options parameters.
+     */
+    setLegalHold(legalHold, options) {
+        return this.client.sendOperationRequest({ legalHold, options }, setLegalHoldOperationSpec);
+    }
+    /**
+     * The Set Blob Metadata operation sets user-defined metadata for the specified blob as one or more
+     * name-value pairs
+     * @param options The options parameters.
+     */
+    setMetadata(options) {
+        return this.client.sendOperationRequest({ options }, setMetadataOperationSpec);
+    }
+    /**
+     * [Update] The Lease Blob operation establishes and manages a lock on a blob for write and delete
+     * operations
+     * @param options The options parameters.
+     */
+    acquireLease(options) {
+        return this.client.sendOperationRequest({ options }, acquireLeaseOperationSpec);
+    }
+    /**
+     * [Update] The Lease Blob operation establishes and manages a lock on a blob for write and delete
+     * operations
+     * @param leaseId Specifies the current lease ID on the resource.
+     * @param options The options parameters.
+     */
+    releaseLease(leaseId, options) {
+        return this.client.sendOperationRequest({ leaseId, options }, releaseLeaseOperationSpec);
+    }
+    /**
+     * [Update] The Lease Blob operation establishes and manages a lock on a blob for write and delete
+     * operations
+     * @param leaseId Specifies the current lease ID on the resource.
+     * @param options The options parameters.
+     */
+    renewLease(leaseId, options) {
+        return this.client.sendOperationRequest({ leaseId, options }, renewLeaseOperationSpec);
+    }
+    /**
+     * [Update] The Lease Blob operation establishes and manages a lock on a blob for write and delete
+     * operations
+     * @param leaseId Specifies the current lease ID on the resource.
+     * @param proposedLeaseId Proposed lease ID, in a GUID string format. The Blob service returns 400
+     *                        (Invalid request) if the proposed lease ID is not in the correct format. See Guid Constructor
+     *                        (String) for a list of valid GUID string formats.
+     * @param options The options parameters.
+     */
+    changeLease(leaseId, proposedLeaseId, options) {
+        return this.client.sendOperationRequest({ leaseId, proposedLeaseId, options }, changeLeaseOperationSpec);
+    }
+    /**
+     * [Update] The Lease Blob operation establishes and manages a lock on a blob for write and delete
+     * operations
+     * @param options The options parameters.
+     */
+    breakLease(options) {
+        return this.client.sendOperationRequest({ options }, breakLeaseOperationSpec);
+    }
+    /**
+     * The Create Snapshot operation creates a read-only snapshot of a blob
+     * @param options The options parameters.
+     */
+    createSnapshot(options) {
+        return this.client.sendOperationRequest({ options }, createSnapshotOperationSpec);
+    }
+    /**
+     * The Start Copy From URL operation copies a blob or an internet resource to a new blob.
+     * @param copySource Specifies the name of the source page blob snapshot. This value is a URL of up to
+     *                   2 KB in length that specifies a page blob snapshot. The value should be URL-encoded as it would
+     *                   appear in a request URI. The source blob must either be public or must be authenticated via a shared
+     *                   access signature.
+     * @param options The options parameters.
+     */
+    startCopyFromURL(copySource, options) {
+        return this.client.sendOperationRequest({ copySource, options }, startCopyFromURLOperationSpec);
+    }
+    /**
+     * The Copy From URL operation copies a blob or an internet resource to a new blob. It will not return
+     * a response until the copy is complete.
+     * @param copySource Specifies the name of the source page blob snapshot. This value is a URL of up to
+     *                   2 KB in length that specifies a page blob snapshot. The value should be URL-encoded as it would
+     *                   appear in a request URI. The source blob must either be public or must be authenticated via a shared
+     *                   access signature.
+     * @param options The options parameters.
+     */
+    copyFromURL(copySource, options) {
+        return this.client.sendOperationRequest({ copySource, options }, copyFromURLOperationSpec);
+    }
+    /**
+     * The Abort Copy From URL operation aborts a pending Copy From URL operation, and leaves a destination
+     * blob with zero length and full metadata.
+     * @param copyId The copy identifier provided in the x-ms-copy-id header of the original Copy Blob
+     *               operation.
+     * @param options The options parameters.
+     */
+    abortCopyFromURL(copyId, options) {
+        return this.client.sendOperationRequest({ copyId, options }, abortCopyFromURLOperationSpec);
+    }
+    /**
+     * The Set Tier operation sets the tier on a blob. The operation is allowed on a page blob in a premium
+     * storage account and on a block blob in a blob storage account (locally redundant storage only). A
+     * premium page blob's tier determines the allowed size, IOPS, and bandwidth of the blob. A block
+     * blob's tier determines Hot/Cool/Archive storage type. This operation does not update the blob's
+     * ETag.
+     * @param tier Indicates the tier to be set on the blob.
+     * @param options The options parameters.
+     */
+    setTier(tier, options) {
+        return this.client.sendOperationRequest({ tier, options }, setTierOperationSpec);
+    }
+    /**
+     * Returns the SKU name and account kind
+     * @param options The options parameters.
+     */
+    getAccountInfo(options) {
+        return this.client.sendOperationRequest({ options }, getAccountInfoOperationSpec);
+    }
+    /**
+     * The Query operation enables users to select/project on blob data by providing simple query
+     * expressions.
+     * @param options The options parameters.
+     */
+    query(options) {
+        return this.client.sendOperationRequest({ options }, queryOperationSpec);
+    }
+    /**
+     * The Get Tags operation enables users to get the tags associated with a blob.
+     * @param options The options parameters.
+     */
+    getTags(options) {
+        return this.client.sendOperationRequest({ options }, getTagsOperationSpec);
+    }
+    /**
+     * The Set Tags operation enables users to set tags on a blob.
+     * @param options The options parameters.
+     */
+    setTags(options) {
+        return this.client.sendOperationRequest({ options }, setTagsOperationSpec);
+    }
+}
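+/*
+ * Download sketch (illustrative only): downloadOperationSpec below maps 200/206
+ * bodies to a raw stream ("parsedResponse"), so a caller consumes the stream
+ * exposed by core-client (readableStreamBody in Node, blobBody in the browser)
+ * rather than a deserialized model. `storageClient` is again an assumed,
+ * pre-configured core-client ServiceClient.
+ *
+ *   const blob = new BlobImpl(storageClient);
+ *   const response = await blob.download({ range: "bytes=0-1023" });
+ */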
+// Operation Specifications
+const xmlSerializer$3 = coreClient__namespace.createSerializer(Mappers, /* isXml */ true);
+const downloadOperationSpec = {
+    path: "/{containerName}/{blob}",
+    httpMethod: "GET",
+    responses: {
+        200: {
+            bodyMapper: {
+                type: { name: "Stream" },
+                serializedName: "parsedResponse",
+            },
+            headersMapper: BlobDownloadHeaders,
+        },
+        206: {
+            bodyMapper: {
+                type: { name: "Stream" },
+                serializedName: "parsedResponse",
+            },
+            headersMapper: BlobDownloadHeaders,
+        },
+        default: {
+            bodyMapper: StorageError,
+            headersMapper: BlobDownloadExceptionHeaders,
+        },
+    },
+    queryParameters: [
+        timeoutInSeconds,
+        snapshot,
+        versionId,
+    ],
+    urlParameters: [url],
+    headerParameters: [
+        version,
+        requestId,
+        accept1,
+        leaseId,
+        ifModifiedSince,
+        ifUnmodifiedSince,
+        range,
+        rangeGetContentMD5,
+        rangeGetContentCRC64,
+        encryptionKey,
+        encryptionKeySha256,
+        encryptionAlgorithm,
+        ifMatch,
+        ifNoneMatch,
+        ifTags,
+    ],
+    isXML: true,
+    serializer: xmlSerializer$3,
+};
+const getPropertiesOperationSpec = {
+    path: "/{containerName}/{blob}",
+    httpMethod: "HEAD",
+    responses: {
+        200: {
+            headersMapper: BlobGetPropertiesHeaders,
+        },
+        default: {
+            bodyMapper: StorageError,
+            headersMapper: BlobGetPropertiesExceptionHeaders,
+        },
+    },
+    queryParameters: [
+        timeoutInSeconds,
+        snapshot,
+        versionId,
+    ],
+    urlParameters: [url],
+    headerParameters: [
+        version,
+        requestId,
+        accept1,
+        leaseId,
+        ifModifiedSince,
+        ifUnmodifiedSince,
+        encryptionKey,
+        encryptionKeySha256,
+        encryptionAlgorithm,
+        ifMatch,
+        ifNoneMatch,
+        ifTags,
+    ],
+    isXML: true,
+    serializer: xmlSerializer$3,
+};
+const deleteOperationSpec = {
+    path: "/{containerName}/{blob}",
+    httpMethod: "DELETE",
+    responses: {
+        202: {
+            headersMapper: BlobDeleteHeaders,
+        },
+        default: {
+            bodyMapper: StorageError,
+            headersMapper: BlobDeleteExceptionHeaders,
+        },
+    },
+    queryParameters: [
+        timeoutInSeconds,
+        snapshot,
+        versionId,
+        blobDeleteType,
+    ],
+    urlParameters: [url],
+    headerParameters: [
+        version,
+        requestId,
+        accept1,
+        leaseId,
+        ifModifiedSince,
+        ifUnmodifiedSince,
+        ifMatch,
+        ifNoneMatch,
+        ifTags,
+        deleteSnapshots,
+    ],
+    isXML: true,
+    serializer: xmlSerializer$3,
+};
+const undeleteOperationSpec = {
+    path: "/{containerName}/{blob}",
+    httpMethod: "PUT",
+    responses: {
+        200: {
+            headersMapper: BlobUndeleteHeaders,
+        },
+        default: {
+            bodyMapper: StorageError,
+            headersMapper: BlobUndeleteExceptionHeaders,
+        },
+    },
+    queryParameters: [timeoutInSeconds, comp8],
+    urlParameters: [url],
+    headerParameters: [
+        version,
+        requestId,
+        accept1,
+    ],
+    isXML: true,
+    serializer: xmlSerializer$3,
+};
+const setExpiryOperationSpec = {
+    path: "/{containerName}/{blob}",
+    httpMethod: "PUT",
+    responses: {
+        200: {
+            headersMapper: BlobSetExpiryHeaders,
+        },
+        default: {
+            bodyMapper: StorageError,
+            headersMapper: BlobSetExpiryExceptionHeaders,
+        },
+    },
+    queryParameters: [timeoutInSeconds, comp11],
+    urlParameters: [url],
+    headerParameters: [
+        version,
+        requestId,
+        accept1,
+        expiryOptions,
+        expiresOn,
+    ],
+    isXML: true,
+    serializer: xmlSerializer$3,
+};
+const setHttpHeadersOperationSpec = {
+    path: "/{containerName}/{blob}",
+    httpMethod: "PUT",
+    responses: {
+        200: {
+            headersMapper: BlobSetHttpHeadersHeaders,
+        },
+        default: {
+            bodyMapper: StorageError,
+            headersMapper: BlobSetHttpHeadersExceptionHeaders,
+        },
+    },
+    queryParameters: [comp, timeoutInSeconds],
+    urlParameters: [url],
+    headerParameters: [
+        version,
+        requestId,
+        accept1,
+        leaseId,
+        ifModifiedSince,
+        ifUnmodifiedSince,
+        ifMatch,
+        ifNoneMatch,
+        ifTags,
+        blobCacheControl,
+        blobContentType,
+        blobContentMD5,
+        blobContentEncoding,
+        blobContentLanguage,
+        blobContentDisposition,
+    ],
+    isXML: true,
+    serializer: xmlSerializer$3,
+};
+const setImmutabilityPolicyOperationSpec = {
+    path: "/{containerName}/{blob}",
+    httpMethod: "PUT",
+    responses: {
+        200: {
+            headersMapper: BlobSetImmutabilityPolicyHeaders,
+        },
+        default: {
+            bodyMapper: StorageError,
+            headersMapper: BlobSetImmutabilityPolicyExceptionHeaders,
+        },
+    },
+    queryParameters: [
+        timeoutInSeconds,
+        snapshot,
+        versionId,
+        comp12,
+    ],
+    urlParameters: [url],
+    headerParameters: [
+        version,
+        requestId,
+        accept1,
+        ifUnmodifiedSince,
+        immutabilityPolicyExpiry,
+        immutabilityPolicyMode,
+    ],
+    isXML: true,
+    serializer: xmlSerializer$3,
+};
+const deleteImmutabilityPolicyOperationSpec = {
+    path: "/{containerName}/{blob}",
+    httpMethod: "DELETE",
+    responses: {
+        200: {
+            headersMapper: BlobDeleteImmutabilityPolicyHeaders,
+        },
+        default: {
+            bodyMapper: StorageError,
+            headersMapper: BlobDeleteImmutabilityPolicyExceptionHeaders,
+        },
+    },
+    queryParameters: [
+        timeoutInSeconds,
+        snapshot,
+        versionId,
+        comp12,
+    ],
+    urlParameters: [url],
+    headerParameters: [
+        version,
+        requestId,
+        accept1,
+    ],
+    isXML: true,
+    serializer: xmlSerializer$3,
+};
+const setLegalHoldOperationSpec = {
+    path: "/{containerName}/{blob}",
+    httpMethod: "PUT",
+    responses: {
+        200: {
+            headersMapper: BlobSetLegalHoldHeaders,
+        },
+        default: {
+            bodyMapper: StorageError,
+            headersMapper: BlobSetLegalHoldExceptionHeaders,
+        },
+    },
+    queryParameters: [
+        timeoutInSeconds,
+        snapshot,
+        versionId,
+        comp13,
+    ],
+    urlParameters: [url],
+    headerParameters: [
+        version,
+        requestId,
+        accept1,
+        legalHold,
+    ],
+    isXML: true,
+    serializer: xmlSerializer$3,
+};
+const setMetadataOperationSpec = {
+    path: "/{containerName}/{blob}",
+    httpMethod: "PUT",
+    responses: {
+        200: {
+            headersMapper: BlobSetMetadataHeaders,
+        },
+        default: {
+            bodyMapper: StorageError,
+            headersMapper: BlobSetMetadataExceptionHeaders,
+        },
+    },
+    queryParameters: [timeoutInSeconds, comp6],
+    urlParameters: [url],
+    headerParameters: [
+        version,
+        requestId,
+        accept1,
+        metadata,
+        leaseId,
+        ifModifiedSince,
+        ifUnmodifiedSince,
+        encryptionKey,
+        encryptionKeySha256,
+        encryptionAlgorithm,
+        ifMatch,
+        ifNoneMatch,
+        ifTags,
+        encryptionScope,
+    ],
+    isXML: true,
+    serializer: xmlSerializer$3,
+};
+const acquireLeaseOperationSpec = {
+    path: "/{containerName}/{blob}",
+    httpMethod: "PUT",
+    responses: {
+        201: {
+            headersMapper: BlobAcquireLeaseHeaders,
+        },
+        default: {
+            bodyMapper: StorageError,
+            headersMapper: BlobAcquireLeaseExceptionHeaders,
+        },
+    },
+    queryParameters: [timeoutInSeconds, comp10],
+    urlParameters: [url],
+    headerParameters: [
+        version,
+        requestId,
+        accept1,
+        ifModifiedSince,
+        ifUnmodifiedSince,
+        action,
+        duration,
+        proposedLeaseId,
+        ifMatch,
+        ifNoneMatch,
+        ifTags,
+    ],
+    isXML: true,
+    serializer: xmlSerializer$3,
+};
+const releaseLeaseOperationSpec = {
+    path: "/{containerName}/{blob}",
+    httpMethod: "PUT",
+    responses: {
+        200: {
+            headersMapper: BlobReleaseLeaseHeaders,
+        },
+        default: {
+            bodyMapper: StorageError,
+            headersMapper: BlobReleaseLeaseExceptionHeaders,
+        },
+    },
+    queryParameters: [timeoutInSeconds, comp10],
+    urlParameters: [url],
+    headerParameters: [
+        version,
+        requestId,
+        accept1,
+        ifModifiedSince,
+        ifUnmodifiedSince,
+        action1,
+        leaseId1,
+        ifMatch,
+        ifNoneMatch,
+        ifTags,
+    ],
+    isXML: true,
+    serializer: xmlSerializer$3,
+};
+const renewLeaseOperationSpec = {
+    path: "/{containerName}/{blob}",
+    httpMethod: "PUT",
+    responses: {
+        200: {
+            headersMapper: BlobRenewLeaseHeaders,
+        },
+        default: {
+            bodyMapper: StorageError,
+            headersMapper: BlobRenewLeaseExceptionHeaders,
+        },
+    },
+    queryParameters: [timeoutInSeconds, comp10],
+    urlParameters: [url],
+    headerParameters: [
+        version,
+        requestId,
+        accept1,
+        ifModifiedSince,
+        ifUnmodifiedSince,
+        leaseId1,
+        action2,
+        ifMatch,
+        ifNoneMatch,
+        ifTags,
+    ],
+    isXML: true,
+    serializer: xmlSerializer$3,
+};
+const changeLeaseOperationSpec = {
+    path: "/{containerName}/{blob}",
+    httpMethod: "PUT",
+    responses: {
+        200: {
+            headersMapper: BlobChangeLeaseHeaders,
+        },
+        default: {
+            bodyMapper: StorageError,
+            headersMapper: BlobChangeLeaseExceptionHeaders,
+        },
+    },
+    queryParameters: [timeoutInSeconds, comp10],
+    urlParameters: [url],
+    headerParameters: [
+        version,
+        requestId,
+        accept1,
+        ifModifiedSince,
+        ifUnmodifiedSince,
+        leaseId1,
+        action4,
+        proposedLeaseId1,
+        ifMatch,
+        ifNoneMatch,
+        ifTags,
+    ],
+    isXML: true,
+    serializer: xmlSerializer$3,
+};
+const breakLeaseOperationSpec = {
+    path: "/{containerName}/{blob}",
+    httpMethod: "PUT",
+    responses: {
+        202: {
+            headersMapper: BlobBreakLeaseHeaders,
+        },
+        default: {
+            bodyMapper: StorageError,
+            headersMapper: BlobBreakLeaseExceptionHeaders,
+        },
+    },
+    queryParameters: [timeoutInSeconds, comp10],
+    urlParameters: [url],
+    headerParameters: [
+        version,
+        requestId,
+        accept1,
+        ifModifiedSince,
+        ifUnmodifiedSince,
+        action3,
+        breakPeriod,
+        ifMatch,
+        ifNoneMatch,
+        ifTags,
+    ],
+    isXML: true,
+    serializer: xmlSerializer$3,
+};
+const createSnapshotOperationSpec = {
+    path: "/{containerName}/{blob}",
+    httpMethod: "PUT",
+    responses: {
+        201: {
+            headersMapper: BlobCreateSnapshotHeaders,
+        },
+        default: {
+            bodyMapper: StorageError,
+            headersMapper: BlobCreateSnapshotExceptionHeaders,
+        },
+    },
+    queryParameters: [timeoutInSeconds, comp14],
+    urlParameters: [url],
+    headerParameters: [
+        version,
+        requestId,
+        accept1,
+        metadata,
+        leaseId,
+        ifModifiedSince,
+        ifUnmodifiedSince,
+        encryptionKey,
+        encryptionKeySha256,
+        encryptionAlgorithm,
+        ifMatch,
+        ifNoneMatch,
+        ifTags,
+        encryptionScope,
+    ],
+    isXML: true,
+    serializer: xmlSerializer$3,
+};
+const startCopyFromURLOperationSpec = {
+    path: "/{containerName}/{blob}",
+    httpMethod: "PUT",
+    responses: {
+        202: {
+            headersMapper: BlobStartCopyFromURLHeaders,
+        },
+        default: {
+            bodyMapper: StorageError,
+            headersMapper: BlobStartCopyFromURLExceptionHeaders,
+        },
+    },
+    queryParameters: [timeoutInSeconds],
+    urlParameters: [url],
+    headerParameters: [
+        version,
+        requestId,
+        accept1,
+        metadata,
+        leaseId,
+        ifModifiedSince,
+        ifUnmodifiedSince,
+        ifMatch,
+        ifNoneMatch,
+        ifTags,
+        immutabilityPolicyExpiry,
+        immutabilityPolicyMode,
+        tier,
+        rehydratePriority,
+        sourceIfModifiedSince,
+        sourceIfUnmodifiedSince,
+        sourceIfMatch,
+        sourceIfNoneMatch,
+        sourceIfTags,
+        copySource,
+        blobTagsString,
+        sealBlob,
+        legalHold1,
+    ],
+    isXML: true,
+    serializer: xmlSerializer$3,
+};
+const copyFromURLOperationSpec = {
+    path: "/{containerName}/{blob}",
+    httpMethod: "PUT",
+    responses: {
+        202: {
+            headersMapper: BlobCopyFromURLHeaders,
+        },
+        default: {
+            bodyMapper: StorageError,
+            headersMapper: BlobCopyFromURLExceptionHeaders,
+        },
+    },
+    queryParameters: [timeoutInSeconds],
+    urlParameters: [url],
+    headerParameters: [
+        version,
+        requestId,
+        accept1,
+        metadata,
+        leaseId,
+        ifModifiedSince,
+        ifUnmodifiedSince,
+        ifMatch,
+        ifNoneMatch,
+        ifTags,
+        immutabilityPolicyExpiry,
+        immutabilityPolicyMode,
+        encryptionScope,
+        tier,
+        sourceIfModifiedSince,
+        sourceIfUnmodifiedSince,
+        sourceIfMatch,
+        sourceIfNoneMatch,
+        copySource,
+        blobTagsString,
+        legalHold1,
+        xMsRequiresSync,
+        sourceContentMD5,
+        copySourceAuthorization,
+        copySourceTags,
+    ],
+    isXML: true,
+    serializer: xmlSerializer$3,
+};
+const abortCopyFromURLOperationSpec = {
+    path: "/{containerName}/{blob}",
+    httpMethod: "PUT",
+    responses: {
+        204: {
+            headersMapper: BlobAbortCopyFromURLHeaders,
+        },
+        default: {
+            bodyMapper: StorageError,
+            headersMapper: BlobAbortCopyFromURLExceptionHeaders,
+        },
+    },
+    queryParameters: [
+        timeoutInSeconds,
+        comp15,
+        copyId,
+    ],
+    urlParameters: [url],
+    headerParameters: [
+        version,
+        requestId,
+        accept1,
+        leaseId,
+        copyActionAbortConstant,
+    ],
+    isXML: true,
+    serializer: xmlSerializer$3,
+};
+const setTierOperationSpec = {
+    path: "/{containerName}/{blob}",
+    httpMethod: "PUT",
+    responses: {
+        200: {
+            headersMapper: BlobSetTierHeaders,
+        },
+        202: {
+            headersMapper: BlobSetTierHeaders,
+        },
+        default: {
+            bodyMapper: StorageError,
+            headersMapper: BlobSetTierExceptionHeaders,
+        },
+    },
+    queryParameters: [
+        timeoutInSeconds,
+        snapshot,
+        versionId,
+        comp16,
+    ],
+    urlParameters: [url],
+    headerParameters: [
+        version,
+        requestId,
+        accept1,
+        leaseId,
+        ifTags,
+        rehydratePriority,
+        tier1,
+    ],
+    isXML: true,
+    serializer: xmlSerializer$3,
+};
+const getAccountInfoOperationSpec = {
+    path: "/{containerName}/{blob}",
+    httpMethod: "GET",
+    responses: {
+        200: {
+            headersMapper: BlobGetAccountInfoHeaders,
+        },
+        default: {
+            bodyMapper: StorageError,
+            headersMapper: BlobGetAccountInfoExceptionHeaders,
+        },
+    },
+    queryParameters: [
+        comp,
+        timeoutInSeconds,
+        restype1,
+    ],
+    urlParameters: [url],
+    headerParameters: [
+        version,
+        requestId,
+        accept1,
+    ],
+    isXML: true,
+    serializer: xmlSerializer$3,
+};
+const queryOperationSpec = {
+    path: "/{containerName}/{blob}",
+    httpMethod: "POST",
+    responses: {
+        200: {
+            bodyMapper: {
+                type: { name: "Stream" },
+                serializedName: "parsedResponse",
+            },
+            headersMapper: BlobQueryHeaders,
+        },
+        206: {
+            bodyMapper: {
+                type: { name: "Stream" },
+                serializedName: "parsedResponse",
+            },
+            headersMapper: BlobQueryHeaders,
+        },
+        default: {
+            bodyMapper: StorageError,
+            headersMapper: BlobQueryExceptionHeaders,
+        },
+    },
+    requestBody: queryRequest,
+    queryParameters: [
+        timeoutInSeconds,
+        snapshot,
+        comp17,
+    ],
+    urlParameters: [url],
+    headerParameters: [
+        contentType,
+        accept,
+        version,
+        requestId,
+        leaseId,
+        ifModifiedSince,
+        ifUnmodifiedSince,
+        encryptionKey,
+        encryptionKeySha256,
+        encryptionAlgorithm,
+        ifMatch,
+        ifNoneMatch,
+        ifTags,
+    ],
+    isXML: true,
+    contentType: "application/xml; charset=utf-8",
+    mediaType: "xml",
+    serializer: xmlSerializer$3,
+};
+const getTagsOperationSpec = {
+    path: "/{containerName}/{blob}",
+    httpMethod: "GET",
+    responses: {
+        200: {
+            bodyMapper: BlobTags,
+            headersMapper: BlobGetTagsHeaders,
+        },
+        default: {
+            bodyMapper: StorageError,
+            headersMapper: BlobGetTagsExceptionHeaders,
+        },
+    },
+    queryParameters: [
+        timeoutInSeconds,
+        snapshot,
+        versionId,
+        comp18,
+    ],
+    urlParameters: [url],
+    headerParameters: [
+        version,
+        requestId,
+        accept1,
+        leaseId,
+        ifTags,
+    ],
+    isXML: true,
+    serializer: xmlSerializer$3,
+};
+const setTagsOperationSpec = {
+    path: "/{containerName}/{blob}",
+    httpMethod: "PUT",
+    responses: {
+        204: {
+            headersMapper: BlobSetTagsHeaders,
+        },
+        default: {
+            bodyMapper: StorageError,
+            headersMapper: BlobSetTagsExceptionHeaders,
+        },
+    },
+    requestBody: tags,
+    queryParameters: [
+        timeoutInSeconds,
+        versionId,
+        comp18,
+    ],
+    urlParameters: [url],
+    headerParameters: [
+        contentType,
+        accept,
+        version,
+        requestId,
+        leaseId,
+        ifTags,
+        transactionalContentMD5,
+        transactionalContentCrc64,
+    ],
+    isXML: true,
+    contentType: "application/xml; charset=utf-8",
+    mediaType: "xml",
+    serializer: xmlSerializer$3,
+};
+
+/*
+ * Copyright (c) Microsoft Corporation.
+ * Licensed under the MIT License.
+ *
+ * Code generated by Microsoft (R) AutoRest Code Generator.
+ * Changes may cause incorrect behavior and will be lost if the code is regenerated.
+ */
+/** Class containing PageBlob operations. */
+class PageBlobImpl {
+    /**
+     * Initializes a new instance of the PageBlob class.
+     * @param client Reference to the service client
+     */
+    constructor(client) {
+        this.client = client;
+    }
+    /**
+     * The Create operation creates a new page blob.
+     * @param contentLength The length of the request.
+     * @param blobContentLength This header specifies the maximum size for the page blob, up to 1 TB. The
+     *                          page blob size must be aligned to a 512-byte boundary.
+     * @param options The options parameters.
+     */
+    create(contentLength, blobContentLength, options) {
+        return this.client.sendOperationRequest({ contentLength, blobContentLength, options }, createOperationSpec$1);
+    }
+    /**
+     * The Upload Pages operation writes a range of pages to a page blob
+     * @param contentLength The length of the request.
+     * @param body Initial data
+     * @param options The options parameters.
+     */
+    uploadPages(contentLength, body, options) {
+        return this.client.sendOperationRequest({ contentLength, body, options }, uploadPagesOperationSpec);
+    }
+    /**
+     * The Clear Pages operation clears a set of pages from a page blob
+     * @param contentLength The length of the request.
+     * @param options The options parameters.
+     */
+    clearPages(contentLength, options) {
+        return this.client.sendOperationRequest({ contentLength, options }, clearPagesOperationSpec);
+    }
+    /**
+     * The Upload Pages operation writes a range of pages to a page blob where the contents are read from a
+     * URL
+     * @param sourceUrl Specify a URL to the copy source.
+     * @param sourceRange Bytes of source data in the specified range. The length of this range should
+     *                    match the ContentLength header and x-ms-range/Range destination range header.
+     * @param contentLength The length of the request.
+     * @param range The range of bytes to which the source range would be written. The range should be
+     *              512-byte aligned, and range-end is required.
+     * @param options The options parameters.
+     */
+    uploadPagesFromURL(sourceUrl, sourceRange, contentLength, range, options) {
+        return this.client.sendOperationRequest({ sourceUrl, sourceRange, contentLength, range, options }, uploadPagesFromURLOperationSpec);
+    }
+    /**
+     * The Get Page Ranges operation returns the list of valid page ranges for a page blob or snapshot of a
+     * page blob
+     * @param options The options parameters.
+     */
+    getPageRanges(options) {
+        return this.client.sendOperationRequest({ options }, getPageRangesOperationSpec);
+    }
+    /**
+     * The Get Page Ranges Diff operation returns the list of valid page ranges for a page blob that were
+     * changed between the target blob and a previous snapshot.
+     * @param options The options parameters.
+     */
+    getPageRangesDiff(options) {
+        return this.client.sendOperationRequest({ options }, getPageRangesDiffOperationSpec);
+    }
+    /**
+     * Resize the Blob
+     * @param blobContentLength This header specifies the maximum size for the page blob, up to 1 TB. The
+     *                          page blob size must be aligned to a 512-byte boundary.
+     * @param options The options parameters.
+     */
+    resize(blobContentLength, options) {
+        return this.client.sendOperationRequest({ blobContentLength, options }, resizeOperationSpec);
+    }
+    /**
+     * Update the sequence number of the blob
+     * @param sequenceNumberAction Required if the x-ms-blob-sequence-number header is set for the request.
+     *                             This property applies to page blobs only. This property indicates how the service should modify the
+     *                             blob's sequence number
+     * @param options The options parameters.
+     */
+    updateSequenceNumber(sequenceNumberAction, options) {
+        return this.client.sendOperationRequest({ sequenceNumberAction, options }, updateSequenceNumberOperationSpec);
+    }
+    /**
+     * The Copy Incremental operation copies a snapshot of the source page blob to a destination page blob.
+     * The snapshot is copied such that only the differential changes between the previously copied
+     * snapshot are transferred to the destination. The copied snapshots are complete copies of the
+     * original snapshot and can be read or copied from as usual. This API is supported since REST version
+     * 2016-05-31.
+     * @param copySource Specifies the name of the source page blob snapshot. This value is a URL of up to
+     *                   2 KB in length that specifies a page blob snapshot. The value should be URL-encoded as it would
+     *                   appear in a request URI. The source blob must either be public or must be authenticated via a shared
+     *                   access signature.
+     * @param options The options parameters.
+     */
+    copyIncremental(copySource, options) {
+        return this.client.sendOperationRequest({ copySource, options }, copyIncrementalOperationSpec);
+    }
+}
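+/*
+ * Usage sketch (not generated code; illustrative only). PageBlobImpl is the low-level
+ * operation group that the public PageBlobClient delegates to. Assuming a hypothetical
+ * `storageClientContext` (an instance of the generated StorageClient defined later in
+ * this file), creating a 1 MiB page blob and writing one 512-byte-aligned page could
+ * look like:
+ *
+ *   const pageBlob = new PageBlobImpl(storageClientContext);
+ *   await pageBlob.create(0, 1024 * 1024, {});
+ *   await pageBlob.uploadPages(512, pageBuffer, { range: "bytes=0-511" });
+ *
+ * `pageBuffer` is a placeholder for 512 bytes of data; applications should normally go
+ * through PageBlobClient, which builds these options for you.
+ */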
+// Operation Specifications
+const xmlSerializer$2 = coreClient__namespace.createSerializer(Mappers, /* isXml */ true);
+const createOperationSpec$1 = {
+    path: "/{containerName}/{blob}",
+    httpMethod: "PUT",
+    responses: {
+        201: {
+            headersMapper: PageBlobCreateHeaders,
+        },
+        default: {
+            bodyMapper: StorageError,
+            headersMapper: PageBlobCreateExceptionHeaders,
+        },
+    },
+    queryParameters: [timeoutInSeconds],
+    urlParameters: [url],
+    headerParameters: [
+        version,
+        requestId,
+        accept1,
+        contentLength,
+        metadata,
+        leaseId,
+        ifModifiedSince,
+        ifUnmodifiedSince,
+        encryptionKey,
+        encryptionKeySha256,
+        encryptionAlgorithm,
+        ifMatch,
+        ifNoneMatch,
+        ifTags,
+        blobCacheControl,
+        blobContentType,
+        blobContentMD5,
+        blobContentEncoding,
+        blobContentLanguage,
+        blobContentDisposition,
+        immutabilityPolicyExpiry,
+        immutabilityPolicyMode,
+        encryptionScope,
+        tier,
+        blobTagsString,
+        legalHold1,
+        blobType,
+        blobContentLength,
+        blobSequenceNumber,
+    ],
+    isXML: true,
+    serializer: xmlSerializer$2,
+};
+const uploadPagesOperationSpec = {
+    path: "/{containerName}/{blob}",
+    httpMethod: "PUT",
+    responses: {
+        201: {
+            headersMapper: PageBlobUploadPagesHeaders,
+        },
+        default: {
+            bodyMapper: StorageError,
+            headersMapper: PageBlobUploadPagesExceptionHeaders,
+        },
+    },
+    requestBody: body1,
+    queryParameters: [timeoutInSeconds, comp19],
+    urlParameters: [url],
+    headerParameters: [
+        version,
+        requestId,
+        contentLength,
+        leaseId,
+        ifModifiedSince,
+        ifUnmodifiedSince,
+        range,
+        encryptionKey,
+        encryptionKeySha256,
+        encryptionAlgorithm,
+        ifMatch,
+        ifNoneMatch,
+        ifTags,
+        encryptionScope,
+        transactionalContentMD5,
+        transactionalContentCrc64,
+        contentType1,
+        accept2,
+        pageWrite,
+        ifSequenceNumberLessThanOrEqualTo,
+        ifSequenceNumberLessThan,
+        ifSequenceNumberEqualTo,
+    ],
+    isXML: true,
+    contentType: "application/xml; charset=utf-8",
+    mediaType: "binary",
+    serializer: xmlSerializer$2,
+};
+const clearPagesOperationSpec = {
+    path: "/{containerName}/{blob}",
+    httpMethod: "PUT",
+    responses: {
+        201: {
+            headersMapper: PageBlobClearPagesHeaders,
+        },
+        default: {
+            bodyMapper: StorageError,
+            headersMapper: PageBlobClearPagesExceptionHeaders,
+        },
+    },
+    queryParameters: [timeoutInSeconds, comp19],
+    urlParameters: [url],
+    headerParameters: [
+        version,
+        requestId,
+        accept1,
+        contentLength,
+        leaseId,
+        ifModifiedSince,
+        ifUnmodifiedSince,
+        range,
+        encryptionKey,
+        encryptionKeySha256,
+        encryptionAlgorithm,
+        ifMatch,
+        ifNoneMatch,
+        ifTags,
+        encryptionScope,
+        ifSequenceNumberLessThanOrEqualTo,
+        ifSequenceNumberLessThan,
+        ifSequenceNumberEqualTo,
+        pageWrite1,
+    ],
+    isXML: true,
+    serializer: xmlSerializer$2,
+};
+const uploadPagesFromURLOperationSpec = {
+    path: "/{containerName}/{blob}",
+    httpMethod: "PUT",
+    responses: {
+        201: {
+            headersMapper: PageBlobUploadPagesFromURLHeaders,
+        },
+        default: {
+            bodyMapper: StorageError,
+            headersMapper: PageBlobUploadPagesFromURLExceptionHeaders,
+        },
+    },
+    queryParameters: [timeoutInSeconds, comp19],
+    urlParameters: [url],
+    headerParameters: [
+        version,
+        requestId,
+        accept1,
+        contentLength,
+        leaseId,
+        ifModifiedSince,
+        ifUnmodifiedSince,
+        encryptionKey,
+        encryptionKeySha256,
+        encryptionAlgorithm,
+        ifMatch,
+        ifNoneMatch,
+        ifTags,
+        encryptionScope,
+        sourceIfModifiedSince,
+        sourceIfUnmodifiedSince,
+        sourceIfMatch,
+        sourceIfNoneMatch,
+        sourceContentMD5,
+        copySourceAuthorization,
+        pageWrite,
+        ifSequenceNumberLessThanOrEqualTo,
+        ifSequenceNumberLessThan,
+        ifSequenceNumberEqualTo,
+        sourceUrl,
+        sourceRange,
+        sourceContentCrc64,
+        range1,
+    ],
+    isXML: true,
+    serializer: xmlSerializer$2,
+};
+const getPageRangesOperationSpec = {
+    path: "/{containerName}/{blob}",
+    httpMethod: "GET",
+    responses: {
+        200: {
+            bodyMapper: PageList,
+            headersMapper: PageBlobGetPageRangesHeaders,
+        },
+        default: {
+            bodyMapper: StorageError,
+            headersMapper: PageBlobGetPageRangesExceptionHeaders,
+        },
+    },
+    queryParameters: [
+        timeoutInSeconds,
+        marker,
+        maxPageSize,
+        snapshot,
+        comp20,
+    ],
+    urlParameters: [url],
+    headerParameters: [
+        version,
+        requestId,
+        accept1,
+        leaseId,
+        ifModifiedSince,
+        ifUnmodifiedSince,
+        range,
+        ifMatch,
+        ifNoneMatch,
+        ifTags,
+    ],
+    isXML: true,
+    serializer: xmlSerializer$2,
+};
+const getPageRangesDiffOperationSpec = {
+    path: "/{containerName}/{blob}",
+    httpMethod: "GET",
+    responses: {
+        200: {
+            bodyMapper: PageList,
+            headersMapper: PageBlobGetPageRangesDiffHeaders,
+        },
+        default: {
+            bodyMapper: StorageError,
+            headersMapper: PageBlobGetPageRangesDiffExceptionHeaders,
+        },
+    },
+    queryParameters: [
+        timeoutInSeconds,
+        marker,
+        maxPageSize,
+        snapshot,
+        comp20,
+        prevsnapshot,
+    ],
+    urlParameters: [url],
+    headerParameters: [
+        version,
+        requestId,
+        accept1,
+        leaseId,
+        ifModifiedSince,
+        ifUnmodifiedSince,
+        range,
+        ifMatch,
+        ifNoneMatch,
+        ifTags,
+        prevSnapshotUrl,
+    ],
+    isXML: true,
+    serializer: xmlSerializer$2,
+};
+const resizeOperationSpec = {
+    path: "/{containerName}/{blob}",
+    httpMethod: "PUT",
+    responses: {
+        200: {
+            headersMapper: PageBlobResizeHeaders,
+        },
+        default: {
+            bodyMapper: StorageError,
+            headersMapper: PageBlobResizeExceptionHeaders,
+        },
+    },
+    queryParameters: [comp, timeoutInSeconds],
+    urlParameters: [url],
+    headerParameters: [
+        version,
+        requestId,
+        accept1,
+        leaseId,
+        ifModifiedSince,
+        ifUnmodifiedSince,
+        encryptionKey,
+        encryptionKeySha256,
+        encryptionAlgorithm,
+        ifMatch,
+        ifNoneMatch,
+        ifTags,
+        encryptionScope,
+        blobContentLength,
+    ],
+    isXML: true,
+    serializer: xmlSerializer$2,
+};
+const updateSequenceNumberOperationSpec = {
+    path: "/{containerName}/{blob}",
+    httpMethod: "PUT",
+    responses: {
+        200: {
+            headersMapper: PageBlobUpdateSequenceNumberHeaders,
+        },
+        default: {
+            bodyMapper: StorageError,
+            headersMapper: PageBlobUpdateSequenceNumberExceptionHeaders,
+        },
+    },
+    queryParameters: [comp, timeoutInSeconds],
+    urlParameters: [url],
+    headerParameters: [
+        version,
+        requestId,
+        accept1,
+        leaseId,
+        ifModifiedSince,
+        ifUnmodifiedSince,
+        ifMatch,
+        ifNoneMatch,
+        ifTags,
+        blobSequenceNumber,
+        sequenceNumberAction,
+    ],
+    isXML: true,
+    serializer: xmlSerializer$2,
+};
+const copyIncrementalOperationSpec = {
+    path: "/{containerName}/{blob}",
+    httpMethod: "PUT",
+    responses: {
+        202: {
+            headersMapper: PageBlobCopyIncrementalHeaders,
+        },
+        default: {
+            bodyMapper: StorageError,
+            headersMapper: PageBlobCopyIncrementalExceptionHeaders,
+        },
+    },
+    queryParameters: [timeoutInSeconds, comp21],
+    urlParameters: [url],
+    headerParameters: [
+        version,
+        requestId,
+        accept1,
+        ifModifiedSince,
+        ifUnmodifiedSince,
+        ifMatch,
+        ifNoneMatch,
+        ifTags,
+        copySource,
+    ],
+    isXML: true,
+    serializer: xmlSerializer$2,
+};
+
+/*
+ * Copyright (c) Microsoft Corporation.
+ * Licensed under the MIT License.
+ *
+ * Code generated by Microsoft (R) AutoRest Code Generator.
+ * Changes may cause incorrect behavior and will be lost if the code is regenerated.
+ */
+/** Class containing AppendBlob operations. */
+class AppendBlobImpl {
+    /**
+     * Initializes a new instance of the AppendBlob class.
+     * @param client Reference to the service client
+     */
+    constructor(client) {
+        this.client = client;
+    }
+    /**
+     * The Create Append Blob operation creates a new append blob.
+     * @param contentLength The length of the request.
+     * @param options The options parameters.
+     */
+    create(contentLength, options) {
+        return this.client.sendOperationRequest({ contentLength, options }, createOperationSpec);
+    }
+    /**
+     * The Append Block operation commits a new block of data to the end of an existing append blob. The
+     * Append Block operation is permitted only if the blob was created with x-ms-blob-type set to
+     * AppendBlob. Append Block is supported only on version 2015-02-21 or later.
+     * @param contentLength The length of the request.
+     * @param body Initial data
+     * @param options The options parameters.
+     */
+    appendBlock(contentLength, body, options) {
+        return this.client.sendOperationRequest({ contentLength, body, options }, appendBlockOperationSpec);
+    }
+    /**
+     * The Append Block operation commits a new block of data to the end of an existing append blob where
+     * the contents are read from a source URL. The Append Block operation is permitted only if the blob
+     * was created with x-ms-blob-type set to AppendBlob. Append Block is supported only on version
+     * 2015-02-21 or later.
+     * @param sourceUrl Specify a URL to the copy source.
+     * @param contentLength The length of the request.
+     * @param options The options parameters.
+     */
+    appendBlockFromUrl(sourceUrl, contentLength, options) {
+        return this.client.sendOperationRequest({ sourceUrl, contentLength, options }, appendBlockFromUrlOperationSpec);
+    }
+    /**
+     * The Seal operation seals the Append Blob to make it read-only. Seal is supported only on version
+     * 2019-12-12 or later.
+     * @param options The options parameters.
+     */
+    seal(options) {
+        return this.client.sendOperationRequest({ options }, sealOperationSpec);
+    }
+}
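+/*
+ * Usage sketch (not generated code; illustrative only). An append blob is created once
+ * and then grown by committing blocks to its end. With a hypothetical
+ * `storageClientContext` instance:
+ *
+ *   const appendBlob = new AppendBlobImpl(storageClientContext);
+ *   await appendBlob.create(0, {});                        // empty append blob
+ *   const chunk = Buffer.from("log line\n");
+ *   await appendBlob.appendBlock(chunk.length, chunk, {}); // committed at the end
+ *
+ * The public AppendBlobClient wraps these calls and is the supported entry point.
+ */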
+// Operation Specifications
+const xmlSerializer$1 = coreClient__namespace.createSerializer(Mappers, /* isXml */ true);
+const createOperationSpec = {
+    path: "/{containerName}/{blob}",
+    httpMethod: "PUT",
+    responses: {
+        201: {
+            headersMapper: AppendBlobCreateHeaders,
+        },
+        default: {
+            bodyMapper: StorageError,
+            headersMapper: AppendBlobCreateExceptionHeaders,
+        },
+    },
+    queryParameters: [timeoutInSeconds],
+    urlParameters: [url],
+    headerParameters: [
+        version,
+        requestId,
+        accept1,
+        contentLength,
+        metadata,
+        leaseId,
+        ifModifiedSince,
+        ifUnmodifiedSince,
+        encryptionKey,
+        encryptionKeySha256,
+        encryptionAlgorithm,
+        ifMatch,
+        ifNoneMatch,
+        ifTags,
+        blobCacheControl,
+        blobContentType,
+        blobContentMD5,
+        blobContentEncoding,
+        blobContentLanguage,
+        blobContentDisposition,
+        immutabilityPolicyExpiry,
+        immutabilityPolicyMode,
+        encryptionScope,
+        blobTagsString,
+        legalHold1,
+        blobType1,
+    ],
+    isXML: true,
+    serializer: xmlSerializer$1,
+};
+const appendBlockOperationSpec = {
+    path: "/{containerName}/{blob}",
+    httpMethod: "PUT",
+    responses: {
+        201: {
+            headersMapper: AppendBlobAppendBlockHeaders,
+        },
+        default: {
+            bodyMapper: StorageError,
+            headersMapper: AppendBlobAppendBlockExceptionHeaders,
+        },
+    },
+    requestBody: body1,
+    queryParameters: [timeoutInSeconds, comp22],
+    urlParameters: [url],
+    headerParameters: [
+        version,
+        requestId,
+        contentLength,
+        leaseId,
+        ifModifiedSince,
+        ifUnmodifiedSince,
+        encryptionKey,
+        encryptionKeySha256,
+        encryptionAlgorithm,
+        ifMatch,
+        ifNoneMatch,
+        ifTags,
+        encryptionScope,
+        transactionalContentMD5,
+        transactionalContentCrc64,
+        contentType1,
+        accept2,
+        maxSize,
+        appendPosition,
+    ],
+    isXML: true,
+    contentType: "application/xml; charset=utf-8",
+    mediaType: "binary",
+    serializer: xmlSerializer$1,
+};
+const appendBlockFromUrlOperationSpec = {
+    path: "/{containerName}/{blob}",
+    httpMethod: "PUT",
+    responses: {
+        201: {
+            headersMapper: AppendBlobAppendBlockFromUrlHeaders,
+        },
+        default: {
+            bodyMapper: StorageError,
+            headersMapper: AppendBlobAppendBlockFromUrlExceptionHeaders,
+        },
+    },
+    queryParameters: [timeoutInSeconds, comp22],
+    urlParameters: [url],
+    headerParameters: [
+        version,
+        requestId,
+        accept1,
+        contentLength,
+        leaseId,
+        ifModifiedSince,
+        ifUnmodifiedSince,
+        encryptionKey,
+        encryptionKeySha256,
+        encryptionAlgorithm,
+        ifMatch,
+        ifNoneMatch,
+        ifTags,
+        encryptionScope,
+        sourceIfModifiedSince,
+        sourceIfUnmodifiedSince,
+        sourceIfMatch,
+        sourceIfNoneMatch,
+        sourceContentMD5,
+        copySourceAuthorization,
+        transactionalContentMD5,
+        sourceUrl,
+        sourceContentCrc64,
+        maxSize,
+        appendPosition,
+        sourceRange1,
+    ],
+    isXML: true,
+    serializer: xmlSerializer$1,
+};
+const sealOperationSpec = {
+    path: "/{containerName}/{blob}",
+    httpMethod: "PUT",
+    responses: {
+        200: {
+            headersMapper: AppendBlobSealHeaders,
+        },
+        default: {
+            bodyMapper: StorageError,
+            headersMapper: AppendBlobSealExceptionHeaders,
+        },
+    },
+    queryParameters: [timeoutInSeconds, comp23],
+    urlParameters: [url],
+    headerParameters: [
+        version,
+        requestId,
+        accept1,
+        leaseId,
+        ifModifiedSince,
+        ifUnmodifiedSince,
+        ifMatch,
+        ifNoneMatch,
+        appendPosition,
+    ],
+    isXML: true,
+    serializer: xmlSerializer$1,
+};
+
+/*
+ * Copyright (c) Microsoft Corporation.
+ * Licensed under the MIT License.
+ *
+ * Code generated by Microsoft (R) AutoRest Code Generator.
+ * Changes may cause incorrect behavior and will be lost if the code is regenerated.
+ */
+/** Class containing BlockBlob operations. */
+class BlockBlobImpl {
+    /**
+     * Initializes a new instance of the BlockBlob class.
+     * @param client Reference to the service client
+     */
+    constructor(client) {
+        this.client = client;
+    }
+    /**
+     * The Upload Block Blob operation updates the content of an existing block blob. Updating an existing
+     * block blob overwrites any existing metadata on the blob. Partial updates are not supported with Put
+     * Blob; the content of the existing blob is overwritten with the content of the new blob. To perform a
+     * partial update of the content of a block blob, use the Put Block List operation.
+     * @param contentLength The length of the request.
+     * @param body Initial data
+     * @param options The options parameters.
+     */
+    upload(contentLength, body, options) {
+        return this.client.sendOperationRequest({ contentLength, body, options }, uploadOperationSpec);
+    }
+    /**
+     * The Put Blob from URL operation creates a new Block Blob where the contents of the blob are read
+     * from a given URL.  This API is supported beginning with the 2020-04-08 version. Partial updates are
+     * not supported with Put Blob from URL; the content of an existing blob is overwritten with the
+     * content of the new blob.  To perform partial updates to a block blob’s contents using a source URL,
+     * use the Put Block from URL API in conjunction with Put Block List.
+     * @param contentLength The length of the request.
+     * @param copySource Specifies the URL of the source blob. This value is a URL of up to 2 KB in length
+     *                   that specifies a blob. The value should be URL-encoded as it would appear in a
+     *                   request URI. The source blob must either be public or must be authenticated via a
+     *                   shared access signature.
+     * @param options The options parameters.
+     */
+    putBlobFromUrl(contentLength, copySource, options) {
+        return this.client.sendOperationRequest({ contentLength, copySource, options }, putBlobFromUrlOperationSpec);
+    }
+    /**
+     * The Stage Block operation creates a new block to be committed as part of a blob
+     * @param blockId A valid Base64 string value that identifies the block. Prior to encoding, the string
+     *                must be less than or equal to 64 bytes in size. For a given blob, the length of the value specified
+     *                for the blockid parameter must be the same size for each block.
+     * @param contentLength The length of the request.
+     * @param body Initial data
+     * @param options The options parameters.
+     */
+    stageBlock(blockId, contentLength, body, options) {
+        return this.client.sendOperationRequest({ blockId, contentLength, body, options }, stageBlockOperationSpec);
+    }
+    /**
+     * The Stage Block operation creates a new block to be committed as part of a blob where the contents
+     * are read from a URL.
+     * @param blockId A valid Base64 string value that identifies the block. Prior to encoding, the string
+     *                must be less than or equal to 64 bytes in size. For a given blob, the length of the value specified
+     *                for the blockid parameter must be the same size for each block.
+     * @param contentLength The length of the request.
+     * @param sourceUrl Specify a URL to the copy source.
+     * @param options The options parameters.
+     */
+    stageBlockFromURL(blockId, contentLength, sourceUrl, options) {
+        return this.client.sendOperationRequest({ blockId, contentLength, sourceUrl, options }, stageBlockFromURLOperationSpec);
+    }
+    /**
+     * The Commit Block List operation writes a blob by specifying the list of block IDs that make up the
+     * blob. In order to be written as part of a blob, a block must have been successfully written to the
+     * server in a prior Put Block operation. You can call Put Block List to update a blob by uploading
+     * only those blocks that have changed, then committing the new and existing blocks together. You can
+     * do this by specifying whether to commit a block from the committed block list or from the
+     * uncommitted block list, or to commit the most recently uploaded version of the block, whichever list
+     * it may belong to.
+     * @param blocks Blob Blocks.
+     * @param options The options parameters.
+     */
+    commitBlockList(blocks, options) {
+        return this.client.sendOperationRequest({ blocks, options }, commitBlockListOperationSpec);
+    }
+    /**
+     * The Get Block List operation retrieves the list of blocks that have been uploaded as part of a block
+     * blob
+     * @param listType Specifies whether to return the list of committed blocks, the list of uncommitted
+     *                 blocks, or both lists together.
+     * @param options The options parameters.
+     */
+    getBlockList(listType, options) {
+        return this.client.sendOperationRequest({ listType, options }, getBlockListOperationSpec);
+    }
+}
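+/*
+ * Usage sketch (not generated code; illustrative only). The stage/commit flow described
+ * in the commitBlockList doc above maps onto stageBlock followed by commitBlockList;
+ * block IDs must be Base64 strings of equal length. With a hypothetical
+ * `storageClientContext` instance:
+ *
+ *   const blockBlob = new BlockBlobImpl(storageClientContext);
+ *   const blockId = Buffer.from("block-000001").toString("base64");
+ *   const data = Buffer.from("hello world");
+ *   await blockBlob.stageBlock(blockId, data.length, data, {});
+ *   await blockBlob.commitBlockList({ latest: [blockId] }, {});
+ *
+ * The public BlockBlobClient exposes the same flow via its own stageBlock and
+ * commitBlockList methods.
+ */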
+// Operation Specifications
+const xmlSerializer = coreClient__namespace.createSerializer(Mappers, /* isXml */ true);
+const uploadOperationSpec = {
+    path: "/{containerName}/{blob}",
+    httpMethod: "PUT",
+    responses: {
+        201: {
+            headersMapper: BlockBlobUploadHeaders,
+        },
+        default: {
+            bodyMapper: StorageError,
+            headersMapper: BlockBlobUploadExceptionHeaders,
+        },
+    },
+    requestBody: body1,
+    queryParameters: [timeoutInSeconds],
+    urlParameters: [url],
+    headerParameters: [
+        version,
+        requestId,
+        contentLength,
+        metadata,
+        leaseId,
+        ifModifiedSince,
+        ifUnmodifiedSince,
+        encryptionKey,
+        encryptionKeySha256,
+        encryptionAlgorithm,
+        ifMatch,
+        ifNoneMatch,
+        ifTags,
+        blobCacheControl,
+        blobContentType,
+        blobContentMD5,
+        blobContentEncoding,
+        blobContentLanguage,
+        blobContentDisposition,
+        immutabilityPolicyExpiry,
+        immutabilityPolicyMode,
+        encryptionScope,
+        tier,
+        blobTagsString,
+        legalHold1,
+        transactionalContentMD5,
+        transactionalContentCrc64,
+        contentType1,
+        accept2,
+        blobType2,
+    ],
+    isXML: true,
+    contentType: "application/xml; charset=utf-8",
+    mediaType: "binary",
+    serializer: xmlSerializer,
+};
+const putBlobFromUrlOperationSpec = {
+    path: "/{containerName}/{blob}",
+    httpMethod: "PUT",
+    responses: {
+        201: {
+            headersMapper: BlockBlobPutBlobFromUrlHeaders,
+        },
+        default: {
+            bodyMapper: StorageError,
+            headersMapper: BlockBlobPutBlobFromUrlExceptionHeaders,
+        },
+    },
+    queryParameters: [timeoutInSeconds],
+    urlParameters: [url],
+    headerParameters: [
+        version,
+        requestId,
+        accept1,
+        contentLength,
+        metadata,
+        leaseId,
+        ifModifiedSince,
+        ifUnmodifiedSince,
+        encryptionKey,
+        encryptionKeySha256,
+        encryptionAlgorithm,
+        ifMatch,
+        ifNoneMatch,
+        ifTags,
+        blobCacheControl,
+        blobContentType,
+        blobContentMD5,
+        blobContentEncoding,
+        blobContentLanguage,
+        blobContentDisposition,
+        encryptionScope,
+        tier,
+        sourceIfModifiedSince,
+        sourceIfUnmodifiedSince,
+        sourceIfMatch,
+        sourceIfNoneMatch,
+        sourceIfTags,
+        copySource,
+        blobTagsString,
+        sourceContentMD5,
+        copySourceAuthorization,
+        copySourceTags,
+        transactionalContentMD5,
+        blobType2,
+        copySourceBlobProperties,
+    ],
+    isXML: true,
+    serializer: xmlSerializer,
+};
+const stageBlockOperationSpec = {
+    path: "/{containerName}/{blob}",
+    httpMethod: "PUT",
+    responses: {
+        201: {
+            headersMapper: BlockBlobStageBlockHeaders,
+        },
+        default: {
+            bodyMapper: StorageError,
+            headersMapper: BlockBlobStageBlockExceptionHeaders,
+        },
+    },
+    requestBody: body1,
+    queryParameters: [
+        timeoutInSeconds,
+        comp24,
+        blockId,
+    ],
+    urlParameters: [url],
+    headerParameters: [
+        version,
+        requestId,
+        contentLength,
+        leaseId,
+        encryptionKey,
+        encryptionKeySha256,
+        encryptionAlgorithm,
+        encryptionScope,
+        transactionalContentMD5,
+        transactionalContentCrc64,
+        contentType1,
+        accept2,
+    ],
+    isXML: true,
+    contentType: "application/xml; charset=utf-8",
+    mediaType: "binary",
+    serializer: xmlSerializer,
+};
+const stageBlockFromURLOperationSpec = {
+    path: "/{containerName}/{blob}",
+    httpMethod: "PUT",
+    responses: {
+        201: {
+            headersMapper: BlockBlobStageBlockFromURLHeaders,
+        },
+        default: {
+            bodyMapper: StorageError,
+            headersMapper: BlockBlobStageBlockFromURLExceptionHeaders,
+        },
+    },
+    queryParameters: [
+        timeoutInSeconds,
+        comp24,
+        blockId,
+    ],
+    urlParameters: [url],
+    headerParameters: [
+        version,
+        requestId,
+        accept1,
+        contentLength,
+        leaseId,
+        encryptionKey,
+        encryptionKeySha256,
+        encryptionAlgorithm,
+        encryptionScope,
+        sourceIfModifiedSince,
+        sourceIfUnmodifiedSince,
+        sourceIfMatch,
+        sourceIfNoneMatch,
+        sourceContentMD5,
+        copySourceAuthorization,
+        sourceUrl,
+        sourceContentCrc64,
+        sourceRange1,
+    ],
+    isXML: true,
+    serializer: xmlSerializer,
+};
+const commitBlockListOperationSpec = {
+    path: "/{containerName}/{blob}",
+    httpMethod: "PUT",
+    responses: {
+        201: {
+            headersMapper: BlockBlobCommitBlockListHeaders,
+        },
+        default: {
+            bodyMapper: StorageError,
+            headersMapper: BlockBlobCommitBlockListExceptionHeaders,
+        },
+    },
+    requestBody: blocks,
+    queryParameters: [timeoutInSeconds, comp25],
+    urlParameters: [url],
+    headerParameters: [
+        contentType,
+        accept,
+        version,
+        requestId,
+        metadata,
+        leaseId,
+        ifModifiedSince,
+        ifUnmodifiedSince,
+        encryptionKey,
+        encryptionKeySha256,
+        encryptionAlgorithm,
+        ifMatch,
+        ifNoneMatch,
+        ifTags,
+        blobCacheControl,
+        blobContentType,
+        blobContentMD5,
+        blobContentEncoding,
+        blobContentLanguage,
+        blobContentDisposition,
+        immutabilityPolicyExpiry,
+        immutabilityPolicyMode,
+        encryptionScope,
+        tier,
+        blobTagsString,
+        legalHold1,
+        transactionalContentMD5,
+        transactionalContentCrc64,
+    ],
+    isXML: true,
+    contentType: "application/xml; charset=utf-8",
+    mediaType: "xml",
+    serializer: xmlSerializer,
+};
+const getBlockListOperationSpec = {
+    path: "/{containerName}/{blob}",
+    httpMethod: "GET",
+    responses: {
+        200: {
+            bodyMapper: BlockList,
+            headersMapper: BlockBlobGetBlockListHeaders,
+        },
+        default: {
+            bodyMapper: StorageError,
+            headersMapper: BlockBlobGetBlockListExceptionHeaders,
+        },
+    },
+    queryParameters: [
+        timeoutInSeconds,
+        snapshot,
+        comp25,
+        listType,
+    ],
+    urlParameters: [url],
+    headerParameters: [
+        version,
+        requestId,
+        accept1,
+        leaseId,
+        ifTags,
+    ],
+    isXML: true,
+    serializer: xmlSerializer,
+};
+
+/*
+ * Copyright (c) Microsoft Corporation.
+ * Licensed under the MIT License.
+ *
+ * Code generated by Microsoft (R) AutoRest Code Generator.
+ * Changes may cause incorrect behavior and will be lost if the code is regenerated.
+ */
+let StorageClient$1 = class StorageClient extends coreHttpCompat__namespace.ExtendedServiceClient {
+    /**
+     * Initializes a new instance of the StorageClient class.
+     * @param url The URL of the service account, container, or blob that is the target of the desired
+     *            operation.
+     * @param options The parameter options
+     */
+    constructor(url, options) {
+        var _a, _b;
+        if (url === undefined) {
+            throw new Error("'url' cannot be null");
+        }
+        // Initializing default values for options
+        if (!options) {
+            options = {};
+        }
+        const defaults = {
+            requestContentType: "application/json; charset=utf-8",
+        };
+        const packageDetails = `azsdk-js-azure-storage-blob/12.26.0`;
+        const userAgentPrefix = options.userAgentOptions && options.userAgentOptions.userAgentPrefix
+            ? `${options.userAgentOptions.userAgentPrefix} ${packageDetails}`
+            : `${packageDetails}`;
+        const optionsWithDefaults = Object.assign(Object.assign(Object.assign({}, defaults), options), { userAgentOptions: {
+                userAgentPrefix,
+            }, endpoint: (_b = (_a = options.endpoint) !== null && _a !== void 0 ? _a : options.baseUri) !== null && _b !== void 0 ? _b : "{url}" });
+        super(optionsWithDefaults);
+        // Parameter assignments
+        this.url = url;
+        // Assigning values to Constant parameters
+        this.version = options.version || "2025-01-05";
+        this.service = new ServiceImpl(this);
+        this.container = new ContainerImpl(this);
+        this.blob = new BlobImpl(this);
+        this.pageBlob = new PageBlobImpl(this);
+        this.appendBlob = new AppendBlobImpl(this);
+        this.blockBlob = new BlockBlobImpl(this);
+    }
+};
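+/*
+ * Construction sketch (not generated code; illustrative only). The generated client only
+ * needs the resource URL; the per-operation groups hang off the instance:
+ *
+ *   const context = new StorageClient$1("https://myaccount.blob.core.windows.net/mycontainer/myblob");
+ *   // context.blockBlob, context.pageBlob, context.appendBlob, etc. are now usable,
+ *   // and context.version defaults to "2025-01-05" unless overridden via options.
+ */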
+
+// Copyright (c) Microsoft Corporation.
+// Licensed under the MIT License.
+/**
+ * @internal
+ */
+class StorageContextClient extends StorageClient$1 {
+    async sendOperationRequest(operationArguments, operationSpec) {
+        const operationSpecToSend = Object.assign({}, operationSpec);
+        if (operationSpecToSend.path === "/{containerName}" ||
+            operationSpecToSend.path === "/{containerName}/{blob}") {
+            operationSpecToSend.path = "";
+        }
+        return super.sendOperationRequest(operationArguments, operationSpecToSend);
+    }
+}
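+/*
+ * Note (not generated code): the operation specs above use templated paths such as
+ * "/{containerName}/{blob}", but this SDK passes a fully qualified blob URL (for example
+ * "https://myaccount.blob.core.windows.net/mycontainer/myblob") as the {url} parameter.
+ * Clearing the templated path here prevents the container and blob segments from being
+ * appended a second time when the request URL is built.
+ */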
+
+// Copyright (c) Microsoft Corporation.
+// Licensed under the MIT License.
+/**
+ * A StorageClient represents a base URL class for {@link BlobServiceClient}, {@link ContainerClient},
+ * and so on.
+ */
+class StorageClient {
+    /**
+     * Creates an instance of StorageClient.
+     * @param url - url to resource
+     * @param pipeline - request policy pipeline.
+     */
+    constructor(url, pipeline) {
+        // The URL should be encoded only once; the protocol layer shouldn't encode it again.
+        this.url = escapeURLPath(url);
+        this.accountName = getAccountNameFromUrl(url);
+        this.pipeline = pipeline;
+        this.storageClientContext = new StorageContextClient(this.url, getCoreClientOptions(pipeline));
+        this.isHttps = iEqual(getURLScheme(this.url) || "", "https");
+        this.credential = getCredentialFromPipeline(pipeline);
+        // Override protocol layer's default content-type
+        const storageClientContext = this.storageClientContext;
+        storageClientContext.requestContentType = undefined;
+    }
+}
+
+// Copyright (c) Microsoft Corporation.
+// Licensed under the MIT License.
+/**
+ * Creates a span using the global tracer.
+ * @internal
+ */
+const tracingClient = coreTracing.createTracingClient({
+    packageName: "@azure/storage-blob",
+    packageVersion: SDK_VERSION,
+    namespace: "Microsoft.Storage",
+});
+
+// Copyright (c) Microsoft Corporation.
+// Licensed under the MIT License.
+/**
+ * ONLY AVAILABLE IN NODE.JS RUNTIME.
+ *
+ * This is a helper class to construct a string representing the permissions granted by a ServiceSAS to a blob. Setting
+ * a value to true means that any SAS which uses these permissions will grant permissions for that operation. Once all
+ * the values are set, this should be serialized with toString and set as the permissions field on a
+ * {@link BlobSASSignatureValues} object. It is possible to construct the permissions string without this class, but
+ * the order of the permissions is particular and this class guarantees correctness.
+ */
+class BlobSASPermissions {
+    constructor() {
+        /**
+         * Specifies Read access granted.
+         */
+        this.read = false;
+        /**
+         * Specifies Add access granted.
+         */
+        this.add = false;
+        /**
+         * Specifies Create access granted.
+         */
+        this.create = false;
+        /**
+         * Specifies Write access granted.
+         */
+        this.write = false;
+        /**
+         * Specifies Delete access granted.
+         */
+        this.delete = false;
+        /**
+         * Specifies Delete version access granted.
+         */
+        this.deleteVersion = false;
+        /**
+         * Specifies Tag access granted.
+         */
+        this.tag = false;
+        /**
+         * Specifies Move access granted.
+         */
+        this.move = false;
+        /**
+         * Specifies Execute access granted.
+         */
+        this.execute = false;
+        /**
+         * Specifies SetImmutabilityPolicy access granted.
+         */
+        this.setImmutabilityPolicy = false;
+        /**
+         * Specifies that Permanent Delete is permitted.
+         */
+        this.permanentDelete = false;
+    }
+    /**
+     * Creates a {@link BlobSASPermissions} from the specified permissions string. This method will throw an
+     * Error if it encounters a character that does not correspond to a valid permission.
+     *
+     * @param permissions -
+     */
+    static parse(permissions) {
+        const blobSASPermissions = new BlobSASPermissions();
+        for (const char of permissions) {
+            switch (char) {
+                case "r":
+                    blobSASPermissions.read = true;
+                    break;
+                case "a":
+                    blobSASPermissions.add = true;
+                    break;
+                case "c":
+                    blobSASPermissions.create = true;
+                    break;
+                case "w":
+                    blobSASPermissions.write = true;
+                    break;
+                case "d":
+                    blobSASPermissions.delete = true;
+                    break;
+                case "x":
+                    blobSASPermissions.deleteVersion = true;
+                    break;
+                case "t":
+                    blobSASPermissions.tag = true;
+                    break;
+                case "m":
+                    blobSASPermissions.move = true;
+                    break;
+                case "e":
+                    blobSASPermissions.execute = true;
+                    break;
+                case "i":
+                    blobSASPermissions.setImmutabilityPolicy = true;
+                    break;
+                case "y":
+                    blobSASPermissions.permanentDelete = true;
+                    break;
+                default:
+                    throw new RangeError(`Invalid permission: ${char}`);
+            }
+        }
+        return blobSASPermissions;
+    }
+    /**
+     * Creates a {@link BlobSASPermissions} from a raw object that contains the same keys as this
+     * class, with boolean values for each.
+     *
+     * @param permissionLike -
+     */
+    static from(permissionLike) {
+        const blobSASPermissions = new BlobSASPermissions();
+        if (permissionLike.read) {
+            blobSASPermissions.read = true;
+        }
+        if (permissionLike.add) {
+            blobSASPermissions.add = true;
+        }
+        if (permissionLike.create) {
+            blobSASPermissions.create = true;
+        }
+        if (permissionLike.write) {
+            blobSASPermissions.write = true;
+        }
+        if (permissionLike.delete) {
+            blobSASPermissions.delete = true;
+        }
+        if (permissionLike.deleteVersion) {
+            blobSASPermissions.deleteVersion = true;
+        }
+        if (permissionLike.tag) {
+            blobSASPermissions.tag = true;
+        }
+        if (permissionLike.move) {
+            blobSASPermissions.move = true;
+        }
+        if (permissionLike.execute) {
+            blobSASPermissions.execute = true;
+        }
+        if (permissionLike.setImmutabilityPolicy) {
+            blobSASPermissions.setImmutabilityPolicy = true;
+        }
+        if (permissionLike.permanentDelete) {
+            blobSASPermissions.permanentDelete = true;
+        }
+        return blobSASPermissions;
+    }
+    /**
+     * Converts the given permissions to a string. Using this method will guarantee the permissions are in an
+     * order accepted by the service.
+     *
+     * @returns A string which represents the BlobSASPermissions
+     */
+    toString() {
+        const permissions = [];
+        if (this.read) {
+            permissions.push("r");
+        }
+        if (this.add) {
+            permissions.push("a");
+        }
+        if (this.create) {
+            permissions.push("c");
+        }
+        if (this.write) {
+            permissions.push("w");
+        }
+        if (this.delete) {
+            permissions.push("d");
+        }
+        if (this.deleteVersion) {
+            permissions.push("x");
+        }
+        if (this.tag) {
+            permissions.push("t");
+        }
+        if (this.move) {
+            permissions.push("m");
+        }
+        if (this.execute) {
+            permissions.push("e");
+        }
+        if (this.setImmutabilityPolicy) {
+            permissions.push("i");
+        }
+        if (this.permanentDelete) {
+            permissions.push("y");
+        }
+        return permissions.join("");
+    }
+}
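+/*
+ * Usage sketch (illustrative only): three equivalent ways to build a blob SAS permission
+ * string with this class.
+ *
+ *   const fromString = BlobSASPermissions.parse("racwd");
+ *   const fromObject = BlobSASPermissions.from({ read: true, write: true });
+ *   const manual = new BlobSASPermissions();
+ *   manual.read = true;
+ *   manual.tag = true;
+ *   console.log(fromString.toString()); // "racwd"
+ *   console.log(fromObject.toString()); // "rw"
+ *   console.log(manual.toString());     // "rt"
+ *
+ * toString() always emits the characters in the service-mandated order, which is the
+ * main reason to prefer this class over hand-built permission strings.
+ */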
+
+// Copyright (c) Microsoft Corporation.
+// Licensed under the MIT License.
+/**
+ * This is a helper class to construct a string representing the permissions granted by a ServiceSAS to a container.
+ * Setting a value to true means that any SAS which uses these permissions will grant permissions for that operation.
+ * Once all the values are set, this should be serialized with toString and set as the permissions field on a
+ * {@link BlobSASSignatureValues} object. It is possible to construct the permissions string without this class, but
+ * the order of the permissions is particular and this class guarantees correctness.
+ */
+class ContainerSASPermissions {
+    constructor() {
+        /**
+         * Specifies Read access granted.
+         */
+        this.read = false;
+        /**
+         * Specifies Add access granted.
+         */
+        this.add = false;
+        /**
+         * Specifies Create access granted.
+         */
+        this.create = false;
+        /**
+         * Specifies Write access granted.
+         */
+        this.write = false;
+        /**
+         * Specifies Delete access granted.
+         */
+        this.delete = false;
+        /**
+         * Specifies Delete version access granted.
+         */
+        this.deleteVersion = false;
+        /**
+         * Specifies List access granted.
+         */
+        this.list = false;
+        /**
+         * Specifies Tag access granted.
+         */
+        this.tag = false;
+        /**
+         * Specifies Move access granted.
+         */
+        this.move = false;
+        /**
+         * Specifies Execute access granted.
+         */
+        this.execute = false;
+        /**
+         * Specifies SetImmutabilityPolicy access granted.
+         */
+        this.setImmutabilityPolicy = false;
+        /**
+         * Specifies that Permanent Delete is permitted.
+         */
+        this.permanentDelete = false;
+        /**
+         * Specifies that Filter Blobs by Tags is permitted.
+         */
+        this.filterByTags = false;
+    }
+    /**
+     * Creates a {@link ContainerSASPermissions} from the specified permissions string. This method will throw an
+     * Error if it encounters a character that does not correspond to a valid permission.
+     *
+     * @param permissions -
+     */
+    static parse(permissions) {
+        const containerSASPermissions = new ContainerSASPermissions();
+        for (const char of permissions) {
+            switch (char) {
+                case "r":
+                    containerSASPermissions.read = true;
+                    break;
+                case "a":
+                    containerSASPermissions.add = true;
+                    break;
+                case "c":
+                    containerSASPermissions.create = true;
+                    break;
+                case "w":
+                    containerSASPermissions.write = true;
+                    break;
+                case "d":
+                    containerSASPermissions.delete = true;
+                    break;
+                case "l":
+                    containerSASPermissions.list = true;
+                    break;
+                case "t":
+                    containerSASPermissions.tag = true;
+                    break;
+                case "x":
+                    containerSASPermissions.deleteVersion = true;
+                    break;
+                case "m":
+                    containerSASPermissions.move = true;
+                    break;
+                case "e":
+                    containerSASPermissions.execute = true;
+                    break;
+                case "i":
+                    containerSASPermissions.setImmutabilityPolicy = true;
+                    break;
+                case "y":
+                    containerSASPermissions.permanentDelete = true;
+                    break;
+                case "f":
+                    containerSASPermissions.filterByTags = true;
+                    break;
+                default:
+                    throw new RangeError(`Invalid permission ${char}`);
+            }
+        }
+        return containerSASPermissions;
+    }
+    /**
+     * Creates a {@link ContainerSASPermissions} from a raw object that contains the same keys as this
+     * class, with boolean values for each.
+     *
+     * @param permissionLike -
+     */
+    static from(permissionLike) {
+        const containerSASPermissions = new ContainerSASPermissions();
+        if (permissionLike.read) {
+            containerSASPermissions.read = true;
+        }
+        if (permissionLike.add) {
+            containerSASPermissions.add = true;
+        }
+        if (permissionLike.create) {
+            containerSASPermissions.create = true;
+        }
+        if (permissionLike.write) {
+            containerSASPermissions.write = true;
+        }
+        if (permissionLike.delete) {
+            containerSASPermissions.delete = true;
+        }
+        if (permissionLike.list) {
+            containerSASPermissions.list = true;
+        }
+        if (permissionLike.deleteVersion) {
+            containerSASPermissions.deleteVersion = true;
+        }
+        if (permissionLike.tag) {
+            containerSASPermissions.tag = true;
+        }
+        if (permissionLike.move) {
+            containerSASPermissions.move = true;
+        }
+        if (permissionLike.execute) {
+            containerSASPermissions.execute = true;
+        }
+        if (permissionLike.setImmutabilityPolicy) {
+            containerSASPermissions.setImmutabilityPolicy = true;
+        }
+        if (permissionLike.permanentDelete) {
+            containerSASPermissions.permanentDelete = true;
+        }
+        if (permissionLike.filterByTags) {
+            containerSASPermissions.filterByTags = true;
+        }
+        return containerSASPermissions;
+    }
+    /**
+     * Converts the given permissions to a string. Using this method will guarantee the permissions are in an
+     * order accepted by the service.
+     *
+     * The order of the characters should be as specified here to ensure correctness.
+     * @see https://docs.microsoft.com/en-us/rest/api/storageservices/constructing-a-service-sas
+     *
+     */
+    toString() {
+        const permissions = [];
+        if (this.read) {
+            permissions.push("r");
+        }
+        if (this.add) {
+            permissions.push("a");
+        }
+        if (this.create) {
+            permissions.push("c");
+        }
+        if (this.write) {
+            permissions.push("w");
+        }
+        if (this.delete) {
+            permissions.push("d");
+        }
+        if (this.deleteVersion) {
+            permissions.push("x");
+        }
+        if (this.list) {
+            permissions.push("l");
+        }
+        if (this.tag) {
+            permissions.push("t");
+        }
+        if (this.move) {
+            permissions.push("m");
+        }
+        if (this.execute) {
+            permissions.push("e");
+        }
+        if (this.setImmutabilityPolicy) {
+            permissions.push("i");
+        }
+        if (this.permanentDelete) {
+            permissions.push("y");
+        }
+        if (this.filterByTags) {
+            permissions.push("f");
+        }
+        return permissions.join("");
+    }
+}
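+
+// Illustrative usage sketch (comment only, not executed as part of this bundle):
+// parse() rejects unknown characters with a RangeError, and toString() re-emits
+// the permissions in the ordering the service accepts.
+//
+//   const perms = ContainerSASPermissions.parse("racwdl");
+//   perms.tag = true;
+//   perms.toString(); // => "racwdlt"
+//
+//   ContainerSASPermissions.from({ read: true, list: true }).toString(); // => "rl"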
+
+// Copyright (c) Microsoft Corporation.
+// Licensed under the MIT License.
+/**
+ * ONLY AVAILABLE IN NODE.JS RUNTIME.
+ *
+ * UserDelegationKeyCredential is only used for generation of user delegation SAS.
+ * @see https://docs.microsoft.com/en-us/rest/api/storageservices/create-user-delegation-sas
+ */
+class UserDelegationKeyCredential {
+    /**
+     * Creates an instance of UserDelegationKeyCredential.
+     * @param accountName -
+     * @param userDelegationKey -
+     */
+    constructor(accountName, userDelegationKey) {
+        this.accountName = accountName;
+        this.userDelegationKey = userDelegationKey;
+        this.key = Buffer.from(userDelegationKey.value, "base64");
+    }
+    /**
+     * Generates a hash signature for an HTTP request or for a SAS.
+     *
+     * @param stringToSign -
+     */
+    computeHMACSHA256(stringToSign) {
+        // console.log(`stringToSign: ${JSON.stringify(stringToSign)}`);
+        return crypto.createHmac("sha256", this.key).update(stringToSign, "utf8").digest("base64");
+    }
+}
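+
+// Illustrative sketch (comment only): a user delegation key, typically obtained via
+// BlobServiceClient.getUserDelegationKey() elsewhere in this SDK, can be wrapped here to
+// sign a SAS string-to-sign with HMAC-SHA256. `userDelegationKey` and `stringToSign` are
+// placeholders supplied by the caller.
+//
+//   const udkCredential = new UserDelegationKeyCredential("myaccount", userDelegationKey);
+//   const signature = udkCredential.computeHMACSHA256(stringToSign);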
+
+// Copyright (c) Microsoft Corporation.
+// Licensed under the MIT License.
+/**
+ * Generate SasIPRange format string. For example:
+ *
+ * "8.8.8.8" or "1.1.1.1-255.255.255.255"
+ *
+ * @param ipRange -
+ */
+function ipRangeToString(ipRange) {
+    return ipRange.end ? `${ipRange.start}-${ipRange.end}` : ipRange.start;
+}
+
+// Copyright (c) Microsoft Corporation.
+// Licensed under the MIT License.
+/**
+ * Protocols for generated SAS.
+ */
+exports.SASProtocol = void 0;
+(function (SASProtocol) {
+    /**
+     * Protocol that allows HTTPS only
+     */
+    SASProtocol["Https"] = "https";
+    /**
+     * Protocol that allows both HTTPS and HTTP
+     */
+    SASProtocol["HttpsAndHttp"] = "https,http";
+})(exports.SASProtocol || (exports.SASProtocol = {}));
+/**
+ * Represents the components that make up an Azure Storage SAS' query parameters. This type is not constructed directly
+ * by the user; it is only generated by the {@link AccountSASSignatureValues} and {@link BlobSASSignatureValues}
+ * types. Once generated, it can be encoded into a {@link String} and appended to a URL directly (though caution should
+ * be taken here in case there are existing query parameters, which might affect the appropriate means of appending
+ * these query parameters).
+ *
+ * NOTE: Instances of this class are immutable.
+ */
+class SASQueryParameters {
+    /**
+     * Optional. IP range allowed for this SAS.
+     *
+     * @readonly
+     */
+    get ipRange() {
+        if (this.ipRangeInner) {
+            return {
+                end: this.ipRangeInner.end,
+                start: this.ipRangeInner.start,
+            };
+        }
+        return undefined;
+    }
+    constructor(version, signature, permissionsOrOptions, services, resourceTypes, protocol, startsOn, expiresOn, ipRange, identifier, resource, cacheControl, contentDisposition, contentEncoding, contentLanguage, contentType, userDelegationKey, preauthorizedAgentObjectId, correlationId, encryptionScope) {
+        this.version = version;
+        this.signature = signature;
+        if (permissionsOrOptions !== undefined && typeof permissionsOrOptions !== "string") {
+            // SASQueryParametersOptions
+            this.permissions = permissionsOrOptions.permissions;
+            this.services = permissionsOrOptions.services;
+            this.resourceTypes = permissionsOrOptions.resourceTypes;
+            this.protocol = permissionsOrOptions.protocol;
+            this.startsOn = permissionsOrOptions.startsOn;
+            this.expiresOn = permissionsOrOptions.expiresOn;
+            this.ipRangeInner = permissionsOrOptions.ipRange;
+            this.identifier = permissionsOrOptions.identifier;
+            this.encryptionScope = permissionsOrOptions.encryptionScope;
+            this.resource = permissionsOrOptions.resource;
+            this.cacheControl = permissionsOrOptions.cacheControl;
+            this.contentDisposition = permissionsOrOptions.contentDisposition;
+            this.contentEncoding = permissionsOrOptions.contentEncoding;
+            this.contentLanguage = permissionsOrOptions.contentLanguage;
+            this.contentType = permissionsOrOptions.contentType;
+            if (permissionsOrOptions.userDelegationKey) {
+                this.signedOid = permissionsOrOptions.userDelegationKey.signedObjectId;
+                this.signedTenantId = permissionsOrOptions.userDelegationKey.signedTenantId;
+                this.signedStartsOn = permissionsOrOptions.userDelegationKey.signedStartsOn;
+                this.signedExpiresOn = permissionsOrOptions.userDelegationKey.signedExpiresOn;
+                this.signedService = permissionsOrOptions.userDelegationKey.signedService;
+                this.signedVersion = permissionsOrOptions.userDelegationKey.signedVersion;
+                this.preauthorizedAgentObjectId = permissionsOrOptions.preauthorizedAgentObjectId;
+                this.correlationId = permissionsOrOptions.correlationId;
+            }
+        }
+        else {
+            this.services = services;
+            this.resourceTypes = resourceTypes;
+            this.expiresOn = expiresOn;
+            this.permissions = permissionsOrOptions;
+            this.protocol = protocol;
+            this.startsOn = startsOn;
+            this.ipRangeInner = ipRange;
+            this.encryptionScope = encryptionScope;
+            this.identifier = identifier;
+            this.resource = resource;
+            this.cacheControl = cacheControl;
+            this.contentDisposition = contentDisposition;
+            this.contentEncoding = contentEncoding;
+            this.contentLanguage = contentLanguage;
+            this.contentType = contentType;
+            if (userDelegationKey) {
+                this.signedOid = userDelegationKey.signedObjectId;
+                this.signedTenantId = userDelegationKey.signedTenantId;
+                this.signedStartsOn = userDelegationKey.signedStartsOn;
+                this.signedExpiresOn = userDelegationKey.signedExpiresOn;
+                this.signedService = userDelegationKey.signedService;
+                this.signedVersion = userDelegationKey.signedVersion;
+                this.preauthorizedAgentObjectId = preauthorizedAgentObjectId;
+                this.correlationId = correlationId;
+            }
+        }
+    }
+    /**
+     * Encodes all SAS query parameters into a string that can be appended to a URL.
+     *
+     */
+    toString() {
+        const params = [
+            "sv",
+            "ss",
+            "srt",
+            "spr",
+            "st",
+            "se",
+            "sip",
+            "si",
+            "ses",
+            "skoid", // Signed object ID
+            "sktid", // Signed tenant ID
+            "skt", // Signed key start time
+            "ske", // Signed key expiry time
+            "sks", // Signed key service
+            "skv", // Signed key version
+            "sr",
+            "sp",
+            "sig",
+            "rscc",
+            "rscd",
+            "rsce",
+            "rscl",
+            "rsct",
+            "saoid",
+            "scid",
+        ];
+        const queries = [];
+        for (const param of params) {
+            switch (param) {
+                case "sv":
+                    this.tryAppendQueryParameter(queries, param, this.version);
+                    break;
+                case "ss":
+                    this.tryAppendQueryParameter(queries, param, this.services);
+                    break;
+                case "srt":
+                    this.tryAppendQueryParameter(queries, param, this.resourceTypes);
+                    break;
+                case "spr":
+                    this.tryAppendQueryParameter(queries, param, this.protocol);
+                    break;
+                case "st":
+                    this.tryAppendQueryParameter(queries, param, this.startsOn ? truncatedISO8061Date(this.startsOn, false) : undefined);
+                    break;
+                case "se":
+                    this.tryAppendQueryParameter(queries, param, this.expiresOn ? truncatedISO8061Date(this.expiresOn, false) : undefined);
+                    break;
+                case "sip":
+                    this.tryAppendQueryParameter(queries, param, this.ipRange ? ipRangeToString(this.ipRange) : undefined);
+                    break;
+                case "si":
+                    this.tryAppendQueryParameter(queries, param, this.identifier);
+                    break;
+                case "ses":
+                    this.tryAppendQueryParameter(queries, param, this.encryptionScope);
+                    break;
+                case "skoid": // Signed object ID
+                    this.tryAppendQueryParameter(queries, param, this.signedOid);
+                    break;
+                case "sktid": // Signed tenant ID
+                    this.tryAppendQueryParameter(queries, param, this.signedTenantId);
+                    break;
+                case "skt": // Signed key start time
+                    this.tryAppendQueryParameter(queries, param, this.signedStartsOn ? truncatedISO8061Date(this.signedStartsOn, false) : undefined);
+                    break;
+                case "ske": // Signed key expiry time
+                    this.tryAppendQueryParameter(queries, param, this.signedExpiresOn ? truncatedISO8061Date(this.signedExpiresOn, false) : undefined);
+                    break;
+                case "sks": // Signed key service
+                    this.tryAppendQueryParameter(queries, param, this.signedService);
+                    break;
+                case "skv": // Signed key version
+                    this.tryAppendQueryParameter(queries, param, this.signedVersion);
+                    break;
+                case "sr":
+                    this.tryAppendQueryParameter(queries, param, this.resource);
+                    break;
+                case "sp":
+                    this.tryAppendQueryParameter(queries, param, this.permissions);
+                    break;
+                case "sig":
+                    this.tryAppendQueryParameter(queries, param, this.signature);
+                    break;
+                case "rscc":
+                    this.tryAppendQueryParameter(queries, param, this.cacheControl);
+                    break;
+                case "rscd":
+                    this.tryAppendQueryParameter(queries, param, this.contentDisposition);
+                    break;
+                case "rsce":
+                    this.tryAppendQueryParameter(queries, param, this.contentEncoding);
+                    break;
+                case "rscl":
+                    this.tryAppendQueryParameter(queries, param, this.contentLanguage);
+                    break;
+                case "rsct":
+                    this.tryAppendQueryParameter(queries, param, this.contentType);
+                    break;
+                case "saoid":
+                    this.tryAppendQueryParameter(queries, param, this.preauthorizedAgentObjectId);
+                    break;
+                case "scid":
+                    this.tryAppendQueryParameter(queries, param, this.correlationId);
+                    break;
+            }
+        }
+        return queries.join("&");
+    }
+    /**
+     * A private helper method used to filter and append query key/value pairs into an array.
+     *
+     * @param queries -
+     * @param key -
+     * @param value -
+     */
+    tryAppendQueryParameter(queries, key, value) {
+        if (!value) {
+            return;
+        }
+        key = encodeURIComponent(key);
+        value = encodeURIComponent(value);
+        if (key.length > 0 && value.length > 0) {
+            queries.push(`${key}=${value}`);
+        }
+    }
+}
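+
+// Illustrative note (comment only): instances of SASQueryParameters are normally produced
+// by the SAS generation helpers in this bundle rather than constructed directly, and
+// toString() yields a query string such as
+//   "sv=2021-04-10&se=2024-01-01T00%3A00%3A00Z&sr=c&sp=racwdl&sig=..."
+// that can be appended to a resource URL (mind any existing query string).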
+
+// Copyright (c) Microsoft Corporation.
+// Licensed under the MIT License.
+function generateBlobSASQueryParameters(blobSASSignatureValues, sharedKeyCredentialOrUserDelegationKey, accountName) {
+    return generateBlobSASQueryParametersInternal(blobSASSignatureValues, sharedKeyCredentialOrUserDelegationKey, accountName).sasQueryParameters;
+}
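+
+// Illustrative usage sketch (comment only; `accountName`, `accountKey`, `containerName`, and
+// `containerUrl` are placeholders supplied by the caller):
+//
+//   const credential = new StorageSharedKeyCredential(accountName, accountKey);
+//   const sas = generateBlobSASQueryParameters({
+//     containerName,
+//     permissions: ContainerSASPermissions.parse("racwdl"),
+//     expiresOn: new Date(Date.now() + 3600 * 1000),
+//   }, credential);
+//   const sasUrl = `${containerUrl}?${sas.toString()}`;
+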
+function generateBlobSASQueryParametersInternal(blobSASSignatureValues, sharedKeyCredentialOrUserDelegationKey, accountName) {
+    const version = blobSASSignatureValues.version ? blobSASSignatureValues.version : SERVICE_VERSION;
+    const sharedKeyCredential = sharedKeyCredentialOrUserDelegationKey instanceof StorageSharedKeyCredential
+        ? sharedKeyCredentialOrUserDelegationKey
+        : undefined;
+    let userDelegationKeyCredential;
+    if (sharedKeyCredential === undefined && accountName !== undefined) {
+        userDelegationKeyCredential = new UserDelegationKeyCredential(accountName, sharedKeyCredentialOrUserDelegationKey);
+    }
+    if (sharedKeyCredential === undefined && userDelegationKeyCredential === undefined) {
+        throw TypeError("Invalid sharedKeyCredential, userDelegationKey or accountName.");
+    }
+    // Version 2020-12-06 adds support for encryptionScope in SAS.
+    if (version >= "2020-12-06") {
+        if (sharedKeyCredential !== undefined) {
+            return generateBlobSASQueryParameters20201206(blobSASSignatureValues, sharedKeyCredential);
+        }
+        else {
+            return generateBlobSASQueryParametersUDK20201206(blobSASSignatureValues, userDelegationKeyCredential);
+        }
+    }
+    // Version 2019-12-12 adds support for the blob tags permission.
+    // Version 2018-11-09 adds support for the signed resource and signed blob snapshot time fields.
+    // https://docs.microsoft.com/en-us/rest/api/storageservices/constructing-a-service-sas#constructing-the-signature-string
+    if (version >= "2018-11-09") {
+        if (sharedKeyCredential !== undefined) {
+            return generateBlobSASQueryParameters20181109(blobSASSignatureValues, sharedKeyCredential);
+        }
+        else {
+            // Version 2020-02-10 delegation SAS signature construction includes preauthorizedAgentObjectId, agentObjectId, correlationId.
+            if (version >= "2020-02-10") {
+                return generateBlobSASQueryParametersUDK20200210(blobSASSignatureValues, userDelegationKeyCredential);
+            }
+            else {
+                return generateBlobSASQueryParametersUDK20181109(blobSASSignatureValues, userDelegationKeyCredential);
+            }
+        }
+    }
+    if (version >= "2015-04-05") {
+        if (sharedKeyCredential !== undefined) {
+            return generateBlobSASQueryParameters20150405(blobSASSignatureValues, sharedKeyCredential);
+        }
+        else {
+            throw new RangeError("'version' must be >= '2018-11-09' when generating user delegation SAS using user delegation key.");
+        }
+    }
+    throw new RangeError("'version' must be >= '2015-04-05'.");
+}
+/**
+ * ONLY AVAILABLE IN NODE.JS RUNTIME.
+ * IMPLEMENTATION FOR API VERSION FROM 2015-04-05 AND BEFORE 2018-11-09.
+ *
+ * Creates an instance of SASQueryParameters.
+ *
+ * Only accepts required settings needed to create a SAS. For optional settings please
+ * set corresponding properties directly, such as permissions, startsOn and identifier.
+ *
+ * WARNING: When identifier is not provided, permissions and expiresOn are required.
+ * You MUST manually assign a value to identifier, or to expiresOn and permissions, if you initialize with
+ * this constructor.
+ *
+ * @param blobSASSignatureValues -
+ * @param sharedKeyCredential -
+ */
+function generateBlobSASQueryParameters20150405(blobSASSignatureValues, sharedKeyCredential) {
+    blobSASSignatureValues = SASSignatureValuesSanityCheckAndAutofill(blobSASSignatureValues);
+    if (!blobSASSignatureValues.identifier &&
+        !(blobSASSignatureValues.permissions && blobSASSignatureValues.expiresOn)) {
+        throw new RangeError("Must provide 'permissions' and 'expiresOn' for Blob SAS generation when 'identifier' is not provided.");
+    }
+    let resource = "c";
+    if (blobSASSignatureValues.blobName) {
+        resource = "b";
+    }
+    // Calling parse and toString guarantees the proper ordering and throws on invalid characters.
+    let verifiedPermissions;
+    if (blobSASSignatureValues.permissions) {
+        if (blobSASSignatureValues.blobName) {
+            verifiedPermissions = BlobSASPermissions.parse(blobSASSignatureValues.permissions.toString()).toString();
+        }
+        else {
+            verifiedPermissions = ContainerSASPermissions.parse(blobSASSignatureValues.permissions.toString()).toString();
+        }
+    }
+    // Signature is generated on the un-url-encoded values.
+    const stringToSign = [
+        verifiedPermissions ? verifiedPermissions : "",
+        blobSASSignatureValues.startsOn
+            ? truncatedISO8061Date(blobSASSignatureValues.startsOn, false)
+            : "",
+        blobSASSignatureValues.expiresOn
+            ? truncatedISO8061Date(blobSASSignatureValues.expiresOn, false)
+            : "",
+        getCanonicalName(sharedKeyCredential.accountName, blobSASSignatureValues.containerName, blobSASSignatureValues.blobName),
+        blobSASSignatureValues.identifier,
+        blobSASSignatureValues.ipRange ? ipRangeToString(blobSASSignatureValues.ipRange) : "",
+        blobSASSignatureValues.protocol ? blobSASSignatureValues.protocol : "",
+        blobSASSignatureValues.version,
+        blobSASSignatureValues.cacheControl ? blobSASSignatureValues.cacheControl : "",
+        blobSASSignatureValues.contentDisposition ? blobSASSignatureValues.contentDisposition : "",
+        blobSASSignatureValues.contentEncoding ? blobSASSignatureValues.contentEncoding : "",
+        blobSASSignatureValues.contentLanguage ? blobSASSignatureValues.contentLanguage : "",
+        blobSASSignatureValues.contentType ? blobSASSignatureValues.contentType : "",
+    ].join("\n");
+    const signature = sharedKeyCredential.computeHMACSHA256(stringToSign);
+    return {
+        sasQueryParameters: new SASQueryParameters(blobSASSignatureValues.version, signature, verifiedPermissions, undefined, undefined, blobSASSignatureValues.protocol, blobSASSignatureValues.startsOn, blobSASSignatureValues.expiresOn, blobSASSignatureValues.ipRange, blobSASSignatureValues.identifier, resource, blobSASSignatureValues.cacheControl, blobSASSignatureValues.contentDisposition, blobSASSignatureValues.contentEncoding, blobSASSignatureValues.contentLanguage, blobSASSignatureValues.contentType),
+        stringToSign: stringToSign,
+    };
+}
+/**
+ * ONLY AVAILABLE IN NODE.JS RUNTIME.
+ * IMPLEMENTATION FOR API VERSION FROM 2018-11-09.
+ *
+ * Creates an instance of SASQueryParameters.
+ *
+ * Only accepts required settings needed to create a SAS. For optional settings please
+ * set corresponding properties directly, such as permissions, startsOn and identifier.
+ *
+ * WARNING: When identifier is not provided, permissions and expiresOn are required.
+ * You MUST manually assign a value to identifier, or to expiresOn and permissions, if you initialize with
+ * this constructor.
+ *
+ * @param blobSASSignatureValues -
+ * @param sharedKeyCredential -
+ */
+function generateBlobSASQueryParameters20181109(blobSASSignatureValues, sharedKeyCredential) {
+    blobSASSignatureValues = SASSignatureValuesSanityCheckAndAutofill(blobSASSignatureValues);
+    if (!blobSASSignatureValues.identifier &&
+        !(blobSASSignatureValues.permissions && blobSASSignatureValues.expiresOn)) {
+        throw new RangeError("Must provide 'permissions' and 'expiresOn' for Blob SAS generation when 'identifier' is not provided.");
+    }
+    let resource = "c";
+    let timestamp = blobSASSignatureValues.snapshotTime;
+    if (blobSASSignatureValues.blobName) {
+        resource = "b";
+        if (blobSASSignatureValues.snapshotTime) {
+            resource = "bs";
+        }
+        else if (blobSASSignatureValues.versionId) {
+            resource = "bv";
+            timestamp = blobSASSignatureValues.versionId;
+        }
+    }
+    // Calling parse and toString guarantees the proper ordering and throws on invalid characters.
+    let verifiedPermissions;
+    if (blobSASSignatureValues.permissions) {
+        if (blobSASSignatureValues.blobName) {
+            verifiedPermissions = BlobSASPermissions.parse(blobSASSignatureValues.permissions.toString()).toString();
+        }
+        else {
+            verifiedPermissions = ContainerSASPermissions.parse(blobSASSignatureValues.permissions.toString()).toString();
+        }
+    }
+    // Signature is generated on the un-url-encoded values.
+    const stringToSign = [
+        verifiedPermissions ? verifiedPermissions : "",
+        blobSASSignatureValues.startsOn
+            ? truncatedISO8061Date(blobSASSignatureValues.startsOn, false)
+            : "",
+        blobSASSignatureValues.expiresOn
+            ? truncatedISO8061Date(blobSASSignatureValues.expiresOn, false)
+            : "",
+        getCanonicalName(sharedKeyCredential.accountName, blobSASSignatureValues.containerName, blobSASSignatureValues.blobName),
+        blobSASSignatureValues.identifier,
+        blobSASSignatureValues.ipRange ? ipRangeToString(blobSASSignatureValues.ipRange) : "",
+        blobSASSignatureValues.protocol ? blobSASSignatureValues.protocol : "",
+        blobSASSignatureValues.version,
+        resource,
+        timestamp,
+        blobSASSignatureValues.cacheControl ? blobSASSignatureValues.cacheControl : "",
+        blobSASSignatureValues.contentDisposition ? blobSASSignatureValues.contentDisposition : "",
+        blobSASSignatureValues.contentEncoding ? blobSASSignatureValues.contentEncoding : "",
+        blobSASSignatureValues.contentLanguage ? blobSASSignatureValues.contentLanguage : "",
+        blobSASSignatureValues.contentType ? blobSASSignatureValues.contentType : "",
+    ].join("\n");
+    const signature = sharedKeyCredential.computeHMACSHA256(stringToSign);
+    return {
+        sasQueryParameters: new SASQueryParameters(blobSASSignatureValues.version, signature, verifiedPermissions, undefined, undefined, blobSASSignatureValues.protocol, blobSASSignatureValues.startsOn, blobSASSignatureValues.expiresOn, blobSASSignatureValues.ipRange, blobSASSignatureValues.identifier, resource, blobSASSignatureValues.cacheControl, blobSASSignatureValues.contentDisposition, blobSASSignatureValues.contentEncoding, blobSASSignatureValues.contentLanguage, blobSASSignatureValues.contentType),
+        stringToSign: stringToSign,
+    };
+}
+/**
+ * ONLY AVAILABLE IN NODE.JS RUNTIME.
+ * IMPLEMENTATION FOR API VERSION FROM 2020-12-06.
+ *
+ * Creates an instance of SASQueryParameters.
+ *
+ * Only accepts required settings needed to create a SAS. For optional settings please
+ * set corresponding properties directly, such as permissions, startsOn and identifier.
+ *
+ * WARNING: When identifier is not provided, permissions and expiresOn are required.
+ * You MUST manually assign a value to identifier, or to expiresOn and permissions, if you initialize with
+ * this constructor.
+ *
+ * @param blobSASSignatureValues -
+ * @param sharedKeyCredential -
+ */
+function generateBlobSASQueryParameters20201206(blobSASSignatureValues, sharedKeyCredential) {
+    blobSASSignatureValues = SASSignatureValuesSanityCheckAndAutofill(blobSASSignatureValues);
+    if (!blobSASSignatureValues.identifier &&
+        !(blobSASSignatureValues.permissions && blobSASSignatureValues.expiresOn)) {
+        throw new RangeError("Must provide 'permissions' and 'expiresOn' for Blob SAS generation when 'identifier' is not provided.");
+    }
+    let resource = "c";
+    let timestamp = blobSASSignatureValues.snapshotTime;
+    if (blobSASSignatureValues.blobName) {
+        resource = "b";
+        if (blobSASSignatureValues.snapshotTime) {
+            resource = "bs";
+        }
+        else if (blobSASSignatureValues.versionId) {
+            resource = "bv";
+            timestamp = blobSASSignatureValues.versionId;
+        }
+    }
+    // Calling parse and toString guarantees the proper ordering and throws on invalid characters.
+    let verifiedPermissions;
+    if (blobSASSignatureValues.permissions) {
+        if (blobSASSignatureValues.blobName) {
+            verifiedPermissions = BlobSASPermissions.parse(blobSASSignatureValues.permissions.toString()).toString();
+        }
+        else {
+            verifiedPermissions = ContainerSASPermissions.parse(blobSASSignatureValues.permissions.toString()).toString();
+        }
+    }
+    // Signature is generated on the un-url-encoded values.
+    const stringToSign = [
+        verifiedPermissions ? verifiedPermissions : "",
+        blobSASSignatureValues.startsOn
+            ? truncatedISO8061Date(blobSASSignatureValues.startsOn, false)
+            : "",
+        blobSASSignatureValues.expiresOn
+            ? truncatedISO8061Date(blobSASSignatureValues.expiresOn, false)
+            : "",
+        getCanonicalName(sharedKeyCredential.accountName, blobSASSignatureValues.containerName, blobSASSignatureValues.blobName),
+        blobSASSignatureValues.identifier,
+        blobSASSignatureValues.ipRange ? ipRangeToString(blobSASSignatureValues.ipRange) : "",
+        blobSASSignatureValues.protocol ? blobSASSignatureValues.protocol : "",
+        blobSASSignatureValues.version,
+        resource,
+        timestamp,
+        blobSASSignatureValues.encryptionScope,
+        blobSASSignatureValues.cacheControl ? blobSASSignatureValues.cacheControl : "",
+        blobSASSignatureValues.contentDisposition ? blobSASSignatureValues.contentDisposition : "",
+        blobSASSignatureValues.contentEncoding ? blobSASSignatureValues.contentEncoding : "",
+        blobSASSignatureValues.contentLanguage ? blobSASSignatureValues.contentLanguage : "",
+        blobSASSignatureValues.contentType ? blobSASSignatureValues.contentType : "",
+    ].join("\n");
+    const signature = sharedKeyCredential.computeHMACSHA256(stringToSign);
+    return {
+        sasQueryParameters: new SASQueryParameters(blobSASSignatureValues.version, signature, verifiedPermissions, undefined, undefined, blobSASSignatureValues.protocol, blobSASSignatureValues.startsOn, blobSASSignatureValues.expiresOn, blobSASSignatureValues.ipRange, blobSASSignatureValues.identifier, resource, blobSASSignatureValues.cacheControl, blobSASSignatureValues.contentDisposition, blobSASSignatureValues.contentEncoding, blobSASSignatureValues.contentLanguage, blobSASSignatureValues.contentType, undefined, undefined, undefined, blobSASSignatureValues.encryptionScope),
+        stringToSign: stringToSign,
+    };
+}
+/**
+ * ONLY AVAILABLE IN NODE.JS RUNTIME.
+ * IMPLEMENTATION FOR API VERSION FROM 2018-11-09.
+ *
+ * Creates an instance of SASQueryParameters.
+ *
+ * Only accepts required settings needed to create a SAS. For optional settings please
+ * set corresponding properties directly, such as permissions, startsOn.
+ *
+ * WARNING: identifier will be ignored, permissions and expiresOn are required.
+ *
+ * @param blobSASSignatureValues -
+ * @param userDelegationKeyCredential -
+ */
+function generateBlobSASQueryParametersUDK20181109(blobSASSignatureValues, userDelegationKeyCredential) {
+    blobSASSignatureValues = SASSignatureValuesSanityCheckAndAutofill(blobSASSignatureValues);
+    // Stored access policies are not supported for a user delegation SAS.
+    if (!blobSASSignatureValues.permissions || !blobSASSignatureValues.expiresOn) {
+        throw new RangeError("Must provide 'permissions' and 'expiresOn' for Blob SAS generation when generating user delegation SAS.");
+    }
+    let resource = "c";
+    let timestamp = blobSASSignatureValues.snapshotTime;
+    if (blobSASSignatureValues.blobName) {
+        resource = "b";
+        if (blobSASSignatureValues.snapshotTime) {
+            resource = "bs";
+        }
+        else if (blobSASSignatureValues.versionId) {
+            resource = "bv";
+            timestamp = blobSASSignatureValues.versionId;
+        }
+    }
+    // Calling parse and toString guarantees the proper ordering and throws on invalid characters.
+    let verifiedPermissions;
+    if (blobSASSignatureValues.permissions) {
+        if (blobSASSignatureValues.blobName) {
+            verifiedPermissions = BlobSASPermissions.parse(blobSASSignatureValues.permissions.toString()).toString();
+        }
+        else {
+            verifiedPermissions = ContainerSASPermissions.parse(blobSASSignatureValues.permissions.toString()).toString();
+        }
+    }
+    // Signature is generated on the un-url-encoded values.
+    const stringToSign = [
+        verifiedPermissions ? verifiedPermissions : "",
+        blobSASSignatureValues.startsOn
+            ? truncatedISO8061Date(blobSASSignatureValues.startsOn, false)
+            : "",
+        blobSASSignatureValues.expiresOn
+            ? truncatedISO8061Date(blobSASSignatureValues.expiresOn, false)
+            : "",
+        getCanonicalName(userDelegationKeyCredential.accountName, blobSASSignatureValues.containerName, blobSASSignatureValues.blobName),
+        userDelegationKeyCredential.userDelegationKey.signedObjectId,
+        userDelegationKeyCredential.userDelegationKey.signedTenantId,
+        userDelegationKeyCredential.userDelegationKey.signedStartsOn
+            ? truncatedISO8061Date(userDelegationKeyCredential.userDelegationKey.signedStartsOn, false)
+            : "",
+        userDelegationKeyCredential.userDelegationKey.signedExpiresOn
+            ? truncatedISO8061Date(userDelegationKeyCredential.userDelegationKey.signedExpiresOn, false)
+            : "",
+        userDelegationKeyCredential.userDelegationKey.signedService,
+        userDelegationKeyCredential.userDelegationKey.signedVersion,
+        blobSASSignatureValues.ipRange ? ipRangeToString(blobSASSignatureValues.ipRange) : "",
+        blobSASSignatureValues.protocol ? blobSASSignatureValues.protocol : "",
+        blobSASSignatureValues.version,
+        resource,
+        timestamp,
+        blobSASSignatureValues.cacheControl,
+        blobSASSignatureValues.contentDisposition,
+        blobSASSignatureValues.contentEncoding,
+        blobSASSignatureValues.contentLanguage,
+        blobSASSignatureValues.contentType,
+    ].join("\n");
+    const signature = userDelegationKeyCredential.computeHMACSHA256(stringToSign);
+    return {
+        sasQueryParameters: new SASQueryParameters(blobSASSignatureValues.version, signature, verifiedPermissions, undefined, undefined, blobSASSignatureValues.protocol, blobSASSignatureValues.startsOn, blobSASSignatureValues.expiresOn, blobSASSignatureValues.ipRange, blobSASSignatureValues.identifier, resource, blobSASSignatureValues.cacheControl, blobSASSignatureValues.contentDisposition, blobSASSignatureValues.contentEncoding, blobSASSignatureValues.contentLanguage, blobSASSignatureValues.contentType, userDelegationKeyCredential.userDelegationKey),
+        stringToSign: stringToSign,
+    };
+}
+/**
+ * ONLY AVAILABLE IN NODE.JS RUNTIME.
+ * IMPLEMENTATION FOR API VERSION FROM 2020-02-10.
+ *
+ * Creates an instance of SASQueryParameters.
+ *
+ * Only accepts required settings needed to create a SAS. For optional settings please
+ * set corresponding properties directly, such as permissions, startsOn.
+ *
+ * WARNING: identifier will be ignored, permissions and expiresOn are required.
+ *
+ * @param blobSASSignatureValues -
+ * @param userDelegationKeyCredential -
+ */
+function generateBlobSASQueryParametersUDK20200210(blobSASSignatureValues, userDelegationKeyCredential) {
+    blobSASSignatureValues = SASSignatureValuesSanityCheckAndAutofill(blobSASSignatureValues);
+    // Stored access policies are not supported for a user delegation SAS.
+    if (!blobSASSignatureValues.permissions || !blobSASSignatureValues.expiresOn) {
+        throw new RangeError("Must provide 'permissions' and 'expiresOn' for Blob SAS generation when generating user delegation SAS.");
+    }
+    let resource = "c";
+    let timestamp = blobSASSignatureValues.snapshotTime;
+    if (blobSASSignatureValues.blobName) {
+        resource = "b";
+        if (blobSASSignatureValues.snapshotTime) {
+            resource = "bs";
+        }
+        else if (blobSASSignatureValues.versionId) {
+            resource = "bv";
+            timestamp = blobSASSignatureValues.versionId;
+        }
+    }
+    // Calling parse and toString guarantees the proper ordering and throws on invalid characters.
+    let verifiedPermissions;
+    if (blobSASSignatureValues.permissions) {
+        if (blobSASSignatureValues.blobName) {
+            verifiedPermissions = BlobSASPermissions.parse(blobSASSignatureValues.permissions.toString()).toString();
+        }
+        else {
+            verifiedPermissions = ContainerSASPermissions.parse(blobSASSignatureValues.permissions.toString()).toString();
+        }
+    }
+    // Signature is generated on the un-url-encoded values.
+    const stringToSign = [
+        verifiedPermissions ? verifiedPermissions : "",
+        blobSASSignatureValues.startsOn
+            ? truncatedISO8061Date(blobSASSignatureValues.startsOn, false)
+            : "",
+        blobSASSignatureValues.expiresOn
+            ? truncatedISO8061Date(blobSASSignatureValues.expiresOn, false)
+            : "",
+        getCanonicalName(userDelegationKeyCredential.accountName, blobSASSignatureValues.containerName, blobSASSignatureValues.blobName),
+        userDelegationKeyCredential.userDelegationKey.signedObjectId,
+        userDelegationKeyCredential.userDelegationKey.signedTenantId,
+        userDelegationKeyCredential.userDelegationKey.signedStartsOn
+            ? truncatedISO8061Date(userDelegationKeyCredential.userDelegationKey.signedStartsOn, false)
+            : "",
+        userDelegationKeyCredential.userDelegationKey.signedExpiresOn
+            ? truncatedISO8061Date(userDelegationKeyCredential.userDelegationKey.signedExpiresOn, false)
+            : "",
+        userDelegationKeyCredential.userDelegationKey.signedService,
+        userDelegationKeyCredential.userDelegationKey.signedVersion,
+        blobSASSignatureValues.preauthorizedAgentObjectId,
+        undefined, // agentObjectId
+        blobSASSignatureValues.correlationId,
+        blobSASSignatureValues.ipRange ? ipRangeToString(blobSASSignatureValues.ipRange) : "",
+        blobSASSignatureValues.protocol ? blobSASSignatureValues.protocol : "",
+        blobSASSignatureValues.version,
+        resource,
+        timestamp,
+        blobSASSignatureValues.cacheControl,
+        blobSASSignatureValues.contentDisposition,
+        blobSASSignatureValues.contentEncoding,
+        blobSASSignatureValues.contentLanguage,
+        blobSASSignatureValues.contentType,
+    ].join("\n");
+    const signature = userDelegationKeyCredential.computeHMACSHA256(stringToSign);
+    return {
+        sasQueryParameters: new SASQueryParameters(blobSASSignatureValues.version, signature, verifiedPermissions, undefined, undefined, blobSASSignatureValues.protocol, blobSASSignatureValues.startsOn, blobSASSignatureValues.expiresOn, blobSASSignatureValues.ipRange, blobSASSignatureValues.identifier, resource, blobSASSignatureValues.cacheControl, blobSASSignatureValues.contentDisposition, blobSASSignatureValues.contentEncoding, blobSASSignatureValues.contentLanguage, blobSASSignatureValues.contentType, userDelegationKeyCredential.userDelegationKey, blobSASSignatureValues.preauthorizedAgentObjectId, blobSASSignatureValues.correlationId),
+        stringToSign: stringToSign,
+    };
+}
+/**
+ * ONLY AVAILABLE IN NODE.JS RUNTIME.
+ * IMPLEMENTATION FOR API VERSION FROM 2020-12-06.
+ *
+ * Creates an instance of SASQueryParameters.
+ *
+ * Only accepts required settings needed to create a SAS. For optional settings please
+ * set corresponding properties directly, such as permissions, startsOn.
+ *
+ * WARNING: identifier will be ignored, permissions and expiresOn are required.
+ *
+ * @param blobSASSignatureValues -
+ * @param userDelegationKeyCredential -
+ */
+function generateBlobSASQueryParametersUDK20201206(blobSASSignatureValues, userDelegationKeyCredential) {
+    blobSASSignatureValues = SASSignatureValuesSanityCheckAndAutofill(blobSASSignatureValues);
+    // Stored access policies are not supported for a user delegation SAS.
+    if (!blobSASSignatureValues.permissions || !blobSASSignatureValues.expiresOn) {
+        throw new RangeError("Must provide 'permissions' and 'expiresOn' for Blob SAS generation when generating user delegation SAS.");
+    }
+    let resource = "c";
+    let timestamp = blobSASSignatureValues.snapshotTime;
+    if (blobSASSignatureValues.blobName) {
+        resource = "b";
+        if (blobSASSignatureValues.snapshotTime) {
+            resource = "bs";
+        }
+        else if (blobSASSignatureValues.versionId) {
+            resource = "bv";
+            timestamp = blobSASSignatureValues.versionId;
+        }
+    }
+    // Calling parse and toString guarantees the proper ordering and throws on invalid characters.
+    let verifiedPermissions;
+    if (blobSASSignatureValues.permissions) {
+        if (blobSASSignatureValues.blobName) {
+            verifiedPermissions = BlobSASPermissions.parse(blobSASSignatureValues.permissions.toString()).toString();
+        }
+        else {
+            verifiedPermissions = ContainerSASPermissions.parse(blobSASSignatureValues.permissions.toString()).toString();
+        }
+    }
+    // Signature is generated on the un-url-encoded values.
+    const stringToSign = [
+        verifiedPermissions ? verifiedPermissions : "",
+        blobSASSignatureValues.startsOn
+            ? truncatedISO8061Date(blobSASSignatureValues.startsOn, false)
+            : "",
+        blobSASSignatureValues.expiresOn
+            ? truncatedISO8061Date(blobSASSignatureValues.expiresOn, false)
+            : "",
+        getCanonicalName(userDelegationKeyCredential.accountName, blobSASSignatureValues.containerName, blobSASSignatureValues.blobName),
+        userDelegationKeyCredential.userDelegationKey.signedObjectId,
+        userDelegationKeyCredential.userDelegationKey.signedTenantId,
+        userDelegationKeyCredential.userDelegationKey.signedStartsOn
+            ? truncatedISO8061Date(userDelegationKeyCredential.userDelegationKey.signedStartsOn, false)
+            : "",
+        userDelegationKeyCredential.userDelegationKey.signedExpiresOn
+            ? truncatedISO8061Date(userDelegationKeyCredential.userDelegationKey.signedExpiresOn, false)
+            : "",
+        userDelegationKeyCredential.userDelegationKey.signedService,
+        userDelegationKeyCredential.userDelegationKey.signedVersion,
+        blobSASSignatureValues.preauthorizedAgentObjectId,
+        undefined, // agentObjectId
+        blobSASSignatureValues.correlationId,
+        blobSASSignatureValues.ipRange ? ipRangeToString(blobSASSignatureValues.ipRange) : "",
+        blobSASSignatureValues.protocol ? blobSASSignatureValues.protocol : "",
+        blobSASSignatureValues.version,
+        resource,
+        timestamp,
+        blobSASSignatureValues.encryptionScope,
+        blobSASSignatureValues.cacheControl,
+        blobSASSignatureValues.contentDisposition,
+        blobSASSignatureValues.contentEncoding,
+        blobSASSignatureValues.contentLanguage,
+        blobSASSignatureValues.contentType,
+    ].join("\n");
+    const signature = userDelegationKeyCredential.computeHMACSHA256(stringToSign);
+    return {
+        sasQueryParameters: new SASQueryParameters(blobSASSignatureValues.version, signature, verifiedPermissions, undefined, undefined, blobSASSignatureValues.protocol, blobSASSignatureValues.startsOn, blobSASSignatureValues.expiresOn, blobSASSignatureValues.ipRange, blobSASSignatureValues.identifier, resource, blobSASSignatureValues.cacheControl, blobSASSignatureValues.contentDisposition, blobSASSignatureValues.contentEncoding, blobSASSignatureValues.contentLanguage, blobSASSignatureValues.contentType, userDelegationKeyCredential.userDelegationKey, blobSASSignatureValues.preauthorizedAgentObjectId, blobSASSignatureValues.correlationId, blobSASSignatureValues.encryptionScope),
+        stringToSign: stringToSign,
+    };
+}
+function getCanonicalName(accountName, containerName, blobName) {
+    // Container: "/blob/account/containerName"
+    // Blob:      "/blob/account/containerName/blobName"
+    const elements = [`/blob/${accountName}/${containerName}`];
+    if (blobName) {
+        elements.push(`/${blobName}`);
+    }
+    return elements.join("");
+}
+function SASSignatureValuesSanityCheckAndAutofill(blobSASSignatureValues) {
+    const version = blobSASSignatureValues.version ? blobSASSignatureValues.version : SERVICE_VERSION;
+    if (blobSASSignatureValues.snapshotTime && version < "2018-11-09") {
+        throw RangeError("'version' must be >= '2018-11-09' when providing 'snapshotTime'.");
+    }
+    if (blobSASSignatureValues.blobName === undefined && blobSASSignatureValues.snapshotTime) {
+        throw RangeError("Must provide 'blobName' when providing 'snapshotTime'.");
+    }
+    if (blobSASSignatureValues.versionId && version < "2019-10-10") {
+        throw RangeError("'version' must be >= '2019-10-10' when providing 'versionId'.");
+    }
+    if (blobSASSignatureValues.blobName === undefined && blobSASSignatureValues.versionId) {
+        throw RangeError("Must provide 'blobName' when providing 'versionId'.");
+    }
+    if (blobSASSignatureValues.permissions &&
+        blobSASSignatureValues.permissions.setImmutabilityPolicy &&
+        version < "2020-08-04") {
+        throw RangeError("'version' must be >= '2020-08-04' when provided 'i' permission.");
+    }
+    if (blobSASSignatureValues.permissions &&
+        blobSASSignatureValues.permissions.deleteVersion &&
+        version < "2019-10-10") {
+        throw RangeError("'version' must be >= '2019-10-10' when providing 'x' permission.");
+    }
+    if (blobSASSignatureValues.permissions &&
+        blobSASSignatureValues.permissions.permanentDelete &&
+        version < "2019-10-10") {
+        throw RangeError("'version' must be >= '2019-10-10' when providing 'y' permission.");
+    }
+    if (blobSASSignatureValues.permissions &&
+        blobSASSignatureValues.permissions.tag &&
+        version < "2019-12-12") {
+        throw RangeError("'version' must be >= '2019-12-12' when providing 't' permission.");
+    }
+    if (version < "2020-02-10" &&
+        blobSASSignatureValues.permissions &&
+        (blobSASSignatureValues.permissions.move || blobSASSignatureValues.permissions.execute)) {
+        throw RangeError("'version' must be >= '2020-02-10' when providing the 'm' or 'e' permission.");
+    }
+    if (version < "2021-04-10" &&
+        blobSASSignatureValues.permissions &&
+        blobSASSignatureValues.permissions.filterByTags) {
+        throw RangeError("'version' must be >= '2021-04-10' when providing the 'f' permission.");
+    }
+    if (version < "2020-02-10" &&
+        (blobSASSignatureValues.preauthorizedAgentObjectId || blobSASSignatureValues.correlationId)) {
+        throw RangeError("'version' must be >= '2020-02-10' when providing 'preauthorizedAgentObjectId' or 'correlationId'.");
+    }
+    if (blobSASSignatureValues.encryptionScope && version < "2020-12-06") {
+        throw RangeError("'version' must be >= '2020-12-06' when provided 'encryptionScope' in SAS.");
+    }
+    blobSASSignatureValues.version = version;
+    return blobSASSignatureValues;
+}
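+
+// Illustrative example (comment only): requesting the tag ('t') permission while pinning an
+// older service version trips the sanity check above. `credential` is a placeholder
+// StorageSharedKeyCredential.
+//
+//   generateBlobSASQueryParameters({
+//     containerName: "mycontainer",
+//     permissions: ContainerSASPermissions.parse("t"),
+//     expiresOn: new Date(Date.now() + 3600 * 1000),
+//     version: "2019-07-07",
+//   }, credential);
+//   // => RangeError: 'version' must be >= '2019-12-12' when providing 't' permission.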
+
+// Copyright (c) Microsoft Corporation.
+// Licensed under the MIT License.
+/**
+ * A client that manages leases for a {@link ContainerClient} or a {@link BlobClient}.
+ */
+class BlobLeaseClient {
+    /**
+     * Gets the lease Id.
+     *
+     * @readonly
+     */
+    get leaseId() {
+        return this._leaseId;
+    }
+    /**
+     * Gets the url.
+     *
+     * @readonly
+     */
+    get url() {
+        return this._url;
+    }
+    /**
+     * Creates an instance of BlobLeaseClient.
+     * @param client - The client to make the lease operation requests.
+     * @param leaseId - Initial proposed lease id.
+     */
+    constructor(client, leaseId) {
+        const clientContext = client.storageClientContext;
+        this._url = client.url;
+        if (client.name === undefined) {
+            this._isContainer = true;
+            this._containerOrBlobOperation = clientContext.container;
+        }
+        else {
+            this._isContainer = false;
+            this._containerOrBlobOperation = clientContext.blob;
+        }
+        if (!leaseId) {
+            leaseId = coreUtil.randomUUID();
+        }
+        this._leaseId = leaseId;
+    }
+    /**
+     * Establishes and manages a lock on a container for delete operations, or on a blob
+     * for write and delete operations.
+     * The lock duration can be 15 to 60 seconds, or can be infinite.
+     * @see https://docs.microsoft.com/en-us/rest/api/storageservices/lease-container
+     * and
+     * @see https://docs.microsoft.com/en-us/rest/api/storageservices/lease-blob
+     *
+     * @param duration - Must be between 15 and 60 seconds, or infinite (-1)
+     * @param options - option to configure lease management operations.
+     * @returns Response data for acquire lease operation.
+     */
+    async acquireLease(duration, options = {}) {
+        var _a, _b, _c, _d, _e;
+        if (this._isContainer &&
+            ((((_a = options.conditions) === null || _a === void 0 ? void 0 : _a.ifMatch) && ((_b = options.conditions) === null || _b === void 0 ? void 0 : _b.ifMatch) !== ETagNone) ||
+                (((_c = options.conditions) === null || _c === void 0 ? void 0 : _c.ifNoneMatch) && ((_d = options.conditions) === null || _d === void 0 ? void 0 : _d.ifNoneMatch) !== ETagNone) ||
+                ((_e = options.conditions) === null || _e === void 0 ? void 0 : _e.tagConditions))) {
+            throw new RangeError("The IfMatch, IfNoneMatch and tags access conditions are ignored by the service. Values other than undefined or their default values are not acceptable.");
+        }
+        return tracingClient.withSpan("BlobLeaseClient-acquireLease", options, async (updatedOptions) => {
+            var _a;
+            return assertResponse(await this._containerOrBlobOperation.acquireLease({
+                abortSignal: options.abortSignal,
+                duration,
+                modifiedAccessConditions: Object.assign(Object.assign({}, options.conditions), { ifTags: (_a = options.conditions) === null || _a === void 0 ? void 0 : _a.tagConditions }),
+                proposedLeaseId: this._leaseId,
+                tracingOptions: updatedOptions.tracingOptions,
+            }));
+        });
+    }
+    /**
+     * To change the ID of the lease.
+     * @see https://docs.microsoft.com/en-us/rest/api/storageservices/lease-container
+     * and
+     * @see https://docs.microsoft.com/en-us/rest/api/storageservices/lease-blob
+     *
+     * @param proposedLeaseId - the proposed new lease Id.
+     * @param options - option to configure lease management operations.
+     * @returns Response data for change lease operation.
+     */
+    async changeLease(proposedLeaseId, options = {}) {
+        var _a, _b, _c, _d, _e;
+        if (this._isContainer &&
+            ((((_a = options.conditions) === null || _a === void 0 ? void 0 : _a.ifMatch) && ((_b = options.conditions) === null || _b === void 0 ? void 0 : _b.ifMatch) !== ETagNone) ||
+                (((_c = options.conditions) === null || _c === void 0 ? void 0 : _c.ifNoneMatch) && ((_d = options.conditions) === null || _d === void 0 ? void 0 : _d.ifNoneMatch) !== ETagNone) ||
+                ((_e = options.conditions) === null || _e === void 0 ? void 0 : _e.tagConditions))) {
+            throw new RangeError("The IfMatch, IfNoneMatch and tags access conditions are ignored by the service. Values other than undefined or their default values are not acceptable.");
+        }
+        return tracingClient.withSpan("BlobLeaseClient-changeLease", options, async (updatedOptions) => {
+            var _a;
+            const response = assertResponse(await this._containerOrBlobOperation.changeLease(this._leaseId, proposedLeaseId, {
+                abortSignal: options.abortSignal,
+                modifiedAccessConditions: Object.assign(Object.assign({}, options.conditions), { ifTags: (_a = options.conditions) === null || _a === void 0 ? void 0 : _a.tagConditions }),
+                tracingOptions: updatedOptions.tracingOptions,
+            }));
+            this._leaseId = proposedLeaseId;
+            return response;
+        });
+    }
+    /**
+     * To free the lease if it is no longer needed so that another client may
+     * immediately acquire a lease against the container or the blob.
+     * @see https://docs.microsoft.com/en-us/rest/api/storageservices/lease-container
+     * and
+     * @see https://docs.microsoft.com/en-us/rest/api/storageservices/lease-blob
+     *
+     * @param options - option to configure lease management operations.
+     * @returns Response data for release lease operation.
+     */
+    async releaseLease(options = {}) {
+        var _a, _b, _c, _d, _e;
+        if (this._isContainer &&
+            ((((_a = options.conditions) === null || _a === void 0 ? void 0 : _a.ifMatch) && ((_b = options.conditions) === null || _b === void 0 ? void 0 : _b.ifMatch) !== ETagNone) ||
+                (((_c = options.conditions) === null || _c === void 0 ? void 0 : _c.ifNoneMatch) && ((_d = options.conditions) === null || _d === void 0 ? void 0 : _d.ifNoneMatch) !== ETagNone) ||
+                ((_e = options.conditions) === null || _e === void 0 ? void 0 : _e.tagConditions))) {
+            throw new RangeError("The IfMatch, IfNoneMatch and tags access conditions are ignored by the service. Values other than undefined or their default values are not acceptable.");
+        }
+        return tracingClient.withSpan("BlobLeaseClient-releaseLease", options, async (updatedOptions) => {
+            var _a;
+            return assertResponse(await this._containerOrBlobOperation.releaseLease(this._leaseId, {
+                abortSignal: options.abortSignal,
+                modifiedAccessConditions: Object.assign(Object.assign({}, options.conditions), { ifTags: (_a = options.conditions) === null || _a === void 0 ? void 0 : _a.tagConditions }),
+                tracingOptions: updatedOptions.tracingOptions,
+            }));
+        });
+    }
+    /**
+     * To renew the lease.
+     * @see https://docs.microsoft.com/en-us/rest/api/storageservices/lease-container
+     * and
+     * @see https://docs.microsoft.com/en-us/rest/api/storageservices/lease-blob
+     *
+     * @param options - Optional option to configure lease management operations.
+     * @returns Response data for renew lease operation.
+     */
+    async renewLease(options = {}) {
+        var _a, _b, _c, _d, _e;
+        if (this._isContainer &&
+            ((((_a = options.conditions) === null || _a === void 0 ? void 0 : _a.ifMatch) && ((_b = options.conditions) === null || _b === void 0 ? void 0 : _b.ifMatch) !== ETagNone) ||
+                (((_c = options.conditions) === null || _c === void 0 ? void 0 : _c.ifNoneMatch) && ((_d = options.conditions) === null || _d === void 0 ? void 0 : _d.ifNoneMatch) !== ETagNone) ||
+                ((_e = options.conditions) === null || _e === void 0 ? void 0 : _e.tagConditions))) {
+            throw new RangeError("The IfMatch, IfNoneMatch and tags access conditions are ignored by the service. Values other than undefined or their default values are not acceptable.");
+        }
+        return tracingClient.withSpan("BlobLeaseClient-renewLease", options, async (updatedOptions) => {
+            var _a;
+            return this._containerOrBlobOperation.renewLease(this._leaseId, {
+                abortSignal: options.abortSignal,
+                modifiedAccessConditions: Object.assign(Object.assign({}, options.conditions), { ifTags: (_a = options.conditions) === null || _a === void 0 ? void 0 : _a.tagConditions }),
+                tracingOptions: updatedOptions.tracingOptions,
+            });
+        });
+    }
+    /**
+     * To end the lease but ensure that another client cannot acquire a new lease
+     * until the current lease period has expired.
+     * @see https://docs.microsoft.com/en-us/rest/api/storageservices/lease-container
+     * and
+     * @see https://docs.microsoft.com/en-us/rest/api/storageservices/lease-blob
+     *
+     * @param breakPeriod - Break period
+     * @param options - Optional options to configure lease management operations.
+     * @returns Response data for break lease operation.
+     */
+    async breakLease(breakPeriod, options = {}) {
+        var _a, _b, _c, _d, _e;
+        if (this._isContainer &&
+            ((((_a = options.conditions) === null || _a === void 0 ? void 0 : _a.ifMatch) && ((_b = options.conditions) === null || _b === void 0 ? void 0 : _b.ifMatch) !== ETagNone) ||
+                (((_c = options.conditions) === null || _c === void 0 ? void 0 : _c.ifNoneMatch) && ((_d = options.conditions) === null || _d === void 0 ? void 0 : _d.ifNoneMatch) !== ETagNone) ||
+                ((_e = options.conditions) === null || _e === void 0 ? void 0 : _e.tagConditions))) {
+            throw new RangeError("The IfMatch, IfNoneMatch and tags access conditions are ignored by the service. Values other than undefined or their default values are not acceptable.");
+        }
+        return tracingClient.withSpan("BlobLeaseClient-breakLease", options, async (updatedOptions) => {
+            var _a;
+            const operationOptions = {
+                abortSignal: options.abortSignal,
+                breakPeriod,
+                modifiedAccessConditions: Object.assign(Object.assign({}, options.conditions), { ifTags: (_a = options.conditions) === null || _a === void 0 ? void 0 : _a.tagConditions }),
+                tracingOptions: updatedOptions.tracingOptions,
+            };
+            return assertResponse(await this._containerOrBlobOperation.breakLease(operationOptions));
+        });
+    }
+}
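+
+// Illustrative usage sketch (comment only; `containerClient` is a placeholder for an existing
+// ContainerClient instance, which exposes the storageClientContext this constructor expects):
+//
+//   const leaseClient = new BlobLeaseClient(containerClient);
+//   await leaseClient.acquireLease(30);  // 15-60 seconds, or -1 for an infinite lease
+//   // ... work that must not race with container deletion ...
+//   await leaseClient.releaseLease();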
+
+// Copyright (c) Microsoft Corporation.
+// Licensed under the MIT License.
+/**
+ * ONLY AVAILABLE IN NODE.JS RUNTIME.
+ *
+ * A Node.js Readable stream wrapper that internally retries when the underlying ReadableStream unexpectedly ends.
+ */
+class RetriableReadableStream extends stream.Readable {
+    /**
+     * Creates an instance of RetriableReadableStream.
+     *
+     * @param source - The current ReadableStream returned from getter
+     * @param getter - A method calling downloading request returning
+     *                                      a new ReadableStream from specified offset
+     * @param offset - Offset position in original data source to read
+     * @param count - How much data in original data source to read
+     * @param options -
+     */
+    constructor(source, getter, offset, count, options = {}) {
+        super({ highWaterMark: options.highWaterMark });
+        this.retries = 0;
+        this.sourceDataHandler = (data) => {
+            if (this.options.doInjectErrorOnce) {
+                this.options.doInjectErrorOnce = undefined;
+                this.source.pause();
+                this.sourceErrorOrEndHandler();
+                this.source.destroy();
+                return;
+            }
+            // console.log(
+            //   `Offset: ${this.offset}, Received ${data.length} from internal stream`
+            // );
+            this.offset += data.length;
+            if (this.onProgress) {
+                this.onProgress({ loadedBytes: this.offset - this.start });
+            }
+            if (!this.push(data)) {
+                this.source.pause();
+            }
+        };
+        this.sourceAbortedHandler = () => {
+            const abortError = new abortController.AbortError("The operation was aborted.");
+            this.destroy(abortError);
+        };
+        this.sourceErrorOrEndHandler = (err) => {
+            if (err && err.name === "AbortError") {
+                this.destroy(err);
+                return;
+            }
+            // console.log(
+            //   `Source stream emits end or error, offset: ${
+            //     this.offset
+            //   }, dest end : ${this.end}`
+            // );
+            this.removeSourceEventHandlers();
+            if (this.offset - 1 === this.end) {
+                this.push(null);
+            }
+            else if (this.offset <= this.end) {
+                // console.log(
+                //   `retries: ${this.retries}, max retries: ${this.maxRetries}`
+                // );
+                if (this.retries < this.maxRetryRequests) {
+                    this.retries += 1;
+                    this.getter(this.offset)
+                        .then((newSource) => {
+                        this.source = newSource;
+                        this.setSourceEventHandlers();
+                        return;
+                    })
+                        .catch((error) => {
+                        this.destroy(error);
+                    });
+                }
+                else {
+                    this.destroy(new Error(`Data corruption failure: received less data than required and reached maxRetries limitation. Received data offset: ${this.offset - 1}, data needed offset: ${this.end}, retries: ${this.retries}, max retries: ${this.maxRetryRequests}`));
+                }
+            }
+            else {
+                this.destroy(new Error(`Data corruption failure: Received more data than original request, data needed offset is ${this.end}, received offset: ${this.offset - 1}`));
+            }
+        };
+        this.getter = getter;
+        this.source = source;
+        this.start = offset;
+        this.offset = offset;
+        this.end = offset + count - 1;
+        this.maxRetryRequests =
+            options.maxRetryRequests && options.maxRetryRequests >= 0 ? options.maxRetryRequests : 0;
+        this.onProgress = options.onProgress;
+        this.options = options;
+        this.setSourceEventHandlers();
+    }
+    _read() {
+        this.source.resume();
+    }
+    setSourceEventHandlers() {
+        this.source.on("data", this.sourceDataHandler);
+        this.source.on("end", this.sourceErrorOrEndHandler);
+        this.source.on("error", this.sourceErrorOrEndHandler);
+        // needed for Node14
+        this.source.on("aborted", this.sourceAbortedHandler);
+    }
+    removeSourceEventHandlers() {
+        this.source.removeListener("data", this.sourceDataHandler);
+        this.source.removeListener("end", this.sourceErrorOrEndHandler);
+        this.source.removeListener("error", this.sourceErrorOrEndHandler);
+        this.source.removeListener("aborted", this.sourceAbortedHandler);
+    }
+    _destroy(error, callback) {
+        // remove listener from source and release source
+        this.removeSourceEventHandlers();
+        this.source.destroy();
+        callback(error === null ? undefined : error);
+    }
+}
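+
+// A minimal usage sketch (not part of the library): RetriableReadableStream pairs the
+// initial download stream with a `getter` that can re-issue the request from a given
+// offset. `initialStream`, `reopenFrom`, `totalLength` and `destination` below are hypothetical.
+//
+//   const retriable = new RetriableReadableStream(
+//       initialStream,                         // Readable from the first download response
+//       async (offset) => reopenFrom(offset),  // re-download starting at `offset`
+//       0,                                     // starting offset within the data source
+//       totalLength,                           // number of bytes expected in total
+//       { maxRetryRequests: 5 },
+//   );
+//   retriable.pipe(destination);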
+
+// Copyright (c) Microsoft Corporation.
+// Licensed under the MIT License.
+/**
+ * ONLY AVAILABLE IN NODE.JS RUNTIME.
+ *
+ * BlobDownloadResponse implements the BlobDownloadResponseParsed interface, and in the Node.js runtime it will
+ * automatically retry when the internal read stream unexpectedly ends. (This kind of unexpected end cannot
+ * trigger the retries defined in the pipeline retry policy.)
+ *
+ * The {@link readableStreamBody} stream retries under the hood; you can use it as a normal Node.js
+ * Readable stream.
+ */
+class BlobDownloadResponse {
+    /**
+     * Indicates that the service supports
+     * requests for partial file content.
+     *
+     * @readonly
+     */
+    get acceptRanges() {
+        return this.originalResponse.acceptRanges;
+    }
+    /**
+     * Returns the 'Cache-Control' value if it was previously specified
+     * for the file.
+     *
+     * @readonly
+     */
+    get cacheControl() {
+        return this.originalResponse.cacheControl;
+    }
+    /**
+     * Returns the value that was specified
+     * for the 'x-ms-content-disposition' header and specifies how to process the
+     * response.
+     *
+     * @readonly
+     */
+    get contentDisposition() {
+        return this.originalResponse.contentDisposition;
+    }
+    /**
+     * Returns the value that was specified
+     * for the Content-Encoding request header.
+     *
+     * @readonly
+     */
+    get contentEncoding() {
+        return this.originalResponse.contentEncoding;
+    }
+    /**
+     * Returns the value that was specified
+     * for the Content-Language request header.
+     *
+     * @readonly
+     */
+    get contentLanguage() {
+        return this.originalResponse.contentLanguage;
+    }
+    /**
+     * The current sequence number for a
+     * page blob. This header is not returned for block blobs or append blobs.
+     *
+     * @readonly
+     */
+    get blobSequenceNumber() {
+        return this.originalResponse.blobSequenceNumber;
+    }
+    /**
+     * The blob's type. Possible values include:
+     * 'BlockBlob', 'PageBlob', 'AppendBlob'.
+     *
+     * @readonly
+     */
+    get blobType() {
+        return this.originalResponse.blobType;
+    }
+    /**
+     * The number of bytes present in the
+     * response body.
+     *
+     * @readonly
+     */
+    get contentLength() {
+        return this.originalResponse.contentLength;
+    }
+    /**
+     * If the file has an MD5 hash and the
+     * request is to read the full file, this response header is returned so that
+     * the client can check for message content integrity. If the request is to
+     * read a specified range and the 'x-ms-range-get-content-md5' is set to
+     * true, then the request returns an MD5 hash for the range, as long as the
+     * range size is less than or equal to 4 MB. If neither of these sets of
+     * conditions is true, then no value is returned for the 'Content-MD5'
+     * header.
+     *
+     * @readonly
+     */
+    get contentMD5() {
+        return this.originalResponse.contentMD5;
+    }
+    /**
+     * Indicates the range of bytes returned if
+     * the client requested a subset of the file by setting the Range request
+     * header.
+     *
+     * @readonly
+     */
+    get contentRange() {
+        return this.originalResponse.contentRange;
+    }
+    /**
+     * The content type specified for the file.
+     * The default content type is 'application/octet-stream'
+     *
+     * @readonly
+     */
+    get contentType() {
+        return this.originalResponse.contentType;
+    }
+    /**
+     * Conclusion time of the last attempted
+     * Copy File operation where this file was the destination file. This value
+     * can specify the time of a completed, aborted, or failed copy attempt.
+     *
+     * @readonly
+     */
+    get copyCompletedOn() {
+        return this.originalResponse.copyCompletedOn;
+    }
+    /**
+     * String identifier for the last attempted Copy
+     * File operation where this file was the destination file.
+     *
+     * @readonly
+     */
+    get copyId() {
+        return this.originalResponse.copyId;
+    }
+    /**
+     * Contains the number of bytes copied and
+     * the total bytes in the source in the last attempted Copy File operation
+     * where this file was the destination file. Can show between 0 and
+     * Content-Length bytes copied.
+     *
+     * @readonly
+     */
+    get copyProgress() {
+        return this.originalResponse.copyProgress;
+    }
+    /**
+     * URL up to 2KB in length that specifies the
+     * source file used in the last attempted Copy File operation where this file
+     * was the destination file.
+     *
+     * @readonly
+     */
+    get copySource() {
+        return this.originalResponse.copySource;
+    }
+    /**
+     * State of the copy operation
+     * identified by 'x-ms-copy-id'. Possible values include: 'pending',
+     * 'success', 'aborted', 'failed'
+     *
+     * @readonly
+     */
+    get copyStatus() {
+        return this.originalResponse.copyStatus;
+    }
+    /**
+     * Only appears when
+     * x-ms-copy-status is failed or pending. Describes cause of fatal or
+     * non-fatal copy operation failure.
+     *
+     * @readonly
+     */
+    get copyStatusDescription() {
+        return this.originalResponse.copyStatusDescription;
+    }
+    /**
+     * When a blob is leased,
+     * specifies whether the lease is of infinite or fixed duration. Possible
+     * values include: 'infinite', 'fixed'.
+     *
+     * @readonly
+     */
+    get leaseDuration() {
+        return this.originalResponse.leaseDuration;
+    }
+    /**
+     * Lease state of the blob. Possible
+     * values include: 'available', 'leased', 'expired', 'breaking', 'broken'.
+     *
+     * @readonly
+     */
+    get leaseState() {
+        return this.originalResponse.leaseState;
+    }
+    /**
+     * The current lease status of the
+     * blob. Possible values include: 'locked', 'unlocked'.
+     *
+     * @readonly
+     */
+    get leaseStatus() {
+        return this.originalResponse.leaseStatus;
+    }
+    /**
+     * A UTC date/time value generated by the service that
+     * indicates the time at which the response was initiated.
+     *
+     * @readonly
+     */
+    get date() {
+        return this.originalResponse.date;
+    }
+    /**
+     * The number of committed blocks
+     * present in the blob. This header is returned only for append blobs.
+     *
+     * @readonly
+     */
+    get blobCommittedBlockCount() {
+        return this.originalResponse.blobCommittedBlockCount;
+    }
+    /**
+     * The ETag contains a value that you can use to
+     * perform operations conditionally, in quotes.
+     *
+     * @readonly
+     */
+    get etag() {
+        return this.originalResponse.etag;
+    }
+    /**
+     * The number of tags associated with the blob
+     *
+     * @readonly
+     */
+    get tagCount() {
+        return this.originalResponse.tagCount;
+    }
+    /**
+     * The error code.
+     *
+     * @readonly
+     */
+    get errorCode() {
+        return this.originalResponse.errorCode;
+    }
+    /**
+     * The value of this header is set to
+     * true if the file data and application metadata are completely encrypted
+     * using the specified algorithm. Otherwise, the value is set to false (when
+     * the file is unencrypted, or if only parts of the file/application metadata
+     * are encrypted).
+     *
+     * @readonly
+     */
+    get isServerEncrypted() {
+        return this.originalResponse.isServerEncrypted;
+    }
+    /**
+     * If the blob has an MD5 hash, and if
+     * request contains range header (Range or x-ms-range), this response header
+     * is returned with the value of the whole blob's MD5 value. This value may
+     * or may not be equal to the value returned in Content-MD5 header, with the
+     * latter calculated from the requested range.
+     *
+     * @readonly
+     */
+    get blobContentMD5() {
+        return this.originalResponse.blobContentMD5;
+    }
+    /**
+     * Returns the date and time the file was last
+     * modified. Any operation that modifies the file or its properties updates
+     * the last modified time.
+     *
+     * @readonly
+     */
+    get lastModified() {
+        return this.originalResponse.lastModified;
+    }
+    /**
+     * Returns the UTC date and time generated by the service that indicates the time at which the blob was
+     * last read or written to.
+     *
+     * @readonly
+     */
+    get lastAccessed() {
+        return this.originalResponse.lastAccessed;
+    }
+    /**
+     * Returns the date and time the blob was created.
+     *
+     * @readonly
+     */
+    get createdOn() {
+        return this.originalResponse.createdOn;
+    }
+    /**
+     * A name-value pair
+     * to associate with a file storage object.
+     *
+     * @readonly
+     */
+    get metadata() {
+        return this.originalResponse.metadata;
+    }
+    /**
+     * This header uniquely identifies the request
+     * that was made and can be used for troubleshooting the request.
+     *
+     * @readonly
+     */
+    get requestId() {
+        return this.originalResponse.requestId;
+    }
+    /**
+     * If a client request id header is sent in the request, this header will be present in the
+     * response with the same value.
+     *
+     * @readonly
+     */
+    get clientRequestId() {
+        return this.originalResponse.clientRequestId;
+    }
+    /**
+     * Indicates the version of the Blob service used
+     * to execute the request.
+     *
+     * @readonly
+     */
+    get version() {
+        return this.originalResponse.version;
+    }
+    /**
+     * Indicates the versionId of the downloaded blob version.
+     *
+     * @readonly
+     */
+    get versionId() {
+        return this.originalResponse.versionId;
+    }
+    /**
+     * Indicates whether this version of the blob is the current version.
+     *
+     * @readonly
+     */
+    get isCurrentVersion() {
+        return this.originalResponse.isCurrentVersion;
+    }
+    /**
+     * The SHA-256 hash of the encryption key used to encrypt the blob. This value is only returned
+     * when the blob was encrypted with a customer-provided key.
+     *
+     * @readonly
+     */
+    get encryptionKeySha256() {
+        return this.originalResponse.encryptionKeySha256;
+    }
+    /**
+     * If the request is to read a specified range and the x-ms-range-get-content-crc64 is set to
+     * true, then the request returns a crc64 for the range, as long as the range size is less than
+     * or equal to 4 MB. If both x-ms-range-get-content-crc64 & x-ms-range-get-content-md5 are
+     * specified in the same request, it will fail with 400 (Bad Request).
+     */
+    get contentCrc64() {
+        return this.originalResponse.contentCrc64;
+    }
+    /**
+     * Object Replication Policy Id of the destination blob.
+     *
+     * @readonly
+     */
+    get objectReplicationDestinationPolicyId() {
+        return this.originalResponse.objectReplicationDestinationPolicyId;
+    }
+    /**
+     * Parsed Object Replication Policy Id, Rule Id(s) and status of the source blob.
+     *
+     * @readonly
+     */
+    get objectReplicationSourceProperties() {
+        return this.originalResponse.objectReplicationSourceProperties;
+    }
+    /**
+     * If this blob has been sealed.
+     *
+     * @readonly
+     */
+    get isSealed() {
+        return this.originalResponse.isSealed;
+    }
+    /**
+     * UTC date/time value generated by the service that indicates the time at which the blob immutability policy will expire.
+     *
+     * @readonly
+     */
+    get immutabilityPolicyExpiresOn() {
+        return this.originalResponse.immutabilityPolicyExpiresOn;
+    }
+    /**
+     * Indicates immutability policy mode.
+     *
+     * @readonly
+     */
+    get immutabilityPolicyMode() {
+        return this.originalResponse.immutabilityPolicyMode;
+    }
+    /**
+     * Indicates if a legal hold is present on the blob.
+     *
+     * @readonly
+     */
+    get legalHold() {
+        return this.originalResponse.legalHold;
+    }
+    /**
+     * The response body as a browser Blob.
+     * Always undefined in node.js.
+     *
+     * @readonly
+     */
+    get contentAsBlob() {
+        return this.originalResponse.blobBody;
+    }
+    /**
+     * The response body as a node.js Readable stream.
+     * Always undefined in the browser.
+     *
+     * It will automatically retry when internal read stream unexpected ends.
+     *
+     * @readonly
+     */
+    get readableStreamBody() {
+        return coreUtil.isNode ? this.blobDownloadStream : undefined;
+    }
+    /**
+     * The HTTP response.
+     */
+    get _response() {
+        return this.originalResponse._response;
+    }
+    /**
+     * Creates an instance of BlobDownloadResponse.
+     *
+     * @param originalResponse -
+     * @param getter -
+     * @param offset -
+     * @param count -
+     * @param options -
+     */
+    constructor(originalResponse, getter, offset, count, options = {}) {
+        this.originalResponse = originalResponse;
+        this.blobDownloadStream = new RetriableReadableStream(this.originalResponse.readableStreamBody, getter, offset, count, options);
+    }
+}
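+
+// A minimal consumption sketch (hypothetical `download` variable): a BlobDownloadResponse
+// is normally created by the SDK itself; callers read its headers and stream the body.
+//
+//   console.log(download.contentLength, download.etag, download.lastModified);
+//   if (download.readableStreamBody) {   // defined in Node.js, undefined in browsers
+//       download.readableStreamBody.pipe(fs.createWriteStream("blob.bin"));
+//   }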
+
+// Copyright (c) Microsoft Corporation.
+// Licensed under the MIT License.
+const AVRO_SYNC_MARKER_SIZE = 16;
+const AVRO_INIT_BYTES = new Uint8Array([79, 98, 106, 1]);
+const AVRO_CODEC_KEY = "avro.codec";
+const AVRO_SCHEMA_KEY = "avro.schema";
+
+// Copyright (c) Microsoft Corporation.
+// Licensed under the MIT License.
+class AvroParser {
+    /**
+     * Reads a fixed number of bytes from the stream.
+     *
+     * @param stream -
+     * @param length -
+     * @param options -
+     */
+    static async readFixedBytes(stream, length, options = {}) {
+        const bytes = await stream.read(length, { abortSignal: options.abortSignal });
+        if (bytes.length !== length) {
+            throw new Error("Hit stream end.");
+        }
+        return bytes;
+    }
+    /**
+     * Reads a single byte from the stream.
+     *
+     * @param stream -
+     * @param options -
+     */
+    static async readByte(stream, options = {}) {
+        const buf = await AvroParser.readFixedBytes(stream, 1, options);
+        return buf[0];
+    }
+    // int and long are stored in variable-length zig-zag coding.
+    // variable-length: https://lucene.apache.org/core/3_5_0/fileformats.html#VInt
+    // zig-zag: https://developers.google.com/protocol-buffers/docs/encoding?csw=1#types
+    static async readZigZagLong(stream, options = {}) {
+        let zigZagEncoded = 0;
+        let significanceInBit = 0;
+        let byte, haveMoreByte, significanceInFloat;
+        do {
+            byte = await AvroParser.readByte(stream, options);
+            haveMoreByte = byte & 0x80;
+            zigZagEncoded |= (byte & 0x7f) << significanceInBit;
+            significanceInBit += 7;
+        } while (haveMoreByte && significanceInBit < 28); // bitwise operation only works for 32-bit integers
+        if (haveMoreByte) {
+            // Switch to float arithmetic
+            // eslint-disable-next-line no-self-assign
+            zigZagEncoded = zigZagEncoded;
+            significanceInFloat = 268435456; // 2 ** 28.
+            do {
+                byte = await AvroParser.readByte(stream, options);
+                zigZagEncoded += (byte & 0x7f) * significanceInFloat;
+                significanceInFloat *= 128; // 2 ** 7
+            } while (byte & 0x80);
+            const res = (zigZagEncoded % 2 ? -(zigZagEncoded + 1) : zigZagEncoded) / 2;
+            if (res < Number.MIN_SAFE_INTEGER || res > Number.MAX_SAFE_INTEGER) {
+                throw new Error("Integer overflow.");
+            }
+            return res;
+        }
+        return (zigZagEncoded >> 1) ^ -(zigZagEncoded & 1);
+    }
+    static async readLong(stream, options = {}) {
+        return AvroParser.readZigZagLong(stream, options);
+    }
+    static async readInt(stream, options = {}) {
+        return AvroParser.readZigZagLong(stream, options);
+    }
+    static async readNull() {
+        return null;
+    }
+    static async readBoolean(stream, options = {}) {
+        const b = await AvroParser.readByte(stream, options);
+        if (b === 1) {
+            return true;
+        }
+        else if (b === 0) {
+            return false;
+        }
+        else {
+            throw new Error("Byte was not a boolean.");
+        }
+    }
+    static async readFloat(stream, options = {}) {
+        const u8arr = await AvroParser.readFixedBytes(stream, 4, options);
+        const view = new DataView(u8arr.buffer, u8arr.byteOffset, u8arr.byteLength);
+        return view.getFloat32(0, true); // littleEndian = true
+    }
+    static async readDouble(stream, options = {}) {
+        const u8arr = await AvroParser.readFixedBytes(stream, 8, options);
+        const view = new DataView(u8arr.buffer, u8arr.byteOffset, u8arr.byteLength);
+        return view.getFloat64(0, true); // littleEndian = true
+    }
+    static async readBytes(stream, options = {}) {
+        const size = await AvroParser.readLong(stream, options);
+        if (size < 0) {
+            throw new Error("Bytes size was negative.");
+        }
+        return stream.read(size, { abortSignal: options.abortSignal });
+    }
+    static async readString(stream, options = {}) {
+        const u8arr = await AvroParser.readBytes(stream, options);
+        const utf8decoder = new TextDecoder();
+        return utf8decoder.decode(u8arr);
+    }
+    static async readMapPair(stream, readItemMethod, options = {}) {
+        const key = await AvroParser.readString(stream, options);
+        // FUTURE: this won't work with readFixed (currently not supported) which needs a length as the parameter.
+        const value = await readItemMethod(stream, options);
+        return { key, value };
+    }
+    static async readMap(stream, readItemMethod, options = {}) {
+        const readPairMethod = (s, opts = {}) => {
+            return AvroParser.readMapPair(s, readItemMethod, opts);
+        };
+        const pairs = await AvroParser.readArray(stream, readPairMethod, options);
+        const dict = {};
+        for (const pair of pairs) {
+            dict[pair.key] = pair.value;
+        }
+        return dict;
+    }
+    static async readArray(stream, readItemMethod, options = {}) {
+        const items = [];
+        for (let count = await AvroParser.readLong(stream, options); count !== 0; count = await AvroParser.readLong(stream, options)) {
+            if (count < 0) {
+                // Ignore block sizes
+                await AvroParser.readLong(stream, options);
+                count = -count;
+            }
+            while (count--) {
+                const item = await readItemMethod(stream, options);
+                items.push(item);
+            }
+        }
+        return items;
+    }
+}
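+
+// Worked example of the zig-zag/varint decoding in readZigZagLong above (values checked
+// by hand, not taken from the library):
+//   value 300 -> zig-zag 600 -> varint bytes [0xd8, 0x04]
+//   value -3  -> zig-zag 5   -> varint bytes [0x05]
+// Reading [0xd8, 0x04] accumulates 0x58 + (0x04 << 7) = 600, and
+// (600 >> 1) ^ -(600 & 1) === 300; reading [0x05] gives (5 >> 1) ^ -(5 & 1) === -3.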
+var AvroComplex;
+(function (AvroComplex) {
+    AvroComplex["RECORD"] = "record";
+    AvroComplex["ENUM"] = "enum";
+    AvroComplex["ARRAY"] = "array";
+    AvroComplex["MAP"] = "map";
+    AvroComplex["UNION"] = "union";
+    AvroComplex["FIXED"] = "fixed";
+})(AvroComplex || (AvroComplex = {}));
+var AvroPrimitive;
+(function (AvroPrimitive) {
+    AvroPrimitive["NULL"] = "null";
+    AvroPrimitive["BOOLEAN"] = "boolean";
+    AvroPrimitive["INT"] = "int";
+    AvroPrimitive["LONG"] = "long";
+    AvroPrimitive["FLOAT"] = "float";
+    AvroPrimitive["DOUBLE"] = "double";
+    AvroPrimitive["BYTES"] = "bytes";
+    AvroPrimitive["STRING"] = "string";
+})(AvroPrimitive || (AvroPrimitive = {}));
+class AvroType {
+    /**
+     * Determines the AvroType from the Avro Schema.
+     */
+    // eslint-disable-next-line @typescript-eslint/no-wrapper-object-types
+    static fromSchema(schema) {
+        if (typeof schema === "string") {
+            return AvroType.fromStringSchema(schema);
+        }
+        else if (Array.isArray(schema)) {
+            return AvroType.fromArraySchema(schema);
+        }
+        else {
+            return AvroType.fromObjectSchema(schema);
+        }
+    }
+    static fromStringSchema(schema) {
+        switch (schema) {
+            case AvroPrimitive.NULL:
+            case AvroPrimitive.BOOLEAN:
+            case AvroPrimitive.INT:
+            case AvroPrimitive.LONG:
+            case AvroPrimitive.FLOAT:
+            case AvroPrimitive.DOUBLE:
+            case AvroPrimitive.BYTES:
+            case AvroPrimitive.STRING:
+                return new AvroPrimitiveType(schema);
+            default:
+                throw new Error(`Unexpected Avro type ${schema}`);
+        }
+    }
+    static fromArraySchema(schema) {
+        return new AvroUnionType(schema.map(AvroType.fromSchema));
+    }
+    static fromObjectSchema(schema) {
+        const type = schema.type;
+        // Primitives can be defined as strings or objects
+        try {
+            return AvroType.fromStringSchema(type);
+        }
+        catch (_a) {
+            // no-op
+        }
+        switch (type) {
+            case AvroComplex.RECORD:
+                if (schema.aliases) {
+                    throw new Error(`aliases are currently not supported, schema: ${schema}`);
+                }
+                if (!schema.name) {
+                    throw new Error(`Required attribute 'name' doesn't exist on schema: ${schema}`);
+                }
+                // eslint-disable-next-line no-case-declarations
+                const fields = {};
+                if (!schema.fields) {
+                    throw new Error(`Required attribute 'fields' doesn't exist on schema: ${schema}`);
+                }
+                for (const field of schema.fields) {
+                    fields[field.name] = AvroType.fromSchema(field.type);
+                }
+                return new AvroRecordType(fields, schema.name);
+            case AvroComplex.ENUM:
+                if (schema.aliases) {
+                    throw new Error(`aliases are currently not supported, schema: ${schema}`);
+                }
+                if (!schema.symbols) {
+                    throw new Error(`Required attribute 'symbols' doesn't exist on schema: ${schema}`);
+                }
+                return new AvroEnumType(schema.symbols);
+            case AvroComplex.MAP:
+                if (!schema.values) {
+                    throw new Error(`Required attribute 'values' doesn't exist on schema: ${schema}`);
+                }
+                return new AvroMapType(AvroType.fromSchema(schema.values));
+            case AvroComplex.ARRAY: // Unused today
+            case AvroComplex.FIXED: // Unused today
+            default:
+                throw new Error(`Unexpected Avro type ${type} in ${schema}`);
+        }
+    }
+}
+class AvroPrimitiveType extends AvroType {
+    constructor(primitive) {
+        super();
+        this._primitive = primitive;
+    }
+    // eslint-disable-next-line @typescript-eslint/no-wrapper-object-types
+    read(stream, options = {}) {
+        switch (this._primitive) {
+            case AvroPrimitive.NULL:
+                return AvroParser.readNull();
+            case AvroPrimitive.BOOLEAN:
+                return AvroParser.readBoolean(stream, options);
+            case AvroPrimitive.INT:
+                return AvroParser.readInt(stream, options);
+            case AvroPrimitive.LONG:
+                return AvroParser.readLong(stream, options);
+            case AvroPrimitive.FLOAT:
+                return AvroParser.readFloat(stream, options);
+            case AvroPrimitive.DOUBLE:
+                return AvroParser.readDouble(stream, options);
+            case AvroPrimitive.BYTES:
+                return AvroParser.readBytes(stream, options);
+            case AvroPrimitive.STRING:
+                return AvroParser.readString(stream, options);
+            default:
+                throw new Error("Unknown Avro Primitive");
+        }
+    }
+}
+class AvroEnumType extends AvroType {
+    constructor(symbols) {
+        super();
+        this._symbols = symbols;
+    }
+    // eslint-disable-next-line @typescript-eslint/no-wrapper-object-types
+    async read(stream, options = {}) {
+        const value = await AvroParser.readInt(stream, options);
+        return this._symbols[value];
+    }
+}
+class AvroUnionType extends AvroType {
+    constructor(types) {
+        super();
+        this._types = types;
+    }
+    async read(stream, options = {}) {
+        const typeIndex = await AvroParser.readInt(stream, options);
+        return this._types[typeIndex].read(stream, options);
+    }
+}
+class AvroMapType extends AvroType {
+    constructor(itemType) {
+        super();
+        this._itemType = itemType;
+    }
+    // eslint-disable-next-line @typescript-eslint/no-wrapper-object-types
+    read(stream, options = {}) {
+        const readItemMethod = (s, opts) => {
+            return this._itemType.read(s, opts);
+        };
+        return AvroParser.readMap(stream, readItemMethod, options);
+    }
+}
+class AvroRecordType extends AvroType {
+    constructor(fields, name) {
+        super();
+        this._fields = fields;
+        this._name = name;
+    }
+    // eslint-disable-next-line @typescript-eslint/no-wrapper-object-types
+    async read(stream, options = {}) {
+        // eslint-disable-next-line @typescript-eslint/no-wrapper-object-types
+        const record = {};
+        record["$schema"] = this._name;
+        for (const key in this._fields) {
+            if (Object.prototype.hasOwnProperty.call(this._fields, key)) {
+                record[key] = await this._fields[key].read(stream, options);
+            }
+        }
+        return record;
+    }
+}
+
+// Copyright (c) Microsoft Corporation.
+// Licensed under the MIT License.
+function arraysEqual(a, b) {
+    if (a === b)
+        return true;
+    if (a == null || b == null)
+        return false;
+    if (a.length !== b.length)
+        return false;
+    for (let i = 0; i < a.length; ++i) {
+        if (a[i] !== b[i])
+            return false;
+    }
+    return true;
+}
+
+// Copyright (c) Microsoft Corporation.
+// Licensed under the MIT License.
+class AvroReader {
+    get blockOffset() {
+        return this._blockOffset;
+    }
+    get objectIndex() {
+        return this._objectIndex;
+    }
+    constructor(dataStream, headerStream, currentBlockOffset, indexWithinCurrentBlock) {
+        this._dataStream = dataStream;
+        this._headerStream = headerStream || dataStream;
+        this._initialized = false;
+        this._blockOffset = currentBlockOffset || 0;
+        this._objectIndex = indexWithinCurrentBlock || 0;
+        this._initialBlockOffset = currentBlockOffset || 0;
+    }
+    async initialize(options = {}) {
+        const header = await AvroParser.readFixedBytes(this._headerStream, AVRO_INIT_BYTES.length, {
+            abortSignal: options.abortSignal,
+        });
+        if (!arraysEqual(header, AVRO_INIT_BYTES)) {
+            throw new Error("Stream is not an Avro file.");
+        }
+        // File metadata is written as if defined by the following map schema:
+        // { "type": "map", "values": "bytes"}
+        this._metadata = await AvroParser.readMap(this._headerStream, AvroParser.readString, {
+            abortSignal: options.abortSignal,
+        });
+        // Validate codec
+        const codec = this._metadata[AVRO_CODEC_KEY];
+        if (!(codec === undefined || codec === null || codec === "null")) {
+            throw new Error("Codecs are not supported");
+        }
+        // The 16-byte, randomly-generated sync marker for this file.
+        this._syncMarker = await AvroParser.readFixedBytes(this._headerStream, AVRO_SYNC_MARKER_SIZE, {
+            abortSignal: options.abortSignal,
+        });
+        // Parse the schema
+        const schema = JSON.parse(this._metadata[AVRO_SCHEMA_KEY]);
+        this._itemType = AvroType.fromSchema(schema);
+        if (this._blockOffset === 0) {
+            this._blockOffset = this._initialBlockOffset + this._dataStream.position;
+        }
+        this._itemsRemainingInBlock = await AvroParser.readLong(this._dataStream, {
+            abortSignal: options.abortSignal,
+        });
+        // skip block length
+        await AvroParser.readLong(this._dataStream, { abortSignal: options.abortSignal });
+        this._initialized = true;
+        if (this._objectIndex && this._objectIndex > 0) {
+            for (let i = 0; i < this._objectIndex; i++) {
+                await this._itemType.read(this._dataStream, { abortSignal: options.abortSignal });
+                this._itemsRemainingInBlock--;
+            }
+        }
+    }
+    hasNext() {
+        return !this._initialized || this._itemsRemainingInBlock > 0;
+    }
+    parseObjects() {
+        return tslib.__asyncGenerator(this, arguments, function* parseObjects_1(options = {}) {
+            if (!this._initialized) {
+                yield tslib.__await(this.initialize(options));
+            }
+            while (this.hasNext()) {
+                const result = yield tslib.__await(this._itemType.read(this._dataStream, {
+                    abortSignal: options.abortSignal,
+                }));
+                this._itemsRemainingInBlock--;
+                this._objectIndex++;
+                if (this._itemsRemainingInBlock === 0) {
+                    const marker = yield tslib.__await(AvroParser.readFixedBytes(this._dataStream, AVRO_SYNC_MARKER_SIZE, {
+                        abortSignal: options.abortSignal,
+                    }));
+                    this._blockOffset = this._initialBlockOffset + this._dataStream.position;
+                    this._objectIndex = 0;
+                    if (!arraysEqual(this._syncMarker, marker)) {
+                        throw new Error("Stream is not a valid Avro file.");
+                    }
+                    try {
+                        this._itemsRemainingInBlock = yield tslib.__await(AvroParser.readLong(this._dataStream, {
+                            abortSignal: options.abortSignal,
+                        }));
+                    }
+                    catch (_a) {
+                        // We hit the end of the stream.
+                        this._itemsRemainingInBlock = 0;
+                    }
+                    if (this._itemsRemainingInBlock > 0) {
+                        // Ignore block size
+                        yield tslib.__await(AvroParser.readLong(this._dataStream, { abortSignal: options.abortSignal }));
+                    }
+                }
+                yield yield tslib.__await(result);
+            }
+        });
+    }
+}
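+
+// A minimal usage sketch (hypothetical `nodeReadable` variable): AvroReader consumes an
+// AvroReadable and yields one parsed record per object in the Avro file.
+//
+//   const reader = new AvroReader(new AvroReadableFromStream(nodeReadable));
+//   for await (const record of reader.parseObjects()) {
+//       console.log(record["$schema"], record);
+//   }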
+
+// Copyright (c) Microsoft Corporation.
+// Licensed under the MIT License.
+class AvroReadable {
+}
+
+// Copyright (c) Microsoft Corporation.
+// Licensed under the MIT License.
+const ABORT_ERROR = new abortController.AbortError("Reading from the avro stream was aborted.");
+class AvroReadableFromStream extends AvroReadable {
+    toUint8Array(data) {
+        if (typeof data === "string") {
+            return Buffer.from(data);
+        }
+        return data;
+    }
+    constructor(readable) {
+        super();
+        this._readable = readable;
+        this._position = 0;
+    }
+    get position() {
+        return this._position;
+    }
+    async read(size, options = {}) {
+        var _a;
+        if ((_a = options.abortSignal) === null || _a === void 0 ? void 0 : _a.aborted) {
+            throw ABORT_ERROR;
+        }
+        if (size < 0) {
+            throw new Error(`size parameter should not be negative: ${size}`);
+        }
+        if (size === 0) {
+            return new Uint8Array();
+        }
+        if (!this._readable.readable) {
+            throw new Error("Stream no longer readable.");
+        }
+        // See if there is already enough data.
+        const chunk = this._readable.read(size);
+        if (chunk) {
+            this._position += chunk.length;
+            // chunk.length may be less than the desired size if the stream ends.
+            return this.toUint8Array(chunk);
+        }
+        else {
+            // register callback to wait for enough data to read
+            return new Promise((resolve, reject) => {
+                /* eslint-disable @typescript-eslint/no-use-before-define */
+                const cleanUp = () => {
+                    this._readable.removeListener("readable", readableCallback);
+                    this._readable.removeListener("error", rejectCallback);
+                    this._readable.removeListener("end", rejectCallback);
+                    this._readable.removeListener("close", rejectCallback);
+                    if (options.abortSignal) {
+                        options.abortSignal.removeEventListener("abort", abortHandler);
+                    }
+                };
+                const readableCallback = () => {
+                    const callbackChunk = this._readable.read(size);
+                    if (callbackChunk) {
+                        this._position += callbackChunk.length;
+                        cleanUp();
+                        // callbackChunk.length may be less than the desired size if the stream ends.
+                        resolve(this.toUint8Array(callbackChunk));
+                    }
+                };
+                const rejectCallback = () => {
+                    cleanUp();
+                    reject();
+                };
+                const abortHandler = () => {
+                    cleanUp();
+                    reject(ABORT_ERROR);
+                };
+                this._readable.on("readable", readableCallback);
+                this._readable.once("error", rejectCallback);
+                this._readable.once("end", rejectCallback);
+                this._readable.once("close", rejectCallback);
+                if (options.abortSignal) {
+                    options.abortSignal.addEventListener("abort", abortHandler);
+                }
+                /* eslint-enable @typescript-eslint/no-use-before-define */
+            });
+        }
+    }
+}
+
+// Copyright (c) Microsoft Corporation.
+// Licensed under the MIT License.
+/**
+ * ONLY AVAILABLE IN NODE.JS RUNTIME.
+ *
+ * A Node.js BlobQuickQueryStream that internally parses the Avro data stream returned by a blob query.
+ */
+class BlobQuickQueryStream extends stream.Readable {
+    /**
+     * Creates an instance of BlobQuickQueryStream.
+     *
+     * @param source - The current ReadableStream returned from getter
+     * @param options -
+     */
+    constructor(source, options = {}) {
+        super();
+        this.avroPaused = true;
+        this.source = source;
+        this.onProgress = options.onProgress;
+        this.onError = options.onError;
+        this.avroReader = new AvroReader(new AvroReadableFromStream(this.source));
+        this.avroIter = this.avroReader.parseObjects({ abortSignal: options.abortSignal });
+    }
+    _read() {
+        if (this.avroPaused) {
+            this.readInternal().catch((err) => {
+                this.emit("error", err);
+            });
+        }
+    }
+    async readInternal() {
+        this.avroPaused = false;
+        let avroNext;
+        do {
+            avroNext = await this.avroIter.next();
+            if (avroNext.done) {
+                break;
+            }
+            const obj = avroNext.value;
+            const schema = obj.$schema;
+            if (typeof schema !== "string") {
+                throw Error("Missing schema in avro record.");
+            }
+            switch (schema) {
+                case "com.microsoft.azure.storage.queryBlobContents.resultData":
+                    {
+                        const data = obj.data;
+                        if (data instanceof Uint8Array === false) {
+                            throw Error("Invalid data in avro result record.");
+                        }
+                        if (!this.push(Buffer.from(data))) {
+                            this.avroPaused = true;
+                        }
+                    }
+                    break;
+                case "com.microsoft.azure.storage.queryBlobContents.progress":
+                    {
+                        const bytesScanned = obj.bytesScanned;
+                        if (typeof bytesScanned !== "number") {
+                            throw Error("Invalid bytesScanned in avro progress record.");
+                        }
+                        if (this.onProgress) {
+                            this.onProgress({ loadedBytes: bytesScanned });
+                        }
+                    }
+                    break;
+                case "com.microsoft.azure.storage.queryBlobContents.end":
+                    if (this.onProgress) {
+                        const totalBytes = obj.totalBytes;
+                        if (typeof totalBytes !== "number") {
+                            throw Error("Invalid totalBytes in avro end record.");
+                        }
+                        this.onProgress({ loadedBytes: totalBytes });
+                    }
+                    this.push(null);
+                    break;
+                case "com.microsoft.azure.storage.queryBlobContents.error":
+                    if (this.onError) {
+                        const fatal = obj.fatal;
+                        if (typeof fatal !== "boolean") {
+                            throw Error("Invalid fatal in avro error record.");
+                        }
+                        const name = obj.name;
+                        if (typeof name !== "string") {
+                            throw Error("Invalid name in avro error record.");
+                        }
+                        const description = obj.description;
+                        if (typeof description !== "string") {
+                            throw Error("Invalid description in avro error record.");
+                        }
+                        const position = obj.position;
+                        if (typeof position !== "number") {
+                            throw Error("Invalid position in avro error record.");
+                        }
+                        this.onError({
+                            position,
+                            name,
+                            isFatal: fatal,
+                            description,
+                        });
+                    }
+                    break;
+                default:
+                    throw Error(`Unknown schema ${schema} in avro progress record.`);
+            }
+        } while (!avroNext.done && !this.avroPaused);
+    }
+}
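+
+// A minimal usage sketch (hypothetical `responseBodyStream` variable): the stream is
+// normally created by the SDK from a blob query response body and consumed like any
+// other Readable.
+//
+//   const quickQueryStream = new BlobQuickQueryStream(responseBodyStream, {
+//       onProgress: (progress) => console.log(`scanned ${progress.loadedBytes} bytes`),
+//       onError: (err) => console.error(`${err.name}: ${err.description}`),
+//   });
+//   quickQueryStream.pipe(process.stdout);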
+
+// Copyright (c) Microsoft Corporation.
+// Licensed under the MIT License.
+/**
+ * ONLY AVAILABLE IN NODE.JS RUNTIME.
+ *
+ * BlobQueryResponse implements the BlobDownloadResponseModel interface, and in the Node.js runtime it will
+ * parse the Avro data returned by a blob query.
+ */
+class BlobQueryResponse {
+    /**
+     * Indicates that the service supports
+     * requests for partial file content.
+     *
+     * @readonly
+     */
+    get acceptRanges() {
+        return this.originalResponse.acceptRanges;
+    }
+    /**
+     * Returns the 'Cache-Control' value if it was previously specified
+     * for the file.
+     *
+     * @readonly
+     */
+    get cacheControl() {
+        return this.originalResponse.cacheControl;
+    }
+    /**
+     * Returns the value that was specified
+     * for the 'x-ms-content-disposition' header and specifies how to process the
+     * response.
+     *
+     * @readonly
+     */
+    get contentDisposition() {
+        return this.originalResponse.contentDisposition;
+    }
+    /**
+     * Returns the value that was specified
+     * for the Content-Encoding request header.
+     *
+     * @readonly
+     */
+    get contentEncoding() {
+        return this.originalResponse.contentEncoding;
+    }
+    /**
+     * Returns the value that was specified
+     * for the Content-Language request header.
+     *
+     * @readonly
+     */
+    get contentLanguage() {
+        return this.originalResponse.contentLanguage;
+    }
+    /**
+     * The current sequence number for a
+     * page blob. This header is not returned for block blobs or append blobs.
+     *
+     * @readonly
+     */
+    get blobSequenceNumber() {
+        return this.originalResponse.blobSequenceNumber;
+    }
+    /**
+     * The blob's type. Possible values include:
+     * 'BlockBlob', 'PageBlob', 'AppendBlob'.
+     *
+     * @readonly
+     */
+    get blobType() {
+        return this.originalResponse.blobType;
+    }
+    /**
+     * The number of bytes present in the
+     * response body.
+     *
+     * @readonly
+     */
+    get contentLength() {
+        return this.originalResponse.contentLength;
+    }
+    /**
+     * If the file has an MD5 hash and the
+     * request is to read the full file, this response header is returned so that
+     * the client can check for message content integrity. If the request is to
+     * read a specified range and the 'x-ms-range-get-content-md5' is set to
+     * true, then the request returns an MD5 hash for the range, as long as the
+     * range size is less than or equal to 4 MB. If neither of these sets of
+     * conditions is true, then no value is returned for the 'Content-MD5'
+     * header.
+     *
+     * @readonly
+     */
+    get contentMD5() {
+        return this.originalResponse.contentMD5;
+    }
+    /**
+     * Indicates the range of bytes returned if
+     * the client requested a subset of the file by setting the Range request
+     * header.
+     *
+     * @readonly
+     */
+    get contentRange() {
+        return this.originalResponse.contentRange;
+    }
+    /**
+     * The content type specified for the file.
+     * The default content type is 'application/octet-stream'
+     *
+     * @readonly
+     */
+    get contentType() {
+        return this.originalResponse.contentType;
+    }
+    /**
+     * Conclusion time of the last attempted
+     * Copy File operation where this file was the destination file. This value
+     * can specify the time of a completed, aborted, or failed copy attempt.
+     *
+     * @readonly
+     */
+    get copyCompletedOn() {
+        return undefined;
+    }
+    /**
+     * String identifier for the last attempted Copy
+     * File operation where this file was the destination file.
+     *
+     * @readonly
+     */
+    get copyId() {
+        return this.originalResponse.copyId;
+    }
+    /**
+     * Contains the number of bytes copied and
+     * the total bytes in the source in the last attempted Copy File operation
+     * where this file was the destination file. Can show between 0 and
+     * Content-Length bytes copied.
+     *
+     * @readonly
+     */
+    get copyProgress() {
+        return this.originalResponse.copyProgress;
+    }
+    /**
+     * URL up to 2KB in length that specifies the
+     * source file used in the last attempted Copy File operation where this file
+     * was the destination file.
+     *
+     * @readonly
+     */
+    get copySource() {
+        return this.originalResponse.copySource;
+    }
+    /**
+     * State of the copy operation
+     * identified by 'x-ms-copy-id'. Possible values include: 'pending',
+     * 'success', 'aborted', 'failed'
+     *
+     * @readonly
+     */
+    get copyStatus() {
+        return this.originalResponse.copyStatus;
+    }
+    /**
+     * Only appears when
+     * x-ms-copy-status is failed or pending. Describes cause of fatal or
+     * non-fatal copy operation failure.
+     *
+     * @readonly
+     */
+    get copyStatusDescription() {
+        return this.originalResponse.copyStatusDescription;
+    }
+    /**
+     * When a blob is leased,
+     * specifies whether the lease is of infinite or fixed duration. Possible
+     * values include: 'infinite', 'fixed'.
+     *
+     * @readonly
+     */
+    get leaseDuration() {
+        return this.originalResponse.leaseDuration;
+    }
+    /**
+     * Lease state of the blob. Possible
+     * values include: 'available', 'leased', 'expired', 'breaking', 'broken'.
+     *
+     * @readonly
+     */
+    get leaseState() {
+        return this.originalResponse.leaseState;
+    }
+    /**
+     * The current lease status of the
+     * blob. Possible values include: 'locked', 'unlocked'.
+     *
+     * @readonly
+     */
+    get leaseStatus() {
+        return this.originalResponse.leaseStatus;
+    }
+    /**
+     * A UTC date/time value generated by the service that
+     * indicates the time at which the response was initiated.
+     *
+     * @readonly
+     */
+    get date() {
+        return this.originalResponse.date;
+    }
+    /**
+     * The number of committed blocks
+     * present in the blob. This header is returned only for append blobs.
+     *
+     * @readonly
+     */
+    get blobCommittedBlockCount() {
+        return this.originalResponse.blobCommittedBlockCount;
+    }
+    /**
+     * The ETag contains a value that you can use to
+     * perform operations conditionally, in quotes.
+     *
+     * @readonly
+     */
+    get etag() {
+        return this.originalResponse.etag;
+    }
+    /**
+     * The error code.
+     *
+     * @readonly
+     */
+    get errorCode() {
+        return this.originalResponse.errorCode;
+    }
+    /**
+     * The value of this header is set to
+     * true if the file data and application metadata are completely encrypted
+     * using the specified algorithm. Otherwise, the value is set to false (when
+     * the file is unencrypted, or if only parts of the file/application metadata
+     * are encrypted).
+     *
+     * @readonly
+     */
+    get isServerEncrypted() {
+        return this.originalResponse.isServerEncrypted;
+    }
+    /**
+     * If the blob has an MD5 hash, and if
+     * request contains range header (Range or x-ms-range), this response header
+     * is returned with the value of the whole blob's MD5 value. This value may
+     * or may not be equal to the value returned in Content-MD5 header, with the
+     * latter calculated from the requested range.
+     *
+     * @readonly
+     */
+    get blobContentMD5() {
+        return this.originalResponse.blobContentMD5;
+    }
+    /**
+     * Returns the date and time the file was last
+     * modified. Any operation that modifies the file or its properties updates
+     * the last modified time.
+     *
+     * @readonly
+     */
+    get lastModified() {
+        return this.originalResponse.lastModified;
+    }
+    /**
+     * A name-value pair
+     * to associate with a file storage object.
+     *
+     * @readonly
+     */
+    get metadata() {
+        return this.originalResponse.metadata;
+    }
+    /**
+     * This header uniquely identifies the request
+     * that was made and can be used for troubleshooting the request.
+     *
+     * @readonly
+     */
+    get requestId() {
+        return this.originalResponse.requestId;
+    }
+    /**
+     * If a client request id header is sent in the request, this header will be present in the
+     * response with the same value.
+     *
+     * @readonly
+     */
+    get clientRequestId() {
+        return this.originalResponse.clientRequestId;
+    }
+    /**
+     * Indicates the version of the Blob service used
+     * to execute the request.
+     *
+     * @readonly
+     */
+    get version() {
+        return this.originalResponse.version;
+    }
+    /**
+     * The SHA-256 hash of the encryption key used to encrypt the blob. This value is only returned
+     * when the blob was encrypted with a customer-provided key.
+     *
+     * @readonly
+     */
+    get encryptionKeySha256() {
+        return this.originalResponse.encryptionKeySha256;
+    }
+    /**
+     * If the request is to read a specified range and the x-ms-range-get-content-crc64 is set to
+     * true, then the request returns a crc64 for the range, as long as the range size is less than
+     * or equal to 4 MB. If both x-ms-range-get-content-crc64 & x-ms-range-get-content-md5 are
+     * specified in the same request, it will fail with 400 (Bad Request).
+     */
+    get contentCrc64() {
+        return this.originalResponse.contentCrc64;
+    }
+    /**
+     * The response body as a browser Blob.
+     * Always undefined in node.js.
+     *
+     * @readonly
+     */
+    get blobBody() {
+        return undefined;
+    }
+    /**
+     * The response body as a node.js Readable stream.
+     * Always undefined in the browser.
+     *
+ * It will parse the Avro data returned by a blob query.
+     *
+     * @readonly
+     */
+    get readableStreamBody() {
+        return coreUtil.isNode ? this.blobDownloadStream : undefined;
+    }
+    /**
+     * The HTTP response.
+     */
+    get _response() {
+        return this.originalResponse._response;
+    }
+    /**
+     * Creates an instance of BlobQueryResponse.
+     *
+     * @param originalResponse -
+     * @param options -
+     */
+    constructor(originalResponse, options = {}) {
+        this.originalResponse = originalResponse;
+        this.blobDownloadStream = new BlobQuickQueryStream(this.originalResponse.readableStreamBody, options);
+    }
+}
+
+// Copyright (c) Microsoft Corporation.
+// Licensed under the MIT License.
+/**
+ * Represents the access tier on a blob.
+ * For detailed information about block blob level tiering see {@link https://docs.microsoft.com/azure/storage/blobs/storage-blob-storage-tiers|Hot, cool and archive storage tiers.}
+ */
+exports.BlockBlobTier = void 0;
+(function (BlockBlobTier) {
+    /**
+     * Optimized for storing data that is accessed frequently.
+     */
+    BlockBlobTier["Hot"] = "Hot";
+    /**
+     * Optimized for storing data that is infrequently accessed and stored for at least 30 days.
+     */
+    BlockBlobTier["Cool"] = "Cool";
+    /**
+     * Optimized for storing data that is rarely accessed.
+     */
+    BlockBlobTier["Cold"] = "Cold";
+    /**
+     * Optimized for storing data that is rarely accessed and stored for at least 180 days
+     * with flexible latency requirements (on the order of hours).
+     */
+    BlockBlobTier["Archive"] = "Archive";
+})(exports.BlockBlobTier || (exports.BlockBlobTier = {}));
+/**
+ * Specifies the page blob tier to set the blob to. This is only applicable to page blobs on premium storage accounts.
+ * Please see {@link https://docs.microsoft.com/azure/storage/storage-premium-storage#scalability-and-performance-targets|here}
+ * for detailed information on the corresponding IOPS and throughput per PageBlobTier.
+ */
+exports.PremiumPageBlobTier = void 0;
+(function (PremiumPageBlobTier) {
+    /**
+     * P4 Tier.
+     */
+    PremiumPageBlobTier["P4"] = "P4";
+    /**
+     * P6 Tier.
+     */
+    PremiumPageBlobTier["P6"] = "P6";
+    /**
+     * P10 Tier.
+     */
+    PremiumPageBlobTier["P10"] = "P10";
+    /**
+     * P15 Tier.
+     */
+    PremiumPageBlobTier["P15"] = "P15";
+    /**
+     * P20 Tier.
+     */
+    PremiumPageBlobTier["P20"] = "P20";
+    /**
+     * P30 Tier.
+     */
+    PremiumPageBlobTier["P30"] = "P30";
+    /**
+     * P40 Tier.
+     */
+    PremiumPageBlobTier["P40"] = "P40";
+    /**
+     * P50 Tier.
+     */
+    PremiumPageBlobTier["P50"] = "P50";
+    /**
+     * P60 Tier.
+     */
+    PremiumPageBlobTier["P60"] = "P60";
+    /**
+     * P70 Tier.
+     */
+    PremiumPageBlobTier["P70"] = "P70";
+    /**
+     * P80 Tier.
+     */
+    PremiumPageBlobTier["P80"] = "P80";
+})(exports.PremiumPageBlobTier || (exports.PremiumPageBlobTier = {}));
+function toAccessTier(tier) {
+    if (tier === undefined) {
+        return undefined;
+    }
+    return tier; // No longer check whether the string is a valid AccessTier; leave that decision to the underlying service logic.
+}
+function ensureCpkIfSpecified(cpk, isHttps) {
+    if (cpk && !isHttps) {
+        throw new RangeError("Customer-provided encryption key must be used over HTTPS.");
+    }
+    if (cpk && !cpk.encryptionAlgorithm) {
+        cpk.encryptionAlgorithm = EncryptionAlgorithmAES25;
+    }
+}
+/**
+ * Defines the known cloud audiences for Storage.
+ */
+exports.StorageBlobAudience = void 0;
+(function (StorageBlobAudience) {
+    /**
+     * The OAuth scope to use to retrieve an AAD token for Azure Storage.
+     */
+    StorageBlobAudience["StorageOAuthScopes"] = "https://storage.azure.com/.default";
+    /**
+     * The OAuth scope to use to retrieve an AAD token for Azure Disk.
+     */
+    StorageBlobAudience["DiskComputeOAuthScopes"] = "https://disk.compute.azure.com/.default";
+})(exports.StorageBlobAudience || (exports.StorageBlobAudience = {}));
+/**
+ * Gets the OAuth audience for a storage account's Blob service.
+ */
+function getBlobServiceAccountAudience(storageAccountName) {
+    return `https://${storageAccountName}.blob.core.windows.net/.default`;
+}
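+
+// For example, getBlobServiceAccountAudience("mystorageaccount") returns
+// "https://mystorageaccount.blob.core.windows.net/.default".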
+
+// Copyright (c) Microsoft Corporation.
+// Licensed under the MIT License.
+/**
+ * Function that converts PageRange and ClearRange to a common Range object.
+ * PageRange and ClearRange have start and end, while Range has offset and count;
+ * this function normalizes them to Range.
+ * @param response - Model PageBlob Range response
+ */
+function rangeResponseFromModel(response) {
+    const pageRange = (response._response.parsedBody.pageRange || []).map((x) => ({
+        offset: x.start,
+        count: x.end - x.start,
+    }));
+    const clearRange = (response._response.parsedBody.clearRange || []).map((x) => ({
+        offset: x.start,
+        count: x.end - x.start,
+    }));
+    return Object.assign(Object.assign({}, response), { pageRange,
+        clearRange, _response: Object.assign(Object.assign({}, response._response), { parsedBody: {
+                pageRange,
+                clearRange,
+            } }) });
+}
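+
+// For example, a parsed pageRange entry { start: 0, end: 511 } is mapped to
+// { offset: 0, count: 511 } (count = end - start, as computed above).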
+
+// Copyright (c) Microsoft Corporation.
+// Licensed under the MIT License.
+/**
+ * This is the poller returned by {@link BlobClient.beginCopyFromURL}.
+ * It cannot be instantiated directly outside of this package.
+ *
+ * @hidden
+ */
+class BlobBeginCopyFromUrlPoller extends coreLro.Poller {
+    constructor(options) {
+        const { blobClient, copySource, intervalInMs = 15000, onProgress, resumeFrom, startCopyFromURLOptions, } = options;
+        let state;
+        if (resumeFrom) {
+            state = JSON.parse(resumeFrom).state;
+        }
+        const operation = makeBlobBeginCopyFromURLPollOperation(Object.assign(Object.assign({}, state), { blobClient,
+            copySource,
+            startCopyFromURLOptions }));
+        super(operation);
+        if (typeof onProgress === "function") {
+            this.onProgress(onProgress);
+        }
+        this.intervalInMs = intervalInMs;
+    }
+    delay() {
+        return coreUtil.delay(this.intervalInMs);
+    }
+}
+/**
+ * Note: Intentionally using function expression over arrow function expression
+ * so that the function can be invoked with a different context.
+ * This affects what `this` refers to.
+ * @hidden
+ */
+const cancel = async function cancel(options = {}) {
+    const state = this.state;
+    const { copyId } = state;
+    if (state.isCompleted) {
+        return makeBlobBeginCopyFromURLPollOperation(state);
+    }
+    if (!copyId) {
+        state.isCancelled = true;
+        return makeBlobBeginCopyFromURLPollOperation(state);
+    }
+    // if abortCopyFromURL throws, it will bubble up to user's poller.cancelOperation call
+    await state.blobClient.abortCopyFromURL(copyId, {
+        abortSignal: options.abortSignal,
+    });
+    state.isCancelled = true;
+    return makeBlobBeginCopyFromURLPollOperation(state);
+};
+/**
+ * Note: Intentionally using function expression over arrow function expression
+ * so that the function can be invoked with a different context.
+ * This affects what `this` refers to.
+ * @hidden
+ */
+const update = async function update(options = {}) {
+    const state = this.state;
+    const { blobClient, copySource, startCopyFromURLOptions } = state;
+    if (!state.isStarted) {
+        state.isStarted = true;
+        const result = await blobClient.startCopyFromURL(copySource, startCopyFromURLOptions);
+        // copyId is needed to abort
+        state.copyId = result.copyId;
+        if (result.copyStatus === "success") {
+            state.result = result;
+            state.isCompleted = true;
+        }
+    }
+    else if (!state.isCompleted) {
+        try {
+            const result = await state.blobClient.getProperties({ abortSignal: options.abortSignal });
+            const { copyStatus, copyProgress } = result;
+            const prevCopyProgress = state.copyProgress;
+            if (copyProgress) {
+                state.copyProgress = copyProgress;
+            }
+            if (copyStatus === "pending" &&
+                copyProgress !== prevCopyProgress &&
+                typeof options.fireProgress === "function") {
+                // trigger in setTimeout, or swallow error?
+                options.fireProgress(state);
+            }
+            else if (copyStatus === "success") {
+                state.result = result;
+                state.isCompleted = true;
+            }
+            else if (copyStatus === "failed") {
+                state.error = new Error(`Blob copy failed with reason: "${result.copyStatusDescription || "unknown"}"`);
+                state.isCompleted = true;
+            }
+        }
+        catch (err) {
+            state.error = err;
+            state.isCompleted = true;
+        }
+    }
+    return makeBlobBeginCopyFromURLPollOperation(state);
+};
+/**
+ * Note: Intentionally using function expression over arrow function expression
+ * so that the function can be invoked with a different context.
+ * This affects what `this` refers to.
+ * @hidden
+ */
+const toString = function toString() {
+    return JSON.stringify({ state: this.state }, (key, value) => {
+        // remove blobClient from serialized state since a client can't be hydrated from this info.
+        if (key === "blobClient") {
+            return undefined;
+        }
+        return value;
+    });
+};
+/**
+ * Creates a poll operation given the provided state.
+ * @hidden
+ */
+function makeBlobBeginCopyFromURLPollOperation(state) {
+    return {
+        state: Object.assign({}, state),
+        cancel,
+        toString,
+        update,
+    };
+}
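+// Resumption sketch (illustrative only; `blobClient` and `copySourceUrl` are hypothetical values):
+//
+//   const poller = await blobClient.beginCopyFromURL(copySourceUrl);
+//   const serialized = poller.toString(); // serialized state without blobClient, see toString() above
+//   // ...later, possibly in another process:
+//   const resumed = await blobClient.beginCopyFromURL(copySourceUrl, { resumeFrom: serialized });
+//   await resumed.pollUntilDone();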
+
+// Copyright (c) Microsoft Corporation.
+// Licensed under the MIT License.
+/**
+ * Generate a range string. For example:
+ *
+ * "bytes=255-" or "bytes=0-511"
+ *
+ * @param iRange -
+ */
+function rangeToString(iRange) {
+    if (iRange.offset < 0) {
+        throw new RangeError(`Range.offset cannot be smaller than 0.`);
+    }
+    if (iRange.count && iRange.count <= 0) {
+        throw new RangeError(`Range.count must be larger than 0. Leave it undefined if you want a range from offset to the end.`);
+    }
+    return iRange.count
+        ? `bytes=${iRange.offset}-${iRange.offset + iRange.count - 1}`
+        : `bytes=${iRange.offset}-`;
+}
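+// Examples of the strings produced above (derived directly from the implementation):
+//
+//   rangeToString({ offset: 0, count: 512 }); // => "bytes=0-511"
+//   rangeToString({ offset: 255 });           // => "bytes=255-"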
+
+// Copyright (c) Microsoft Corporation.
+// Licensed under the MIT License.
+// In browser, during webpack or browserify bundling, this module will be replaced by 'events'
+// https://github.com/Gozala/events
+/**
+ * States for Batch.
+ */
+var BatchStates;
+(function (BatchStates) {
+    BatchStates[BatchStates["Good"] = 0] = "Good";
+    BatchStates[BatchStates["Error"] = 1] = "Error";
+})(BatchStates || (BatchStates = {}));
+/**
+ * Batch provides basic parallel execution with concurrency limits.
+ * It stops executing the remaining operations when one of the executed operations throws an error.
+ * However, Batch cannot cancel ongoing operations; you need to cancel them yourself.
+ */
+class Batch {
+    /**
+     * Creates an instance of Batch.
+     * @param concurrency -
+     */
+    constructor(concurrency = 5) {
+        /**
+         * Number of active operations under execution.
+         */
+        this.actives = 0;
+        /**
+         * Number of operations that have completed.
+         */
+        this.completed = 0;
+        /**
+         * Offset of next operation to be executed.
+         */
+        this.offset = 0;
+        /**
+         * Operation array to be executed.
+         */
+        this.operations = [];
+        /**
+         * State of the Batch. When an error happens, the state turns to error and
+         * the Batch stops executing the remaining operations.
+         */
+        this.state = BatchStates.Good;
+        if (concurrency < 1) {
+            throw new RangeError("concurrency must be larger than 0");
+        }
+        this.concurrency = concurrency;
+        this.emitter = new events.EventEmitter();
+    }
+    /**
+     * Add an operation to the queue.
+     *
+     * @param operation -
+     */
+    addOperation(operation) {
+        this.operations.push(async () => {
+            try {
+                this.actives++;
+                await operation();
+                this.actives--;
+                this.completed++;
+                this.parallelExecute();
+            }
+            catch (error) {
+                this.emitter.emit("error", error);
+            }
+        });
+    }
+    /**
+     * Start executing the operations in the queue.
+     *
+     */
+    async do() {
+        if (this.operations.length === 0) {
+            return Promise.resolve();
+        }
+        this.parallelExecute();
+        return new Promise((resolve, reject) => {
+            this.emitter.on("finish", resolve);
+            this.emitter.on("error", (error) => {
+                this.state = BatchStates.Error;
+                reject(error);
+            });
+        });
+    }
+    /**
+     * Get the next operation to be executed. Returns null when reaching the end.
+     *
+     */
+    nextOperation() {
+        if (this.offset < this.operations.length) {
+            return this.operations[this.offset++];
+        }
+        return null;
+    }
+    /**
+     * Start executing operations. One of the most important differences between
+     * this method and do() is that do() wraps the execution in an async method that returns a promise.
+     *
+     */
+    parallelExecute() {
+        if (this.state === BatchStates.Error) {
+            return;
+        }
+        if (this.completed >= this.operations.length) {
+            this.emitter.emit("finish");
+            return;
+        }
+        while (this.actives < this.concurrency) {
+            const operation = this.nextOperation();
+            if (operation) {
+                operation();
+            }
+            else {
+                return;
+            }
+        }
+    }
+}
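+// Usage sketch (illustrative only; `urls` and `downloadOne` are hypothetical):
+//
+//   const batch = new Batch(4); // at most 4 operations in flight at a time
+//   for (const url of urls) {
+//     batch.addOperation(async () => downloadOne(url));
+//   }
+//   await batch.do(); // resolves when all operations complete, rejects on the first error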
+
+// Copyright (c) Microsoft Corporation.
+// Licensed under the MIT License.
+/**
+ * This class generates a readable stream from the data in an array of buffers.
+ */
+class BuffersStream extends stream.Readable {
+    /**
+     * Creates an instance of BuffersStream that will emit the data
+     * contained in the array of buffers.
+     *
+     * @param buffers - Array of buffers containing the data
+     * @param byteLength - The total length of data contained in the buffers
+     */
+    constructor(buffers, byteLength, options) {
+        super(options);
+        this.buffers = buffers;
+        this.byteLength = byteLength;
+        this.byteOffsetInCurrentBuffer = 0;
+        this.bufferIndex = 0;
+        this.pushedBytesLength = 0;
+        // check byteLength is no larger than buffers[] total length
+        let buffersLength = 0;
+        for (const buf of this.buffers) {
+            buffersLength += buf.byteLength;
+        }
+        if (buffersLength < this.byteLength) {
+            throw new Error("Data size shouldn't be larger than the total length of buffers.");
+        }
+    }
+    /**
+     * Internal _read() that will be called when the stream wants to pull more data in.
+     *
+     * @param size - Optional. The size of data to be read
+     */
+    _read(size) {
+        if (this.pushedBytesLength >= this.byteLength) {
+            this.push(null);
+        }
+        if (!size) {
+            size = this.readableHighWaterMark;
+        }
+        const outBuffers = [];
+        let i = 0;
+        while (i < size && this.pushedBytesLength < this.byteLength) {
+            // The last buffer may be longer than the data it contains.
+            const remainingDataInAllBuffers = this.byteLength - this.pushedBytesLength;
+            const remainingCapacityInThisBuffer = this.buffers[this.bufferIndex].byteLength - this.byteOffsetInCurrentBuffer;
+            const remaining = Math.min(remainingCapacityInThisBuffer, remainingDataInAllBuffers);
+            if (remaining > size - i) {
+                // chunkSize = size - i
+                const end = this.byteOffsetInCurrentBuffer + size - i;
+                outBuffers.push(this.buffers[this.bufferIndex].slice(this.byteOffsetInCurrentBuffer, end));
+                this.pushedBytesLength += size - i;
+                this.byteOffsetInCurrentBuffer = end;
+                i = size;
+                break;
+            }
+            else {
+                // chunkSize = remaining
+                const end = this.byteOffsetInCurrentBuffer + remaining;
+                outBuffers.push(this.buffers[this.bufferIndex].slice(this.byteOffsetInCurrentBuffer, end));
+                if (remaining === remainingCapacityInThisBuffer) {
+                    // this.buffers[this.bufferIndex] used up, shift to next one
+                    this.byteOffsetInCurrentBuffer = 0;
+                    this.bufferIndex++;
+                }
+                else {
+                    this.byteOffsetInCurrentBuffer = end;
+                }
+                this.pushedBytesLength += remaining;
+                i += remaining;
+            }
+        }
+        if (outBuffers.length > 1) {
+            this.push(Buffer.concat(outBuffers));
+        }
+        else if (outBuffers.length === 1) {
+            this.push(outBuffers[0]);
+        }
+    }
+}
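+// Usage sketch (illustrative only):
+//
+//   const bufs = [Buffer.from("hello "), Buffer.from("world")];
+//   const rs = new BuffersStream(bufs, 11); // emit exactly 11 bytes from the buffers
+//   rs.pipe(process.stdout);                // prints "hello world"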
+
+// Copyright (c) Microsoft Corporation.
+// Licensed under the MIT License.
+const maxBufferLength = buffer.constants.MAX_LENGTH;
+/**
+ * This class provides a buffer container which conceptually has no hard size limit.
+ * It accepts a capacity, an array of input buffers and the total length of input data.
+ * It will allocate an internal "buffer" of the capacity and fill the data in the input buffers
+ * into the internal "buffer" serially with respect to the total length.
+ * Then by calling PooledBuffer.getReadableStream(), you can get a readable stream
+ * assembled from all the data in the internal "buffer".
+ */
+class PooledBuffer {
+    /**
+     * The size of the data contained in the pooled buffers.
+     */
+    get size() {
+        return this._size;
+    }
+    constructor(capacity, buffers, totalLength) {
+        /**
+         * Internal buffers used to keep the data.
+         * Each buffer has a length of maxBufferLength except the last one.
+         */
+        this.buffers = [];
+        this.capacity = capacity;
+        this._size = 0;
+        // allocate
+        const bufferNum = Math.ceil(capacity / maxBufferLength);
+        for (let i = 0; i < bufferNum; i++) {
+            let len = i === bufferNum - 1 ? capacity % maxBufferLength : maxBufferLength;
+            if (len === 0) {
+                len = maxBufferLength;
+            }
+            this.buffers.push(Buffer.allocUnsafe(len));
+        }
+        if (buffers) {
+            this.fill(buffers, totalLength);
+        }
+    }
+    /**
+     * Fill the internal buffers with data in the input buffers serially
+     * with respect to the total length and the total capacity of the internal buffers.
+     * Data copied will be shifted out of the input buffers.
+     *
+     * @param buffers - Input buffers containing the data to be filled in the pooled buffer
+     * @param totalLength - Total length of the data to be filled in.
+     *
+     */
+    fill(buffers, totalLength) {
+        this._size = Math.min(this.capacity, totalLength);
+        let i = 0, j = 0, targetOffset = 0, sourceOffset = 0, totalCopiedNum = 0;
+        while (totalCopiedNum < this._size) {
+            const source = buffers[i];
+            const target = this.buffers[j];
+            const copiedNum = source.copy(target, targetOffset, sourceOffset);
+            totalCopiedNum += copiedNum;
+            sourceOffset += copiedNum;
+            targetOffset += copiedNum;
+            if (sourceOffset === source.length) {
+                i++;
+                sourceOffset = 0;
+            }
+            if (targetOffset === target.length) {
+                j++;
+                targetOffset = 0;
+            }
+        }
+        // clear copied from source buffers
+        buffers.splice(0, i);
+        if (buffers.length > 0) {
+            buffers[0] = buffers[0].slice(sourceOffset);
+        }
+    }
+    /**
+     * Get the readable stream assembled from all the data in the internal buffers.
+     *
+     */
+    getReadableStream() {
+        return new BuffersStream(this.buffers, this.size);
+    }
+}
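+// Usage sketch (illustrative only; sizes are arbitrary):
+//
+//   const chunks = [Buffer.alloc(4 * 1024 * 1024)];                    // incoming data, 4 MiB
+//   const pooled = new PooledBuffer(8 * 1024 * 1024, chunks, 4 * 1024 * 1024);
+//   pooled.size;                                                       // => 4194304 (bytes actually filled)
+//   pooled.getReadableStream();                                        // Readable over the filled data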
+
+// Copyright (c) Microsoft Corporation.
+// Licensed under the MIT License.
+/**
+ * This class accepts a Node.js Readable stream as input, and keeps reading data
+ * from the stream into the internal buffer structure, until it reaches maxBuffers.
+ * Every buffer that becomes available triggers outgoingHandler.
+ *
+ * The internal buffer structure includes an incoming buffer array and an outgoing
+ * buffer array. The incoming buffer array holds the "empty" buffers that can be filled
+ * with new incoming data. The outgoing array holds the filled buffers waiting to be
+ * handled by outgoingHandler. The size of every buffer is defined by the bufferSize parameter.
+ *
+ * NUM_OF_ALL_BUFFERS = BUFFERS_IN_INCOMING + BUFFERS_IN_OUTGOING + BUFFERS_UNDER_HANDLING
+ *
+ * NUM_OF_ALL_BUFFERS is less than or equal to maxBuffers.
+ *
+ * PERFORMANCE IMPROVEMENT TIPS:
+ * 1. Set the input stream's highWaterMark to the same value as the bufferSize
+ *    parameter, which avoids Buffer.concat() operations.
+ * 2. Set concurrency to a smaller value than maxBuffers, which helps reduce the
+ *    chance that an outgoing handler has to wait for stream data (in that situation,
+ *    outgoing handlers are blocked); the outgoing queue shouldn't be empty.
+ */
+class BufferScheduler {
+    /**
+     * Creates an instance of BufferScheduler.
+     *
+     * @param readable - A Node.js Readable stream
+     * @param bufferSize - Buffer size of every maintained buffer
+     * @param maxBuffers - How many buffers can be allocated
+     * @param outgoingHandler - An async function scheduled to be
+     *                                          triggered when a buffer fully filled
+     *                                          with stream data
+     * @param concurrency - Concurrency of executing outgoingHandlers (>0)
+     * @param encoding - [Optional] Encoding of Readable stream when it's a string stream
+     */
+    constructor(readable, bufferSize, maxBuffers, outgoingHandler, concurrency, encoding) {
+        /**
+         * An internal event emitter.
+         */
+        this.emitter = new events.EventEmitter();
+        /**
+         * An internal offset marker to track data offset in bytes of next outgoingHandler.
+         */
+        this.offset = 0;
+        /**
+         * An internal marker to track whether the stream has ended.
+         */
+        this.isStreamEnd = false;
+        /**
+         * An internal marker to track whether the stream or an outgoingHandler returned an error.
+         */
+        this.isError = false;
+        /**
+         * How many handlers are executing.
+         */
+        this.executingOutgoingHandlers = 0;
+        /**
+         * How many buffers have been allocated.
+         */
+        this.numBuffers = 0;
+        /**
+         * This class doesn't know how much data the stream emits each time, which is
+         * determined by the stream's highWaterMark. So BufferScheduler caches the data
+         * received from the stream; when the data in unresolvedDataArray exceeds the
+         * defined bufferSize, it concatenates a bufferSize worth of data, fills it into
+         * available buffers from incoming, and pushes them to the outgoing array.
+         */
+        this.unresolvedDataArray = [];
+        /**
+         * How much data is contained in unresolvedDataArray.
+         */
+        this.unresolvedLength = 0;
+        /**
+         * The array of available buffers that can be filled with data from the stream.
+         */
+        this.incoming = [];
+        /**
+         * The array (queue) of buffers already filled with stream data.
+         */
+        this.outgoing = [];
+        if (bufferSize <= 0) {
+            throw new RangeError(`bufferSize must be larger than 0, current is ${bufferSize}`);
+        }
+        if (maxBuffers <= 0) {
+            throw new RangeError(`maxBuffers must be larger than 0, current is ${maxBuffers}`);
+        }
+        if (concurrency <= 0) {
+            throw new RangeError(`concurrency must be larger than 0, current is ${concurrency}`);
+        }
+        this.bufferSize = bufferSize;
+        this.maxBuffers = maxBuffers;
+        this.readable = readable;
+        this.outgoingHandler = outgoingHandler;
+        this.concurrency = concurrency;
+        this.encoding = encoding;
+    }
+    /**
+     * Start the scheduler. The returned promise rejects when the stream or any of the
+     * outgoingHandlers returns an error.
+     *
+     */
+    async do() {
+        return new Promise((resolve, reject) => {
+            this.readable.on("data", (data) => {
+                data = typeof data === "string" ? Buffer.from(data, this.encoding) : data;
+                this.appendUnresolvedData(data);
+                if (!this.resolveData()) {
+                    this.readable.pause();
+                }
+            });
+            this.readable.on("error", (err) => {
+                this.emitter.emit("error", err);
+            });
+            this.readable.on("end", () => {
+                this.isStreamEnd = true;
+                this.emitter.emit("checkEnd");
+            });
+            this.emitter.on("error", (err) => {
+                this.isError = true;
+                this.readable.pause();
+                reject(err);
+            });
+            this.emitter.on("checkEnd", () => {
+                if (this.outgoing.length > 0) {
+                    this.triggerOutgoingHandlers();
+                    return;
+                }
+                if (this.isStreamEnd && this.executingOutgoingHandlers === 0) {
+                    if (this.unresolvedLength > 0 && this.unresolvedLength < this.bufferSize) {
+                        const buffer = this.shiftBufferFromUnresolvedDataArray();
+                        this.outgoingHandler(() => buffer.getReadableStream(), buffer.size, this.offset)
+                            .then(resolve)
+                            .catch(reject);
+                    }
+                    else if (this.unresolvedLength >= this.bufferSize) {
+                        return;
+                    }
+                    else {
+                        resolve();
+                    }
+                }
+            });
+        });
+    }
+    /**
+     * Insert new data into the unresolved array.
+     *
+     * @param data -
+     */
+    appendUnresolvedData(data) {
+        this.unresolvedDataArray.push(data);
+        this.unresolvedLength += data.length;
+    }
+    /**
+     * Try to shift a buffer of bufferSize bytes. The returned buffer may be smaller
+     * than bufferSize when the data in unresolvedDataArray is less than bufferSize.
+     *
+     */
+    shiftBufferFromUnresolvedDataArray(buffer) {
+        if (!buffer) {
+            buffer = new PooledBuffer(this.bufferSize, this.unresolvedDataArray, this.unresolvedLength);
+        }
+        else {
+            buffer.fill(this.unresolvedDataArray, this.unresolvedLength);
+        }
+        this.unresolvedLength -= buffer.size;
+        return buffer;
+    }
+    /**
+     * Resolve data in unresolvedDataArray. For every bufferSize of data shifted out,
+     * it tries to get (or allocate) a buffer from incoming, fills it,
+     * then pushes it into outgoing to be handled by the outgoing handler.
+     *
+     * Returns false when the available buffers in incoming are not enough, otherwise true.
+     *
+     * @returns Return false when buffers in incoming are not enough, else true.
+     */
+    resolveData() {
+        while (this.unresolvedLength >= this.bufferSize) {
+            let buffer;
+            if (this.incoming.length > 0) {
+                buffer = this.incoming.shift();
+                this.shiftBufferFromUnresolvedDataArray(buffer);
+            }
+            else {
+                if (this.numBuffers < this.maxBuffers) {
+                    buffer = this.shiftBufferFromUnresolvedDataArray();
+                    this.numBuffers++;
+                }
+                else {
+                    // No available buffer, wait for buffer returned
+                    return false;
+                }
+            }
+            this.outgoing.push(buffer);
+            this.triggerOutgoingHandlers();
+        }
+        return true;
+    }
+    /**
+     * Try to trigger an outgoing handler for every buffer in outgoing. Stops when
+     * the concurrency limit is reached.
+     */
+    async triggerOutgoingHandlers() {
+        let buffer;
+        do {
+            if (this.executingOutgoingHandlers >= this.concurrency) {
+                return;
+            }
+            buffer = this.outgoing.shift();
+            if (buffer) {
+                this.triggerOutgoingHandler(buffer);
+            }
+        } while (buffer);
+    }
+    /**
+     * Trigger an outgoing handler for a buffer shifted from outgoing.
+     *
+     * @param buffer -
+     */
+    async triggerOutgoingHandler(buffer) {
+        const bufferLength = buffer.size;
+        this.executingOutgoingHandlers++;
+        this.offset += bufferLength;
+        try {
+            await this.outgoingHandler(() => buffer.getReadableStream(), bufferLength, this.offset - bufferLength);
+        }
+        catch (err) {
+            this.emitter.emit("error", err);
+            return;
+        }
+        this.executingOutgoingHandlers--;
+        this.reuseBuffer(buffer);
+        this.emitter.emit("checkEnd");
+    }
+    /**
+     * Return a buffer used by an outgoing handler back to incoming.
+     *
+     * @param buffer -
+     */
+    reuseBuffer(buffer) {
+        this.incoming.push(buffer);
+        if (!this.isError && this.resolveData() && !this.isStreamEnd) {
+            this.readable.resume();
+        }
+    }
+}
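+// Usage sketch (illustrative only; `readable` and `uploadBlock` are hypothetical):
+//
+//   const scheduler = new BufferScheduler(
+//     readable,                                              // Node.js Readable stream
+//     4 * 1024 * 1024,                                       // bufferSize: 4 MiB per block
+//     20,                                                    // maxBuffers
+//     (getStream, length, offset) => uploadBlock(getStream(), length, offset),
+//     5                                                      // concurrency of outgoing handlers
+//   );
+//   await scheduler.do(); // resolves when the stream is drained and all handlers finish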
+
+// Copyright (c) Microsoft Corporation.
+// Licensed under the MIT License.
+/**
+ * Reads a readable stream into a buffer, filling the buffer from offset to end.
+ *
+ * @param stream - A Node.js Readable stream
+ * @param buffer - Buffer to be filled; its length must be greater than or equal to offset
+ * @param offset - From which position in the buffer to be filled, inclusive
+ * @param end - To which position in the buffer to be filled, exclusive
+ * @param encoding - Encoding of the Readable stream
+ */
+async function streamToBuffer(stream, buffer, offset, end, encoding) {
+    let pos = 0; // Position in stream
+    const count = end - offset; // Total amount of data needed in stream
+    return new Promise((resolve, reject) => {
+        const timeout = setTimeout(() => reject(new Error(`The operation cannot be completed in timeout.`)), REQUEST_TIMEOUT);
+        stream.on("readable", () => {
+            if (pos >= count) {
+                clearTimeout(timeout);
+                resolve();
+                return;
+            }
+            let chunk = stream.read();
+            if (!chunk) {
+                return;
+            }
+            if (typeof chunk === "string") {
+                chunk = Buffer.from(chunk, encoding);
+            }
+            // How much data needed in this chunk
+            const chunkLength = pos + chunk.length > count ? count - pos : chunk.length;
+            buffer.fill(chunk.slice(0, chunkLength), offset + pos, offset + pos + chunkLength);
+            pos += chunkLength;
+        });
+        stream.on("end", () => {
+            clearTimeout(timeout);
+            if (pos < count) {
+                reject(new Error(`Stream drains before getting enough data needed. Data read: ${pos}, data need: ${count}`));
+            }
+            resolve();
+        });
+        stream.on("error", (msg) => {
+            clearTimeout(timeout);
+            reject(msg);
+        });
+    });
+}
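+// Usage sketch (illustrative only; `readable` is a hypothetical stream with at least 1024 bytes):
+//
+//   const target = Buffer.alloc(1024);
+//   await streamToBuffer(readable, target, 0, 1024); // fills target[0..1023] or rejects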
+/**
+ * Reads a readable stream into buffer entirely.
+ *
+ * @param stream - A Node.js Readable stream
+ * @param buffer - Buffer to be filled; its length must be large enough to hold the entire stream
+ * @param encoding - Encoding of the Readable stream
+ * @returns with the count of bytes read.
+ * @throws `RangeError` If buffer size is not big enough.
+ */
+async function streamToBuffer2(stream, buffer, encoding) {
+    let pos = 0; // Position in stream
+    const bufferSize = buffer.length;
+    return new Promise((resolve, reject) => {
+        stream.on("readable", () => {
+            let chunk = stream.read();
+            if (!chunk) {
+                return;
+            }
+            if (typeof chunk === "string") {
+                chunk = Buffer.from(chunk, encoding);
+            }
+            if (pos + chunk.length > bufferSize) {
+                reject(new Error(`Stream exceeds buffer size. Buffer size: ${bufferSize}`));
+                return;
+            }
+            buffer.fill(chunk, pos, pos + chunk.length);
+            pos += chunk.length;
+        });
+        stream.on("end", () => {
+            resolve(pos);
+        });
+        stream.on("error", reject);
+    });
+}
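+// Usage sketch (illustrative only; `readable` is a hypothetical stream no larger than 64 KiB):
+//
+//   const target = Buffer.alloc(64 * 1024);
+//   const bytesRead = await streamToBuffer2(readable, target); // rejects if the stream exceeds the buffer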
+/**
+ * ONLY AVAILABLE IN NODE.JS RUNTIME.
+ *
+ * Writes the content of a read stream to a local file. Returns a Promise that resolves after the file handle is closed.
+ *
+ * @param rs - The read stream.
+ * @param file - Destination file path.
+ */
+async function readStreamToLocalFile(rs, file) {
+    return new Promise((resolve, reject) => {
+        const ws = fs__namespace.createWriteStream(file);
+        rs.on("error", (err) => {
+            reject(err);
+        });
+        ws.on("error", (err) => {
+            reject(err);
+        });
+        ws.on("close", resolve);
+        rs.pipe(ws);
+    });
+}
+/**
+ * ONLY AVAILABLE IN NODE.JS RUNTIME.
+ *
+ * Promisified version of fs.stat().
+ */
+const fsStat = util__namespace.promisify(fs__namespace.stat);
+const fsCreateReadStream = fs__namespace.createReadStream;
+
+// Copyright (c) Microsoft Corporation.
+// Licensed under the MIT License.
+/**
+ * A BlobClient represents a URL to an Azure Storage blob; the blob may be a block blob,
+ * append blob, or page blob.
+ */
+class BlobClient extends StorageClient {
+    /**
+     * The name of the blob.
+     */
+    get name() {
+        return this._name;
+    }
+    /**
+     * The name of the storage container the blob is associated with.
+     */
+    get containerName() {
+        return this._containerName;
+    }
+    constructor(urlOrConnectionString, credentialOrPipelineOrContainerName, blobNameOrOptions, 
+    // Legacy, no fix for eslint error without breaking. Disable it for this interface.
+    /* eslint-disable-next-line @azure/azure-sdk/ts-naming-options*/
+    options) {
+        options = options || {};
+        let pipeline;
+        let url;
+        if (isPipelineLike(credentialOrPipelineOrContainerName)) {
+            // (url: string, pipeline: Pipeline)
+            url = urlOrConnectionString;
+            pipeline = credentialOrPipelineOrContainerName;
+        }
+        else if ((coreUtil.isNode && credentialOrPipelineOrContainerName instanceof StorageSharedKeyCredential) ||
+            credentialOrPipelineOrContainerName instanceof AnonymousCredential ||
+            coreAuth.isTokenCredential(credentialOrPipelineOrContainerName)) {
+            // (url: string, credential?: StorageSharedKeyCredential | AnonymousCredential | TokenCredential, options?: StoragePipelineOptions)
+            url = urlOrConnectionString;
+            options = blobNameOrOptions;
+            pipeline = newPipeline(credentialOrPipelineOrContainerName, options);
+        }
+        else if (!credentialOrPipelineOrContainerName &&
+            typeof credentialOrPipelineOrContainerName !== "string") {
+            // (url: string, credential?: StorageSharedKeyCredential | AnonymousCredential | TokenCredential, options?: StoragePipelineOptions)
+            // The second parameter is undefined. Use anonymous credential.
+            url = urlOrConnectionString;
+            if (blobNameOrOptions && typeof blobNameOrOptions !== "string") {
+                options = blobNameOrOptions;
+            }
+            pipeline = newPipeline(new AnonymousCredential(), options);
+        }
+        else if (credentialOrPipelineOrContainerName &&
+            typeof credentialOrPipelineOrContainerName === "string" &&
+            blobNameOrOptions &&
+            typeof blobNameOrOptions === "string") {
+            // (connectionString: string, containerName: string, blobName: string, options?: StoragePipelineOptions)
+            const containerName = credentialOrPipelineOrContainerName;
+            const blobName = blobNameOrOptions;
+            const extractedCreds = extractConnectionStringParts(urlOrConnectionString);
+            if (extractedCreds.kind === "AccountConnString") {
+                if (coreUtil.isNode) {
+                    const sharedKeyCredential = new StorageSharedKeyCredential(extractedCreds.accountName, extractedCreds.accountKey);
+                    url = appendToURLPath(appendToURLPath(extractedCreds.url, encodeURIComponent(containerName)), encodeURIComponent(blobName));
+                    if (!options.proxyOptions) {
+                        options.proxyOptions = coreRestPipeline.getDefaultProxySettings(extractedCreds.proxyUri);
+                    }
+                    pipeline = newPipeline(sharedKeyCredential, options);
+                }
+                else {
+                    throw new Error("Account connection string is only supported in Node.js environment");
+                }
+            }
+            else if (extractedCreds.kind === "SASConnString") {
+                url =
+                    appendToURLPath(appendToURLPath(extractedCreds.url, encodeURIComponent(containerName)), encodeURIComponent(blobName)) +
+                        "?" +
+                        extractedCreds.accountSas;
+                pipeline = newPipeline(new AnonymousCredential(), options);
+            }
+            else {
+                throw new Error("Connection string must be either an Account connection string or a SAS connection string");
+            }
+        }
+        else {
+            throw new Error("Expecting non-empty strings for containerName and blobName parameters");
+        }
+        super(url, pipeline);
+        ({ blobName: this._name, containerName: this._containerName } =
+            this.getBlobAndContainerNamesFromUrl());
+        this.blobContext = this.storageClientContext.blob;
+        this._snapshot = getURLParameter(this.url, URLConstants.Parameters.SNAPSHOT);
+        this._versionId = getURLParameter(this.url, URLConstants.Parameters.VERSIONID);
+    }
+    /**
+     * Creates a new BlobClient object identical to the source but with the specified snapshot timestamp.
+     * Provide "" will remove the snapshot and return a Client to the base blob.
+     *
+     * @param snapshot - The snapshot timestamp.
+     * @returns A new BlobClient object identical to the source but with the specified snapshot timestamp
+     */
+    withSnapshot(snapshot) {
+        return new BlobClient(setURLParameter(this.url, URLConstants.Parameters.SNAPSHOT, snapshot.length === 0 ? undefined : snapshot), this.pipeline);
+    }
+    /**
+     * Creates a new BlobClient object pointing to a version of this blob.
+     * Provide "" will remove the versionId and return a Client to the base blob.
+     *
+     * @param versionId - The versionId.
+     * @returns A new BlobClient object pointing to the version of this blob.
+     */
+    withVersion(versionId) {
+        return new BlobClient(setURLParameter(this.url, URLConstants.Parameters.VERSIONID, versionId.length === 0 ? undefined : versionId), this.pipeline);
+    }
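+    // Usage sketch (illustrative only; `snapshotResponse` is a hypothetical result of createSnapshot()):
+    //
+    //   const snapshotClient = blobClient.withSnapshot(snapshotResponse.snapshot);
+    //   const baseClient = snapshotClient.withSnapshot(""); // back to the base blob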
+    /**
+     * Creates an AppendBlobClient object.
+     *
+     */
+    getAppendBlobClient() {
+        return new AppendBlobClient(this.url, this.pipeline);
+    }
+    /**
+     * Creates a BlockBlobClient object.
+     *
+     */
+    getBlockBlobClient() {
+        return new BlockBlobClient(this.url, this.pipeline);
+    }
+    /**
+     * Creates a PageBlobClient object.
+     *
+     */
+    getPageBlobClient() {
+        return new PageBlobClient(this.url, this.pipeline);
+    }
+    /**
+     * Reads or downloads a blob from the system, including its metadata and properties.
+     * You can also call Get Blob to read a snapshot.
+     *
+     * * In Node.js, data is returned as a Readable stream via readableStreamBody
+     * * In browsers, data is returned as a promise via blobBody
+     *
+     * @see https://docs.microsoft.com/en-us/rest/api/storageservices/get-blob
+     *
+     * @param offset - From which position of the blob to download, greater than or equal to 0
+     * @param count - How much data to be downloaded, greater than 0. Will download to the end when undefined
+     * @param options - Optional options to Blob Download operation.
+     *
+     *
+     * Example usage (Node.js):
+     *
+     * ```js
+     * // Download and convert a blob to a string
+     * const downloadBlockBlobResponse = await blobClient.download();
+     * const downloaded = await streamToBuffer(downloadBlockBlobResponse.readableStreamBody);
+     * console.log("Downloaded blob content:", downloaded.toString());
+     *
+     * async function streamToBuffer(readableStream) {
+     *   return new Promise((resolve, reject) => {
+     *     const chunks = [];
+     *     readableStream.on("data", (data) => {
+     *       chunks.push(data instanceof Buffer ? data : Buffer.from(data));
+     *     });
+     *     readableStream.on("end", () => {
+     *       resolve(Buffer.concat(chunks));
+     *     });
+     *     readableStream.on("error", reject);
+     *   });
+     * }
+     * ```
+     *
+     * Example usage (browser):
+     *
+     * ```js
+     * // Download and convert a blob to a string
+     * const downloadBlockBlobResponse = await blobClient.download();
+     * const downloaded = await blobToString(await downloadBlockBlobResponse.blobBody);
+     * console.log(
+     *   "Downloaded blob content",
+     *   downloaded
+     * );
+     *
+     * async function blobToString(blob: Blob): Promise<string> {
+     *   const fileReader = new FileReader();
+     *   return new Promise<string>((resolve, reject) => {
+     *     fileReader.onloadend = (ev: any) => {
+     *       resolve(ev.target!.result);
+     *     };
+     *     fileReader.onerror = reject;
+     *     fileReader.readAsText(blob);
+     *   });
+     * }
+     * ```
+     */
+    async download(offset = 0, count, options = {}) {
+        options.conditions = options.conditions || {};
+        ensureCpkIfSpecified(options.customerProvidedKey, this.isHttps);
+        return tracingClient.withSpan("BlobClient-download", options, async (updatedOptions) => {
+            var _a;
+            const res = assertResponse(await this.blobContext.download({
+                abortSignal: options.abortSignal,
+                leaseAccessConditions: options.conditions,
+                modifiedAccessConditions: Object.assign(Object.assign({}, options.conditions), { ifTags: (_a = options.conditions) === null || _a === void 0 ? void 0 : _a.tagConditions }),
+                requestOptions: {
+                    onDownloadProgress: coreUtil.isNode ? undefined : options.onProgress, // for Node.js, progress is reported by RetriableReadableStream
+                },
+                range: offset === 0 && !count ? undefined : rangeToString({ offset, count }),
+                rangeGetContentMD5: options.rangeGetContentMD5,
+                rangeGetContentCRC64: options.rangeGetContentCrc64,
+                snapshot: options.snapshot,
+                cpkInfo: options.customerProvidedKey,
+                tracingOptions: updatedOptions.tracingOptions,
+            }));
+            const wrappedRes = Object.assign(Object.assign({}, res), { _response: res._response, objectReplicationDestinationPolicyId: res.objectReplicationPolicyId, objectReplicationSourceProperties: parseObjectReplicationRecord(res.objectReplicationRules) });
+            // Return browser response immediately
+            if (!coreUtil.isNode) {
+                return wrappedRes;
+            }
+            // We support retrying when the download stream unexpectedly ends in the Node.js runtime.
+            // The following code shouldn't be bundled into the browser build; however, some
+            // bundlers may try to bundle the following code and "FileReadResponse.ts".
+            // In this case, "FileDownloadResponse.browser.ts" will be used as a shim of "FileDownloadResponse.ts"
+            // The config is in package.json "browser" field
+            if (options.maxRetryRequests === undefined || options.maxRetryRequests < 0) {
+                // TODO: Default value or make it a required parameter?
+                options.maxRetryRequests = DEFAULT_MAX_DOWNLOAD_RETRY_REQUESTS;
+            }
+            if (res.contentLength === undefined) {
+                throw new RangeError(`File download response doesn't contain valid content length header`);
+            }
+            if (!res.etag) {
+                throw new RangeError(`File download response doesn't contain valid etag header`);
+            }
+            return new BlobDownloadResponse(wrappedRes, async (start) => {
+                var _a;
+                const updatedDownloadOptions = {
+                    leaseAccessConditions: options.conditions,
+                    modifiedAccessConditions: {
+                        ifMatch: options.conditions.ifMatch || res.etag,
+                        ifModifiedSince: options.conditions.ifModifiedSince,
+                        ifNoneMatch: options.conditions.ifNoneMatch,
+                        ifUnmodifiedSince: options.conditions.ifUnmodifiedSince,
+                        ifTags: (_a = options.conditions) === null || _a === void 0 ? void 0 : _a.tagConditions,
+                    },
+                    range: rangeToString({
+                        count: offset + res.contentLength - start,
+                        offset: start,
+                    }),
+                    rangeGetContentMD5: options.rangeGetContentMD5,
+                    rangeGetContentCRC64: options.rangeGetContentCrc64,
+                    snapshot: options.snapshot,
+                    cpkInfo: options.customerProvidedKey,
+                };
+                // Debug purpose only
+                // console.log(
+                //   `Read from internal stream, range: ${
+                //     updatedOptions.range
+                //   }, options: ${JSON.stringify(updatedOptions)}`
+                // );
+                return (await this.blobContext.download(Object.assign({ abortSignal: options.abortSignal }, updatedDownloadOptions))).readableStreamBody;
+            }, offset, res.contentLength, {
+                maxRetryRequests: options.maxRetryRequests,
+                onProgress: options.onProgress,
+            });
+        });
+    }
+    /**
+     * Returns true if the Azure blob resource represented by this client exists; false otherwise.
+     *
+     * NOTE: use this function with care since an existing blob might be deleted by other clients or
+     * applications. Conversely, new blobs might be added by other clients or applications after this
+     * function completes.
+     *
+     * @param options - options to Exists operation.
+     */
+    async exists(options = {}) {
+        return tracingClient.withSpan("BlobClient-exists", options, async (updatedOptions) => {
+            try {
+                ensureCpkIfSpecified(options.customerProvidedKey, this.isHttps);
+                await this.getProperties({
+                    abortSignal: options.abortSignal,
+                    customerProvidedKey: options.customerProvidedKey,
+                    conditions: options.conditions,
+                    tracingOptions: updatedOptions.tracingOptions,
+                });
+                return true;
+            }
+            catch (e) {
+                if (e.statusCode === 404) {
+                    // Expected exception when checking blob existence
+                    return false;
+                }
+                else if (e.statusCode === 409 &&
+                    (e.details.errorCode === BlobUsesCustomerSpecifiedEncryptionMsg ||
+                        e.details.errorCode === BlobDoesNotUseCustomerSpecifiedEncryption)) {
+                    // Expected exception when checking blob existence
+                    return true;
+                }
+                throw e;
+            }
+        });
+    }
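+    // Usage sketch (illustrative only):
+    //
+    //   if (await blobClient.exists()) {
+    //     const properties = await blobClient.getProperties();
+    //   }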
+    /**
+     * Returns all user-defined metadata, standard HTTP properties, and system properties
+     * for the blob. It does not return the content of the blob.
+     * @see https://docs.microsoft.com/en-us/rest/api/storageservices/get-blob-properties
+     *
+     * WARNING: The `metadata` object returned in the response will have its keys in lowercase, even if
+     * they originally contained uppercase characters. This differs from the metadata keys returned by
+     * the methods of {@link ContainerClient} that list blobs using the `includeMetadata` option, which
+     * will retain their original casing.
+     *
+     * @param options - Optional options to Get Properties operation.
+     */
+    async getProperties(options = {}) {
+        options.conditions = options.conditions || {};
+        ensureCpkIfSpecified(options.customerProvidedKey, this.isHttps);
+        return tracingClient.withSpan("BlobClient-getProperties", options, async (updatedOptions) => {
+            var _a;
+            const res = assertResponse(await this.blobContext.getProperties({
+                abortSignal: options.abortSignal,
+                leaseAccessConditions: options.conditions,
+                modifiedAccessConditions: Object.assign(Object.assign({}, options.conditions), { ifTags: (_a = options.conditions) === null || _a === void 0 ? void 0 : _a.tagConditions }),
+                cpkInfo: options.customerProvidedKey,
+                tracingOptions: updatedOptions.tracingOptions,
+            }));
+            return Object.assign(Object.assign({}, res), { _response: res._response, objectReplicationDestinationPolicyId: res.objectReplicationPolicyId, objectReplicationSourceProperties: parseObjectReplicationRecord(res.objectReplicationRules) });
+        });
+    }
+    /**
+     * Marks the specified blob or snapshot for deletion. The blob is later deleted
+     * during garbage collection. Note that in order to delete a blob, you must delete
+     * all of its snapshots. You can delete both at the same time with the Delete
+     * Blob operation.
+     * @see https://docs.microsoft.com/en-us/rest/api/storageservices/delete-blob
+     *
+     * @param options - Optional options to Blob Delete operation.
+     */
+    async delete(options = {}) {
+        options.conditions = options.conditions || {};
+        return tracingClient.withSpan("BlobClient-delete", options, async (updatedOptions) => {
+            var _a;
+            return assertResponse(await this.blobContext.delete({
+                abortSignal: options.abortSignal,
+                deleteSnapshots: options.deleteSnapshots,
+                leaseAccessConditions: options.conditions,
+                modifiedAccessConditions: Object.assign(Object.assign({}, options.conditions), { ifTags: (_a = options.conditions) === null || _a === void 0 ? void 0 : _a.tagConditions }),
+                tracingOptions: updatedOptions.tracingOptions,
+            }));
+        });
+    }
+    /**
+     * Marks the specified blob or snapshot for deletion if it exists. The blob is later deleted
+     * during garbage collection. Note that in order to delete a blob, you must delete
+     * all of its snapshots. You can delete both at the same time with the Delete
+     * Blob operation.
+     * @see https://docs.microsoft.com/en-us/rest/api/storageservices/delete-blob
+     *
+     * @param options - Optional options to Blob Delete operation.
+     */
+    async deleteIfExists(options = {}) {
+        return tracingClient.withSpan("BlobClient-deleteIfExists", options, async (updatedOptions) => {
+            var _a, _b;
+            try {
+                const res = assertResponse(await this.delete(updatedOptions));
+                return Object.assign(Object.assign({ succeeded: true }, res), { _response: res._response });
+            }
+            catch (e) {
+                if (((_a = e.details) === null || _a === void 0 ? void 0 : _a.errorCode) === "BlobNotFound") {
+                    return Object.assign(Object.assign({ succeeded: false }, (_b = e.response) === null || _b === void 0 ? void 0 : _b.parsedHeaders), { _response: e.response });
+                }
+                throw e;
+            }
+        });
+    }
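+    // Usage sketch (illustrative only):
+    //
+    //   const { succeeded } = await blobClient.deleteIfExists();
+    //   // succeeded is false when the blob was already gone (BlobNotFound)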
+    /**
+     * Restores the contents and metadata of soft deleted blob and any associated
+     * soft deleted snapshots. Undelete Blob is supported only on version 2017-07-29
+     * or later.
+     * @see https://docs.microsoft.com/en-us/rest/api/storageservices/undelete-blob
+     *
+     * @param options - Optional options to Blob Undelete operation.
+     */
+    async undelete(options = {}) {
+        return tracingClient.withSpan("BlobClient-undelete", options, async (updatedOptions) => {
+            return assertResponse(await this.blobContext.undelete({
+                abortSignal: options.abortSignal,
+                tracingOptions: updatedOptions.tracingOptions,
+            }));
+        });
+    }
+    /**
+     * Sets system properties on the blob.
+     *
+     * If no value is provided, or no value is provided for the specified blob HTTP headers,
+     * those blob HTTP headers without a value will be cleared.
+     * @see https://docs.microsoft.com/en-us/rest/api/storageservices/set-blob-properties
+     *
+     * @param blobHTTPHeaders - If no value is provided, or no value is provided for
+     *                                                   the specified blob HTTP headers, these blob HTTP
+     *                                                   headers without a value will be cleared.
+     *                                                   A common header to set is `blobContentType`
+     *                                                   enabling the browser to provide functionality
+     *                                                   based on file type.
+     * @param options - Optional options to Blob Set HTTP Headers operation.
+     */
+    async setHTTPHeaders(blobHTTPHeaders, options = {}) {
+        options.conditions = options.conditions || {};
+        ensureCpkIfSpecified(options.customerProvidedKey, this.isHttps);
+        return tracingClient.withSpan("BlobClient-setHTTPHeaders", options, async (updatedOptions) => {
+            var _a;
+            return assertResponse(await this.blobContext.setHttpHeaders({
+                abortSignal: options.abortSignal,
+                blobHttpHeaders: blobHTTPHeaders,
+                leaseAccessConditions: options.conditions,
+                modifiedAccessConditions: Object.assign(Object.assign({}, options.conditions), { ifTags: (_a = options.conditions) === null || _a === void 0 ? void 0 : _a.tagConditions }),
+                // cpkInfo: options.customerProvidedKey, // CPK is not included in Swagger, should change this back when this issue is fixed in Swagger.
+                tracingOptions: updatedOptions.tracingOptions,
+            }));
+        });
+    }
+    /**
+     * Sets user-defined metadata for the specified blob as one or more name-value pairs.
+     *
+     * If no option is provided, or no metadata is defined in the parameter, the blob
+     * metadata will be removed.
+     * @see https://docs.microsoft.com/en-us/rest/api/storageservices/set-blob-metadata
+     *
+     * @param metadata - Replace existing metadata with this value.
+     *                               If no value provided the existing metadata will be removed.
+     * @param options - Optional options to Set Metadata operation.
+     */
+    async setMetadata(metadata, options = {}) {
+        options.conditions = options.conditions || {};
+        ensureCpkIfSpecified(options.customerProvidedKey, this.isHttps);
+        return tracingClient.withSpan("BlobClient-setMetadata", options, async (updatedOptions) => {
+            var _a;
+            return assertResponse(await this.blobContext.setMetadata({
+                abortSignal: options.abortSignal,
+                leaseAccessConditions: options.conditions,
+                metadata,
+                modifiedAccessConditions: Object.assign(Object.assign({}, options.conditions), { ifTags: (_a = options.conditions) === null || _a === void 0 ? void 0 : _a.tagConditions }),
+                cpkInfo: options.customerProvidedKey,
+                encryptionScope: options.encryptionScope,
+                tracingOptions: updatedOptions.tracingOptions,
+            }));
+        });
+    }
+    /**
+     * Sets tags on the underlying blob.
+     * A blob can have up to 10 tags. Tag keys must be between 1 and 128 characters. Tag values must be between 0 and 256 characters.
+     * Valid tag key and value characters include lower and upper case letters, digits (0-9),
+     * space (' '), plus ('+'), minus ('-'), period ('.'), forward slash ('/'), colon (':'), equals ('='), and underscore ('_').
+     *
+     * @param tags -
+     * @param options -
+     */
+    async setTags(tags, options = {}) {
+        return tracingClient.withSpan("BlobClient-setTags", options, async (updatedOptions) => {
+            var _a;
+            return assertResponse(await this.blobContext.setTags({
+                abortSignal: options.abortSignal,
+                leaseAccessConditions: options.conditions,
+                modifiedAccessConditions: Object.assign(Object.assign({}, options.conditions), { ifTags: (_a = options.conditions) === null || _a === void 0 ? void 0 : _a.tagConditions }),
+                tracingOptions: updatedOptions.tracingOptions,
+                tags: toBlobTags(tags),
+            }));
+        });
+    }
+    /**
+     * Gets the tags associated with the underlying blob.
+     *
+     * @param options -
+     */
+    async getTags(options = {}) {
+        return tracingClient.withSpan("BlobClient-getTags", options, async (updatedOptions) => {
+            var _a;
+            const response = assertResponse(await this.blobContext.getTags({
+                abortSignal: options.abortSignal,
+                leaseAccessConditions: options.conditions,
+                modifiedAccessConditions: Object.assign(Object.assign({}, options.conditions), { ifTags: (_a = options.conditions) === null || _a === void 0 ? void 0 : _a.tagConditions }),
+                tracingOptions: updatedOptions.tracingOptions,
+            }));
+            const wrappedResponse = Object.assign(Object.assign({}, response), { _response: response._response, tags: toTags({ blobTagSet: response.blobTagSet }) || {} });
+            return wrappedResponse;
+        });
+    }
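+    // Usage sketch (illustrative tag names and values):
+    //
+    //   await blobClient.setTags({ project: "demo", tier: "archive-candidate" });
+    //   const { tags } = await blobClient.getTags(); // => { project: "demo", tier: "archive-candidate" }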
+    /**
+     * Get a {@link BlobLeaseClient} that manages leases on the blob.
+     *
+     * @param proposeLeaseId - Initial proposed lease Id.
+     * @returns A new BlobLeaseClient object for managing leases on the blob.
+     */
+    getBlobLeaseClient(proposeLeaseId) {
+        return new BlobLeaseClient(this, proposeLeaseId);
+    }
+    /**
+     * Creates a read-only snapshot of a blob.
+     * @see https://docs.microsoft.com/en-us/rest/api/storageservices/snapshot-blob
+     *
+     * @param options - Optional options to the Blob Create Snapshot operation.
+     */
+    async createSnapshot(options = {}) {
+        options.conditions = options.conditions || {};
+        ensureCpkIfSpecified(options.customerProvidedKey, this.isHttps);
+        return tracingClient.withSpan("BlobClient-createSnapshot", options, async (updatedOptions) => {
+            var _a;
+            return assertResponse(await this.blobContext.createSnapshot({
+                abortSignal: options.abortSignal,
+                leaseAccessConditions: options.conditions,
+                metadata: options.metadata,
+                modifiedAccessConditions: Object.assign(Object.assign({}, options.conditions), { ifTags: (_a = options.conditions) === null || _a === void 0 ? void 0 : _a.tagConditions }),
+                cpkInfo: options.customerProvidedKey,
+                encryptionScope: options.encryptionScope,
+                tracingOptions: updatedOptions.tracingOptions,
+            }));
+        });
+    }
+    /**
+     * Asynchronously copies a blob to a destination within the storage account.
+     * This method returns a long running operation poller that allows you to wait
+     * indefinitely until the copy is completed.
+     * You can also cancel a copy before it is completed by calling `cancelOperation` on the poller.
+     * Note that the onProgress callback will not be invoked if the operation completes in the first
+     * request, and attempting to cancel a completed copy will result in an error being thrown.
+     *
+     * In version 2012-02-12 and later, the source for a Copy Blob operation can be
+     * a committed blob in any Azure storage account.
+     * Beginning with version 2015-02-21, the source for a Copy Blob operation can be
+     * an Azure file in any Azure storage account.
+     * Only storage accounts created on or after June 7th, 2012 allow the Copy Blob
+     * operation to copy from another storage account.
+     * @see https://docs.microsoft.com/en-us/rest/api/storageservices/copy-blob
+     *
+     * Example using automatic polling:
+     *
+     * ```js
+     * const copyPoller = await blobClient.beginCopyFromURL('url');
+     * const result = await copyPoller.pollUntilDone();
+     * ```
+     *
+     * Example using manual polling:
+     *
+     * ```js
+     * const copyPoller = await blobClient.beginCopyFromURL('url');
+     * while (!copyPoller.isDone()) {
+     *    await copyPoller.poll();
+     * }
+     * const result = copyPoller.getResult();
+     * ```
+     *
+     * Example using progress updates:
+     *
+     * ```js
+     * const copyPoller = await blobClient.beginCopyFromURL('url', {
+     *   onProgress(state) {
+     *     console.log(`Progress: ${state.copyProgress}`);
+     *   }
+     * });
+     * const result = await copyPoller.pollUntilDone();
+     * ```
+     *
+     * Example using a changing polling interval (default 15 seconds):
+     *
+     * ```js
+     * const copyPoller = await blobClient.beginCopyFromURL('url', {
+     *   intervalInMs: 1000 // poll blob every 1 second for copy progress
+     * });
+     * const result = await copyPoller.pollUntilDone();
+     * ```
+     *
+     * Example using copy cancellation:
+     *
+     * ```js
+     * const copyPoller = await blobClient.beginCopyFromURL('url');
+     * // cancel operation after starting it.
+     * try {
+     *   await copyPoller.cancelOperation();
+     *   // calls to get the result now throw PollerCancelledError
+     *   await copyPoller.getResult();
+     * } catch (err) {
+     *   if (err.name === 'PollerCancelledError') {
+     *     console.log('The copy was cancelled.');
+     *   }
+     * }
+     * ```
+     *
+     * @param copySource - url to the source Azure Blob/File.
+     * @param options - Optional options to the Blob Start Copy From URL operation.
+     */
+    async beginCopyFromURL(copySource, options = {}) {
+        const client = {
+            abortCopyFromURL: (...args) => this.abortCopyFromURL(...args),
+            getProperties: (...args) => this.getProperties(...args),
+            startCopyFromURL: (...args) => this.startCopyFromURL(...args),
+        };
+        const poller = new BlobBeginCopyFromUrlPoller({
+            blobClient: client,
+            copySource,
+            intervalInMs: options.intervalInMs,
+            onProgress: options.onProgress,
+            resumeFrom: options.resumeFrom,
+            startCopyFromURLOptions: options,
+        });
+        // Trigger the startCopyFromURL call by calling poll.
+        // Any errors from this method should be surfaced to the user.
+        await poller.poll();
+        return poller;
+    }
+    /**
+     * Aborts a pending asynchronous Copy Blob operation, and leaves a destination blob with zero
+     * length and full metadata. Version 2012-02-12 and newer.
+     * @see https://docs.microsoft.com/en-us/rest/api/storageservices/abort-copy-blob
+     *
+     * @param copyId - Id of the Copy From URL operation.
+     * @param options - Optional options to the Blob Abort Copy From URL operation.
+     */
+    async abortCopyFromURL(copyId, options = {}) {
+        return tracingClient.withSpan("BlobClient-abortCopyFromURL", options, async (updatedOptions) => {
+            return assertResponse(await this.blobContext.abortCopyFromURL(copyId, {
+                abortSignal: options.abortSignal,
+                leaseAccessConditions: options.conditions,
+                tracingOptions: updatedOptions.tracingOptions,
+            }));
+        });
+    }
+    /**
+     * The synchronous Copy From URL operation copies a blob or an internet resource to a new blob. It will not
+     * return a response until the copy is complete.
+     * @see https://docs.microsoft.com/en-us/rest/api/storageservices/copy-blob-from-url
+     *
+     * @param copySource - The source URL to copy from. A Shared Access Signature (SAS) may be needed for authentication.
+     * @param options -
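+     *
+     * Example usage (a sketch; `sourceUrl` is a placeholder for a publicly readable
+     * blob URL or one carrying a SAS token):
+     *
+     * ```js
+     * const response = await blobClient.syncCopyFromURL(sourceUrl);
+     * console.log(`Copy finished with status: ${response.copyStatus}`);
+     * ```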
+     */
+    async syncCopyFromURL(copySource, options = {}) {
+        options.conditions = options.conditions || {};
+        options.sourceConditions = options.sourceConditions || {};
+        return tracingClient.withSpan("BlobClient-syncCopyFromURL", options, async (updatedOptions) => {
+            var _a, _b, _c, _d, _e, _f, _g;
+            return assertResponse(await this.blobContext.copyFromURL(copySource, {
+                abortSignal: options.abortSignal,
+                metadata: options.metadata,
+                leaseAccessConditions: options.conditions,
+                modifiedAccessConditions: Object.assign(Object.assign({}, options.conditions), { ifTags: (_a = options.conditions) === null || _a === void 0 ? void 0 : _a.tagConditions }),
+                sourceModifiedAccessConditions: {
+                    sourceIfMatch: (_b = options.sourceConditions) === null || _b === void 0 ? void 0 : _b.ifMatch,
+                    sourceIfModifiedSince: (_c = options.sourceConditions) === null || _c === void 0 ? void 0 : _c.ifModifiedSince,
+                    sourceIfNoneMatch: (_d = options.sourceConditions) === null || _d === void 0 ? void 0 : _d.ifNoneMatch,
+                    sourceIfUnmodifiedSince: (_e = options.sourceConditions) === null || _e === void 0 ? void 0 : _e.ifUnmodifiedSince,
+                },
+                sourceContentMD5: options.sourceContentMD5,
+                copySourceAuthorization: httpAuthorizationToString(options.sourceAuthorization),
+                tier: toAccessTier(options.tier),
+                blobTagsString: toBlobTagsString(options.tags),
+                immutabilityPolicyExpiry: (_f = options.immutabilityPolicy) === null || _f === void 0 ? void 0 : _f.expiriesOn,
+                immutabilityPolicyMode: (_g = options.immutabilityPolicy) === null || _g === void 0 ? void 0 : _g.policyMode,
+                legalHold: options.legalHold,
+                encryptionScope: options.encryptionScope,
+                copySourceTags: options.copySourceTags,
+                tracingOptions: updatedOptions.tracingOptions,
+            }));
+        });
+    }
+    /**
+     * Sets the tier on a blob. The operation is allowed on a page blob in a premium
+     * storage account and on a block blob in a blob storage account (locally redundant
+     * storage only). A premium page blob's tier determines the allowed size, IOPS,
+     * and bandwidth of the blob. A block blob's tier determines Hot/Cool/Archive
+     * storage type. This operation does not update the blob's ETag.
+     * @see https://docs.microsoft.com/en-us/rest/api/storageservices/set-blob-tier
+     *
+     * @param tier - The tier to be set on the blob. Valid values are Hot, Cool, or Archive.
+     * @param options - Optional options to the Blob Set Tier operation.
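+     *
+     * Example usage (a sketch; assumes `blobClient` points to a block blob in a
+     * blob storage account):
+     *
+     * ```js
+     * // Move the blob to the Cool tier.
+     * await blobClient.setAccessTier("Cool");
+     * ```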
+     */
+    async setAccessTier(tier, options = {}) {
+        return tracingClient.withSpan("BlobClient-setAccessTier", options, async (updatedOptions) => {
+            var _a;
+            return assertResponse(await this.blobContext.setTier(toAccessTier(tier), {
+                abortSignal: options.abortSignal,
+                leaseAccessConditions: options.conditions,
+                modifiedAccessConditions: Object.assign(Object.assign({}, options.conditions), { ifTags: (_a = options.conditions) === null || _a === void 0 ? void 0 : _a.tagConditions }),
+                rehydratePriority: options.rehydratePriority,
+                tracingOptions: updatedOptions.tracingOptions,
+            }));
+        });
+    }
+    async downloadToBuffer(param1, param2, param3, param4 = {}) {
+        var _a;
+        let buffer;
+        let offset = 0;
+        let count = 0;
+        let options = param4;
+        if (param1 instanceof Buffer) {
+            buffer = param1;
+            offset = param2 || 0;
+            count = typeof param3 === "number" ? param3 : 0;
+        }
+        else {
+            offset = typeof param1 === "number" ? param1 : 0;
+            count = typeof param2 === "number" ? param2 : 0;
+            options = param3 || {};
+        }
+        let blockSize = (_a = options.blockSize) !== null && _a !== void 0 ? _a : 0;
+        if (blockSize < 0) {
+            throw new RangeError("blockSize option must be >= 0");
+        }
+        if (blockSize === 0) {
+            blockSize = DEFAULT_BLOB_DOWNLOAD_BLOCK_BYTES;
+        }
+        if (offset < 0) {
+            throw new RangeError("offset option must be >= 0");
+        }
+        if (count && count <= 0) {
+            throw new RangeError("count option must be greater than 0");
+        }
+        if (!options.conditions) {
+            options.conditions = {};
+        }
+        return tracingClient.withSpan("BlobClient-downloadToBuffer", options, async (updatedOptions) => {
+            // Customer doesn't specify length, get it
+            if (!count) {
+                const response = await this.getProperties(Object.assign(Object.assign({}, options), { tracingOptions: updatedOptions.tracingOptions }));
+                count = response.contentLength - offset;
+                if (count < 0) {
+                    throw new RangeError(`offset ${offset} shouldn't be larger than blob size ${response.contentLength}`);
+                }
+            }
+            // Allocate the buffer of size = count if the buffer is not provided
+            if (!buffer) {
+                try {
+                    buffer = Buffer.alloc(count);
+                }
+                catch (error) {
+                    throw new Error(`Unable to allocate the buffer of size: ${count}(in bytes). Please try passing your own buffer to the "downloadToBuffer" method or try using other methods like "download" or "downloadToFile".\t ${error.message}`);
+                }
+            }
+            if (buffer.length < count) {
+                throw new RangeError(`The buffer's size should be equal to or larger than the request count of bytes: ${count}`);
+            }
+            let transferProgress = 0;
+            const batch = new Batch(options.concurrency);
+            for (let off = offset; off < offset + count; off = off + blockSize) {
+                batch.addOperation(async () => {
+                    // Exclusive chunk end position
+                    let chunkEnd = offset + count;
+                    if (off + blockSize < chunkEnd) {
+                        chunkEnd = off + blockSize;
+                    }
+                    const response = await this.download(off, chunkEnd - off, {
+                        abortSignal: options.abortSignal,
+                        conditions: options.conditions,
+                        maxRetryRequests: options.maxRetryRequestsPerBlock,
+                        customerProvidedKey: options.customerProvidedKey,
+                        tracingOptions: updatedOptions.tracingOptions,
+                    });
+                    const stream = response.readableStreamBody;
+                    await streamToBuffer(stream, buffer, off - offset, chunkEnd - offset);
+                    // Update progress after each block is downloaded, in case a block download is retried.
+                    // Finer-grained progress updates inside the HTTP requests could be provided,
+                    // but only when convenience-layer download retries are enabled.
+                    transferProgress += chunkEnd - off;
+                    if (options.onProgress) {
+                        options.onProgress({ loadedBytes: transferProgress });
+                    }
+                });
+            }
+            await batch.do();
+            return buffer;
+        });
+    }
+    /**
+     * ONLY AVAILABLE IN NODE.JS RUNTIME.
+     *
+     * Downloads an Azure Blob to a local file.
+     * Fails if the given file path already exists.
+     * Offset and count are optional; pass 0 and undefined respectively to download the entire blob.
+     *
+     * @param filePath -
+     * @param offset - From which position of the block blob to download.
+     * @param count - How much data to be downloaded. Will download to the end when passing undefined.
+     * @param options - Options to Blob download options.
+     * @returns The response data for blob download operation,
+     *                                                 but with readableStreamBody set to undefined since its
+     *                                                 content is already read and written into a local file
+     *                                                 at the specified path.
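+     *
+     * Example usage (Node.js only; a sketch, with "downloaded.txt" as a placeholder path):
+     *
+     * ```js
+     * // Download the whole blob into a local file.
+     * await blobClient.downloadToFile("downloaded.txt");
+     * ```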
+     */
+    async downloadToFile(filePath, offset = 0, count, options = {}) {
+        return tracingClient.withSpan("BlobClient-downloadToFile", options, async (updatedOptions) => {
+            const response = await this.download(offset, count, Object.assign(Object.assign({}, options), { tracingOptions: updatedOptions.tracingOptions }));
+            if (response.readableStreamBody) {
+                await readStreamToLocalFile(response.readableStreamBody, filePath);
+            }
+            // The stream is no longer accessible so setting it to undefined.
+            response.blobDownloadStream = undefined;
+            return response;
+        });
+    }
+    getBlobAndContainerNamesFromUrl() {
+        let containerName;
+        let blobName;
+        try {
+            //  URL may look like the following
+            // "https://myaccount.blob.core.windows.net/mycontainer/blob?sasString";
+            // "https://myaccount.blob.core.windows.net/mycontainer/blob";
+            // "https://myaccount.blob.core.windows.net/mycontainer/blob/a.txt?sasString";
+            // "https://myaccount.blob.core.windows.net/mycontainer/blob/a.txt";
+            // IPv4/IPv6 address hosts, Endpoints - `http://127.0.0.1:10000/devstoreaccount1/containername/blob`
+            // http://localhost:10001/devstoreaccount1/containername/blob
+            const parsedUrl = new URL(this.url);
+            if (parsedUrl.host.split(".")[1] === "blob") {
+                // "https://myaccount.blob.core.windows.net/containername/blob".
+                // .getPath() -> /containername/blob
+                const pathComponents = parsedUrl.pathname.match("/([^/]*)(/(.*))?");
+                containerName = pathComponents[1];
+                blobName = pathComponents[3];
+            }
+            else if (isIpEndpointStyle(parsedUrl)) {
+                // IPv4/IPv6 address hosts... Example - http://192.0.0.10:10001/devstoreaccount1/containername/blob
+                // Single word domain without a [dot] in the endpoint... Example - http://localhost:10001/devstoreaccount1/containername/blob
+                // .getPath() -> /devstoreaccount1/containername/blob
+                const pathComponents = parsedUrl.pathname.match("/([^/]*)/([^/]*)(/(.*))?");
+                containerName = pathComponents[2];
+                blobName = pathComponents[4];
+            }
+            else {
+                // "https://customdomain.com/containername/blob".
+                // .getPath() -> /containername/blob
+                const pathComponents = parsedUrl.pathname.match("/([^/]*)(/(.*))?");
+                containerName = pathComponents[1];
+                blobName = pathComponents[3];
+            }
+            // decode the encoded blobName, containerName - to get all the special characters that might be present in them
+            containerName = decodeURIComponent(containerName);
+            blobName = decodeURIComponent(blobName);
+            // Azure Storage Server will replace "\" with "/" in the blob names
+            //   doing the same in the SDK side so that the user doesn't have to replace "\" instances in the blobName
+            blobName = blobName.replace(/\\/g, "/");
+            if (!containerName) {
+                throw new Error("Provided containerName is invalid.");
+            }
+            return { blobName, containerName };
+        }
+        catch (error) {
+            throw new Error("Unable to extract blobName and containerName with provided information.");
+        }
+    }
+    /**
+     * Asynchronously copies a blob to a destination within the storage account.
+     * In version 2012-02-12 and later, the source for a Copy Blob operation can be
+     * a committed blob in any Azure storage account.
+     * Beginning with version 2015-02-21, the source for a Copy Blob operation can be
+     * an Azure file in any Azure storage account.
+     * Only storage accounts created on or after June 7th, 2012 allow the Copy Blob
+     * operation to copy from another storage account.
+     * @see https://docs.microsoft.com/en-us/rest/api/storageservices/copy-blob
+     *
+     * @param copySource - url to the source Azure Blob/File.
+     * @param options - Optional options to the Blob Start Copy From URL operation.
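+     *
+     * Example usage (a sketch; prefer {@link beginCopyFromURL} for most scenarios since it
+     * returns a poller; `sourceUrl` is a placeholder source URL):
+     *
+     * ```js
+     * const response = await blobClient.startCopyFromURL(sourceUrl);
+     * console.log(`Copy id: ${response.copyId}, status: ${response.copyStatus}`);
+     * ```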
+     */
+    async startCopyFromURL(copySource, options = {}) {
+        return tracingClient.withSpan("BlobClient-startCopyFromURL", options, async (updatedOptions) => {
+            var _a, _b, _c;
+            options.conditions = options.conditions || {};
+            options.sourceConditions = options.sourceConditions || {};
+            return assertResponse(await this.blobContext.startCopyFromURL(copySource, {
+                abortSignal: options.abortSignal,
+                leaseAccessConditions: options.conditions,
+                metadata: options.metadata,
+                modifiedAccessConditions: Object.assign(Object.assign({}, options.conditions), { ifTags: (_a = options.conditions) === null || _a === void 0 ? void 0 : _a.tagConditions }),
+                sourceModifiedAccessConditions: {
+                    sourceIfMatch: options.sourceConditions.ifMatch,
+                    sourceIfModifiedSince: options.sourceConditions.ifModifiedSince,
+                    sourceIfNoneMatch: options.sourceConditions.ifNoneMatch,
+                    sourceIfUnmodifiedSince: options.sourceConditions.ifUnmodifiedSince,
+                    sourceIfTags: options.sourceConditions.tagConditions,
+                },
+                immutabilityPolicyExpiry: (_b = options.immutabilityPolicy) === null || _b === void 0 ? void 0 : _b.expiriesOn,
+                immutabilityPolicyMode: (_c = options.immutabilityPolicy) === null || _c === void 0 ? void 0 : _c.policyMode,
+                legalHold: options.legalHold,
+                rehydratePriority: options.rehydratePriority,
+                tier: toAccessTier(options.tier),
+                blobTagsString: toBlobTagsString(options.tags),
+                sealBlob: options.sealBlob,
+                tracingOptions: updatedOptions.tracingOptions,
+            }));
+        });
+    }
+    /**
+     * Only available for BlobClient constructed with a shared key credential.
+     *
+     * Generates a Blob Service Shared Access Signature (SAS) URI based on the client properties
+     * and parameters passed in. The SAS is signed by the shared key credential of the client.
+     *
+     * @see https://docs.microsoft.com/en-us/rest/api/storageservices/constructing-a-service-sas
+     *
+     * @param options - Optional parameters.
+     * @returns The SAS URI consisting of the URI to the resource represented by this client, followed by the generated SAS token.
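+     *
+     * Example usage (a sketch; assumes the client was constructed with a
+     * `StorageSharedKeyCredential`):
+     *
+     * ```js
+     * // BlobSASPermissions is exported by @azure/storage-blob.
+     * // Generate a read-only SAS URL valid for one hour.
+     * const sasUrl = await blobClient.generateSasUrl({
+     *   permissions: BlobSASPermissions.parse("r"),
+     *   expiresOn: new Date(Date.now() + 60 * 60 * 1000),
+     * });
+     * ```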
+     */
+    generateSasUrl(options) {
+        return new Promise((resolve) => {
+            if (!(this.credential instanceof StorageSharedKeyCredential)) {
+                throw new RangeError("Can only generate the SAS when the client is initialized with a shared key credential");
+            }
+            const sas = generateBlobSASQueryParameters(Object.assign({ containerName: this._containerName, blobName: this._name, snapshotTime: this._snapshot, versionId: this._versionId }, options), this.credential).toString();
+            resolve(appendToURLQuery(this.url, sas));
+        });
+    }
+    /**
+     * Only available for BlobClient constructed with a shared key credential.
+     *
+     * Generates string to sign for a Blob Service Shared Access Signature (SAS) URI based on
+     * the client properties and parameters passed in. The SAS is signed by the shared key credential of the client.
+     *
+     * @see https://docs.microsoft.com/en-us/rest/api/storageservices/constructing-a-service-sas
+     *
+     * @param options - Optional parameters.
+     * @returns The SAS URI consisting of the URI to the resource represented by this client, followed by the generated SAS token.
+     */
+    /* eslint-disable-next-line @azure/azure-sdk/ts-naming-options*/
+    generateSasStringToSign(options) {
+        if (!(this.credential instanceof StorageSharedKeyCredential)) {
+            throw new RangeError("Can only generate the SAS when the client is initialized with a shared key credential");
+        }
+        return generateBlobSASQueryParametersInternal(Object.assign({ containerName: this._containerName, blobName: this._name, snapshotTime: this._snapshot, versionId: this._versionId }, options), this.credential).stringToSign;
+    }
+    /**
+     *
+     * Generates a Blob Service Shared Access Signature (SAS) URI based on
+     * the client properties and parameters passed in. The SAS is signed by the input user delegation key.
+     *
+     * @see https://docs.microsoft.com/en-us/rest/api/storageservices/constructing-a-service-sas
+     *
+     * @param options - Optional parameters.
+     * @param userDelegationKey -  Return value of `blobServiceClient.getUserDelegationKey()`
+     * @returns The SAS URI consisting of the URI to the resource represented by this client, followed by the generated SAS token.
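+     *
+     * Example usage (a sketch; assumes the client was constructed with a `TokenCredential`
+     * and `blobServiceClient` is the service client for the same account):
+     *
+     * ```js
+     * const startsOn = new Date();
+     * const expiresOn = new Date(Date.now() + 60 * 60 * 1000);
+     * const userDelegationKey = await blobServiceClient.getUserDelegationKey(startsOn, expiresOn);
+     * const sasUrl = await blobClient.generateUserDelegationSasUrl(
+     *   { permissions: BlobSASPermissions.parse("r"), expiresOn },
+     *   userDelegationKey,
+     * );
+     * ```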
+     */
+    generateUserDelegationSasUrl(options, userDelegationKey) {
+        return new Promise((resolve) => {
+            const sas = generateBlobSASQueryParameters(Object.assign({ containerName: this._containerName, blobName: this._name, snapshotTime: this._snapshot, versionId: this._versionId }, options), userDelegationKey, this.accountName).toString();
+            resolve(appendToURLQuery(this.url, sas));
+        });
+    }
+    /**
+     * Only available for BlobClient constructed with a shared key credential.
+     *
+     * Generates string to sign for a Blob Service Shared Access Signature (SAS) URI based on
+     * the client properties and parameters passed in. The SAS is signed by the input user delegation key.
+     *
+     * @see https://docs.microsoft.com/en-us/rest/api/storageservices/constructing-a-service-sas
+     *
+     * @param options - Optional parameters.
+     * @param userDelegationKey -  Return value of `blobServiceClient.getUserDelegationKey()`
+     * @returns The SAS URI consisting of the URI to the resource represented by this client, followed by the generated SAS token.
+     */
+    generateUserDelegationSasStringToSign(options, userDelegationKey) {
+        return generateBlobSASQueryParametersInternal(Object.assign({ containerName: this._containerName, blobName: this._name, snapshotTime: this._snapshot, versionId: this._versionId }, options), userDelegationKey, this.accountName).stringToSign;
+    }
+    /**
+     * Delete the immutability policy on the blob.
+     *
+     * @param options - Optional options to delete immutability policy on the blob.
+     */
+    async deleteImmutabilityPolicy(options = {}) {
+        return tracingClient.withSpan("BlobClient-deleteImmutabilityPolicy", options, async (updatedOptions) => {
+            return assertResponse(await this.blobContext.deleteImmutabilityPolicy({
+                tracingOptions: updatedOptions.tracingOptions,
+            }));
+        });
+    }
+    /**
+     * Set immutability policy on the blob.
+     *
+     * @param options - Optional options to set immutability policy on the blob.
+     */
+    async setImmutabilityPolicy(immutabilityPolicy, options = {}) {
+        return tracingClient.withSpan("BlobClient-setImmutabilityPolicy", options, async (updatedOptions) => {
+            return assertResponse(await this.blobContext.setImmutabilityPolicy({
+                immutabilityPolicyExpiry: immutabilityPolicy.expiriesOn,
+                immutabilityPolicyMode: immutabilityPolicy.policyMode,
+                tracingOptions: updatedOptions.tracingOptions,
+            }));
+        });
+    }
+    /**
+     * Set legal hold on the blob.
+     *
+     * @param options - Optional options to set legal hold on the blob.
+     */
+    async setLegalHold(legalHoldEnabled, options = {}) {
+        return tracingClient.withSpan("BlobClient-setLegalHold", options, async (updatedOptions) => {
+            return assertResponse(await this.blobContext.setLegalHold(legalHoldEnabled, {
+                tracingOptions: updatedOptions.tracingOptions,
+            }));
+        });
+    }
+    /**
+     * The Get Account Information operation returns the sku name and account kind
+     * for the specified account.
+     * The Get Account Information operation is available on service versions beginning
+     * with version 2018-03-28.
+     * @see https://docs.microsoft.com/en-us/rest/api/storageservices/get-account-information
+     *
+     * @param options - Options to the Service Get Account Info operation.
+     * @returns Response data for the Service Get Account Info operation.
+     */
+    async getAccountInfo(options = {}) {
+        return tracingClient.withSpan("BlobClient-getAccountInfo", options, async (updatedOptions) => {
+            return assertResponse(await this.blobContext.getAccountInfo({
+                abortSignal: options.abortSignal,
+                tracingOptions: updatedOptions.tracingOptions,
+            }));
+        });
+    }
+}
+/**
+ * AppendBlobClient defines a set of operations applicable to append blobs.
+ */
+class AppendBlobClient extends BlobClient {
+    constructor(urlOrConnectionString, credentialOrPipelineOrContainerName, blobNameOrOptions, 
+    // Legacy, no fix for eslint error without breaking. Disable it for this interface.
+    /* eslint-disable-next-line @azure/azure-sdk/ts-naming-options*/
+    options) {
+        // In TypeScript we cannot simply pass all parameters to super() like below so have to duplicate the code instead.
+        //   super(s, credentialOrPipelineOrContainerNameOrOptions, blobNameOrOptions, options);
+        let pipeline;
+        let url;
+        options = options || {};
+        if (isPipelineLike(credentialOrPipelineOrContainerName)) {
+            // (url: string, pipeline: Pipeline)
+            url = urlOrConnectionString;
+            pipeline = credentialOrPipelineOrContainerName;
+        }
+        else if ((coreUtil.isNode && credentialOrPipelineOrContainerName instanceof StorageSharedKeyCredential) ||
+            credentialOrPipelineOrContainerName instanceof AnonymousCredential ||
+            coreAuth.isTokenCredential(credentialOrPipelineOrContainerName)) {
+            // (url: string, credential?: StorageSharedKeyCredential | AnonymousCredential | TokenCredential, options?: StoragePipelineOptions)
+            url = urlOrConnectionString;
+            options = blobNameOrOptions;
+            pipeline = newPipeline(credentialOrPipelineOrContainerName, options);
+        }
+        else if (!credentialOrPipelineOrContainerName &&
+            typeof credentialOrPipelineOrContainerName !== "string") {
+            // (url: string, credential?: StorageSharedKeyCredential | AnonymousCredential | TokenCredential, options?: StoragePipelineOptions)
+            url = urlOrConnectionString;
+            // The second parameter is undefined. Use anonymous credential.
+            pipeline = newPipeline(new AnonymousCredential(), options);
+        }
+        else if (credentialOrPipelineOrContainerName &&
+            typeof credentialOrPipelineOrContainerName === "string" &&
+            blobNameOrOptions &&
+            typeof blobNameOrOptions === "string") {
+            // (connectionString: string, containerName: string, blobName: string, options?: StoragePipelineOptions)
+            const containerName = credentialOrPipelineOrContainerName;
+            const blobName = blobNameOrOptions;
+            const extractedCreds = extractConnectionStringParts(urlOrConnectionString);
+            if (extractedCreds.kind === "AccountConnString") {
+                if (coreUtil.isNode) {
+                    const sharedKeyCredential = new StorageSharedKeyCredential(extractedCreds.accountName, extractedCreds.accountKey);
+                    url = appendToURLPath(appendToURLPath(extractedCreds.url, encodeURIComponent(containerName)), encodeURIComponent(blobName));
+                    if (!options.proxyOptions) {
+                        options.proxyOptions = coreRestPipeline.getDefaultProxySettings(extractedCreds.proxyUri);
+                    }
+                    pipeline = newPipeline(sharedKeyCredential, options);
+                }
+                else {
+                    throw new Error("Account connection string is only supported in Node.js environment");
+                }
+            }
+            else if (extractedCreds.kind === "SASConnString") {
+                url =
+                    appendToURLPath(appendToURLPath(extractedCreds.url, encodeURIComponent(containerName)), encodeURIComponent(blobName)) +
+                        "?" +
+                        extractedCreds.accountSas;
+                pipeline = newPipeline(new AnonymousCredential(), options);
+            }
+            else {
+                throw new Error("Connection string must be either an Account connection string or a SAS connection string");
+            }
+        }
+        else {
+            throw new Error("Expecting non-empty strings for containerName and blobName parameters");
+        }
+        super(url, pipeline);
+        this.appendBlobContext = this.storageClientContext.appendBlob;
+    }
+    /**
+     * Creates a new AppendBlobClient object identical to the source but with the
+     * specified snapshot timestamp.
+     * Provide "" will remove the snapshot and return a Client to the base blob.
+     *
+     * @param snapshot - The snapshot timestamp.
+     * @returns A new AppendBlobClient object identical to the source but with the specified snapshot timestamp.
+     */
+    withSnapshot(snapshot) {
+        return new AppendBlobClient(setURLParameter(this.url, URLConstants.Parameters.SNAPSHOT, snapshot.length === 0 ? undefined : snapshot), this.pipeline);
+    }
+    /**
+     * Creates a 0-length append blob. Call AppendBlock to append data to an append blob.
+     * @see https://docs.microsoft.com/rest/api/storageservices/put-blob
+     *
+     * @param options - Options to the Append Block Create operation.
+     *
+     *
+     * Example usage:
+     *
+     * ```js
+     * const appendBlobClient = containerClient.getAppendBlobClient("<blob name>");
+     * await appendBlobClient.create();
+     * ```
+     */
+    async create(options = {}) {
+        options.conditions = options.conditions || {};
+        ensureCpkIfSpecified(options.customerProvidedKey, this.isHttps);
+        return tracingClient.withSpan("AppendBlobClient-create", options, async (updatedOptions) => {
+            var _a, _b, _c;
+            return assertResponse(await this.appendBlobContext.create(0, {
+                abortSignal: options.abortSignal,
+                blobHttpHeaders: options.blobHTTPHeaders,
+                leaseAccessConditions: options.conditions,
+                metadata: options.metadata,
+                modifiedAccessConditions: Object.assign(Object.assign({}, options.conditions), { ifTags: (_a = options.conditions) === null || _a === void 0 ? void 0 : _a.tagConditions }),
+                cpkInfo: options.customerProvidedKey,
+                encryptionScope: options.encryptionScope,
+                immutabilityPolicyExpiry: (_b = options.immutabilityPolicy) === null || _b === void 0 ? void 0 : _b.expiriesOn,
+                immutabilityPolicyMode: (_c = options.immutabilityPolicy) === null || _c === void 0 ? void 0 : _c.policyMode,
+                legalHold: options.legalHold,
+                blobTagsString: toBlobTagsString(options.tags),
+                tracingOptions: updatedOptions.tracingOptions,
+            }));
+        });
+    }
+    /**
+     * Creates a 0-length append blob. Call AppendBlock to append data to an append blob.
+     * If the blob with the same name already exists, the content of the existing blob will remain unchanged.
+     * @see https://docs.microsoft.com/rest/api/storageservices/put-blob
+     *
+     * @param options -
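+     *
+     * Example usage (a sketch; assumes an existing `containerClient`):
+     *
+     * ```js
+     * const appendBlobClient = containerClient.getAppendBlobClient("<blob name>");
+     * const response = await appendBlobClient.createIfNotExists();
+     * console.log(`Blob was newly created: ${response.succeeded}`);
+     * ```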
+     */
+    async createIfNotExists(options = {}) {
+        const conditions = { ifNoneMatch: ETagAny };
+        return tracingClient.withSpan("AppendBlobClient-createIfNotExists", options, async (updatedOptions) => {
+            var _a, _b;
+            try {
+                const res = assertResponse(await this.create(Object.assign(Object.assign({}, updatedOptions), { conditions })));
+                return Object.assign(Object.assign({ succeeded: true }, res), { _response: res._response });
+            }
+            catch (e) {
+                if (((_a = e.details) === null || _a === void 0 ? void 0 : _a.errorCode) === "BlobAlreadyExists") {
+                    return Object.assign(Object.assign({ succeeded: false }, (_b = e.response) === null || _b === void 0 ? void 0 : _b.parsedHeaders), { _response: e.response });
+                }
+                throw e;
+            }
+        });
+    }
+    /**
+     * Seals the append blob, making it read only.
+     *
+     * @param options -
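+     *
+     * Example usage (a sketch; assumes `appendBlobClient` points to an existing append blob):
+     *
+     * ```js
+     * // After sealing, further appendBlock calls will fail.
+     * await appendBlobClient.seal();
+     * ```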
+     */
+    async seal(options = {}) {
+        options.conditions = options.conditions || {};
+        return tracingClient.withSpan("AppendBlobClient-seal", options, async (updatedOptions) => {
+            var _a;
+            return assertResponse(await this.appendBlobContext.seal({
+                abortSignal: options.abortSignal,
+                appendPositionAccessConditions: options.conditions,
+                leaseAccessConditions: options.conditions,
+                modifiedAccessConditions: Object.assign(Object.assign({}, options.conditions), { ifTags: (_a = options.conditions) === null || _a === void 0 ? void 0 : _a.tagConditions }),
+                tracingOptions: updatedOptions.tracingOptions,
+            }));
+        });
+    }
+    /**
+     * Commits a new block of data to the end of the existing append blob.
+     * @see https://docs.microsoft.com/rest/api/storageservices/append-block
+     *
+     * @param body - Data to be appended.
+     * @param contentLength - Length of the body in bytes.
+     * @param options - Options to the Append Block operation.
+     *
+     *
+     * Example usage:
+     *
+     * ```js
+     * const content = "Hello World!";
+     *
+     * // Create a new append blob and append data to the blob.
+     * const newAppendBlobClient = containerClient.getAppendBlobClient("<blob name>");
+     * await newAppendBlobClient.create();
+     * await newAppendBlobClient.appendBlock(content, content.length);
+     *
+     * // Append data to an existing append blob.
+     * const existingAppendBlobClient = containerClient.getAppendBlobClient("<blob name>");
+     * await existingAppendBlobClient.appendBlock(content, content.length);
+     * ```
+     */
+    async appendBlock(body, contentLength, options = {}) {
+        options.conditions = options.conditions || {};
+        ensureCpkIfSpecified(options.customerProvidedKey, this.isHttps);
+        return tracingClient.withSpan("AppendBlobClient-appendBlock", options, async (updatedOptions) => {
+            var _a;
+            return assertResponse(await this.appendBlobContext.appendBlock(contentLength, body, {
+                abortSignal: options.abortSignal,
+                appendPositionAccessConditions: options.conditions,
+                leaseAccessConditions: options.conditions,
+                modifiedAccessConditions: Object.assign(Object.assign({}, options.conditions), { ifTags: (_a = options.conditions) === null || _a === void 0 ? void 0 : _a.tagConditions }),
+                requestOptions: {
+                    onUploadProgress: options.onProgress,
+                },
+                transactionalContentMD5: options.transactionalContentMD5,
+                transactionalContentCrc64: options.transactionalContentCrc64,
+                cpkInfo: options.customerProvidedKey,
+                encryptionScope: options.encryptionScope,
+                tracingOptions: updatedOptions.tracingOptions,
+            }));
+        });
+    }
+    /**
+     * The Append Block operation commits a new block of data to the end of an existing append blob
+     * where the contents are read from a source url.
+     * @see https://docs.microsoft.com/en-us/rest/api/storageservices/append-block-from-url
+     *
+     * @param sourceURL -
+     *                 The url to the blob that will be the source of the copy. A source blob in the same storage account can
+     *                 be authenticated via Shared Key. However, if the source is a blob in another account, the source blob
+     *                 must either be public or must be authenticated via a shared access signature. If the source blob is
+     *                 public, no authentication is required to perform the operation.
+     * @param sourceOffset - Offset in source to be appended
+     * @param count - Number of bytes to be appended as a block
+     * @param options -
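+     *
+     * Example usage (a sketch; `sourceUrl` is a placeholder for a publicly readable
+     * blob URL or one carrying a SAS token; here the first 1024 bytes are appended):
+     *
+     * ```js
+     * await appendBlobClient.appendBlockFromURL(sourceUrl, 0, 1024);
+     * ```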
+     */
+    async appendBlockFromURL(sourceURL, sourceOffset, count, options = {}) {
+        options.conditions = options.conditions || {};
+        options.sourceConditions = options.sourceConditions || {};
+        ensureCpkIfSpecified(options.customerProvidedKey, this.isHttps);
+        return tracingClient.withSpan("AppendBlobClient-appendBlockFromURL", options, async (updatedOptions) => {
+            var _a, _b, _c, _d, _e;
+            return assertResponse(await this.appendBlobContext.appendBlockFromUrl(sourceURL, 0, {
+                abortSignal: options.abortSignal,
+                sourceRange: rangeToString({ offset: sourceOffset, count }),
+                sourceContentMD5: options.sourceContentMD5,
+                sourceContentCrc64: options.sourceContentCrc64,
+                leaseAccessConditions: options.conditions,
+                appendPositionAccessConditions: options.conditions,
+                modifiedAccessConditions: Object.assign(Object.assign({}, options.conditions), { ifTags: (_a = options.conditions) === null || _a === void 0 ? void 0 : _a.tagConditions }),
+                sourceModifiedAccessConditions: {
+                    sourceIfMatch: (_b = options.sourceConditions) === null || _b === void 0 ? void 0 : _b.ifMatch,
+                    sourceIfModifiedSince: (_c = options.sourceConditions) === null || _c === void 0 ? void 0 : _c.ifModifiedSince,
+                    sourceIfNoneMatch: (_d = options.sourceConditions) === null || _d === void 0 ? void 0 : _d.ifNoneMatch,
+                    sourceIfUnmodifiedSince: (_e = options.sourceConditions) === null || _e === void 0 ? void 0 : _e.ifUnmodifiedSince,
+                },
+                copySourceAuthorization: httpAuthorizationToString(options.sourceAuthorization),
+                cpkInfo: options.customerProvidedKey,
+                encryptionScope: options.encryptionScope,
+                tracingOptions: updatedOptions.tracingOptions,
+            }));
+        });
+    }
+}
+/**
+ * BlockBlobClient defines a set of operations applicable to block blobs.
+ */
+class BlockBlobClient extends BlobClient {
+    constructor(urlOrConnectionString, credentialOrPipelineOrContainerName, blobNameOrOptions, 
+    // Legacy, no fix for eslint error without breaking. Disable it for this interface.
+    /* eslint-disable-next-line @azure/azure-sdk/ts-naming-options*/
+    options) {
+        // In TypeScript we cannot simply pass all parameters to super() like below so have to duplicate the code instead.
+        //   super(s, credentialOrPipelineOrContainerNameOrOptions, blobNameOrOptions, options);
+        let pipeline;
+        let url;
+        options = options || {};
+        if (isPipelineLike(credentialOrPipelineOrContainerName)) {
+            // (url: string, pipeline: Pipeline)
+            url = urlOrConnectionString;
+            pipeline = credentialOrPipelineOrContainerName;
+        }
+        else if ((coreUtil.isNode && credentialOrPipelineOrContainerName instanceof StorageSharedKeyCredential) ||
+            credentialOrPipelineOrContainerName instanceof AnonymousCredential ||
+            coreAuth.isTokenCredential(credentialOrPipelineOrContainerName)) {
+            // (url: string, credential?: StorageSharedKeyCredential | AnonymousCredential | TokenCredential, options?: StoragePipelineOptions)
+            url = urlOrConnectionString;
+            options = blobNameOrOptions;
+            pipeline = newPipeline(credentialOrPipelineOrContainerName, options);
+        }
+        else if (!credentialOrPipelineOrContainerName &&
+            typeof credentialOrPipelineOrContainerName !== "string") {
+            // (url: string, credential?: StorageSharedKeyCredential | AnonymousCredential | TokenCredential, options?: StoragePipelineOptions)
+            // The second parameter is undefined. Use anonymous credential.
+            url = urlOrConnectionString;
+            if (blobNameOrOptions && typeof blobNameOrOptions !== "string") {
+                options = blobNameOrOptions;
+            }
+            pipeline = newPipeline(new AnonymousCredential(), options);
+        }
+        else if (credentialOrPipelineOrContainerName &&
+            typeof credentialOrPipelineOrContainerName === "string" &&
+            blobNameOrOptions &&
+            typeof blobNameOrOptions === "string") {
+            // (connectionString: string, containerName: string, blobName: string, options?: StoragePipelineOptions)
+            const containerName = credentialOrPipelineOrContainerName;
+            const blobName = blobNameOrOptions;
+            const extractedCreds = extractConnectionStringParts(urlOrConnectionString);
+            if (extractedCreds.kind === "AccountConnString") {
+                if (coreUtil.isNode) {
+                    const sharedKeyCredential = new StorageSharedKeyCredential(extractedCreds.accountName, extractedCreds.accountKey);
+                    url = appendToURLPath(appendToURLPath(extractedCreds.url, encodeURIComponent(containerName)), encodeURIComponent(blobName));
+                    if (!options.proxyOptions) {
+                        options.proxyOptions = coreRestPipeline.getDefaultProxySettings(extractedCreds.proxyUri);
+                    }
+                    pipeline = newPipeline(sharedKeyCredential, options);
+                }
+                else {
+                    throw new Error("Account connection string is only supported in Node.js environment");
+                }
+            }
+            else if (extractedCreds.kind === "SASConnString") {
+                url =
+                    appendToURLPath(appendToURLPath(extractedCreds.url, encodeURIComponent(containerName)), encodeURIComponent(blobName)) +
+                        "?" +
+                        extractedCreds.accountSas;
+                pipeline = newPipeline(new AnonymousCredential(), options);
+            }
+            else {
+                throw new Error("Connection string must be either an Account connection string or a SAS connection string");
+            }
+        }
+        else {
+            throw new Error("Expecting non-empty strings for containerName and blobName parameters");
+        }
+        super(url, pipeline);
+        this.blockBlobContext = this.storageClientContext.blockBlob;
+        this._blobContext = this.storageClientContext.blob;
+    }
+    /**
+     * Creates a new BlockBlobClient object identical to the source but with the
+     * specified snapshot timestamp.
+     * Provide "" will remove the snapshot and return a URL to the base blob.
+     *
+     * @param snapshot - The snapshot timestamp.
+     * @returns A new BlockBlobClient object identical to the source but with the specified snapshot timestamp.
+     */
+    withSnapshot(snapshot) {
+        return new BlockBlobClient(setURLParameter(this.url, URLConstants.Parameters.SNAPSHOT, snapshot.length === 0 ? undefined : snapshot), this.pipeline);
+    }
+    /**
+     * ONLY AVAILABLE IN NODE.JS RUNTIME.
+     *
+     * Quick query for a JSON or CSV formatted blob.
+     *
+     * Example usage (Node.js):
+     *
+     * ```js
+     * // Query and convert a blob to a string
+     * const queryBlockBlobResponse = await blockBlobClient.query("select * from BlobStorage");
+     * const downloaded = (await streamToBuffer(queryBlockBlobResponse.readableStreamBody)).toString();
+     * console.log("Query blob content:", downloaded);
+     *
+     * async function streamToBuffer(readableStream) {
+     *   return new Promise((resolve, reject) => {
+     *     const chunks = [];
+     *     readableStream.on("data", (data) => {
+     *       chunks.push(data instanceof Buffer ? data : Buffer.from(data));
+     *     });
+     *     readableStream.on("end", () => {
+     *       resolve(Buffer.concat(chunks));
+     *     });
+     *     readableStream.on("error", reject);
+     *   });
+     * }
+     * ```
+     *
+     * @param query -
+     * @param options -
+     */
+    async query(query, options = {}) {
+        ensureCpkIfSpecified(options.customerProvidedKey, this.isHttps);
+        if (!coreUtil.isNode) {
+            throw new Error("This operation currently is only supported in Node.js.");
+        }
+        return tracingClient.withSpan("BlockBlobClient-query", options, async (updatedOptions) => {
+            var _a;
+            const response = assertResponse(await this._blobContext.query({
+                abortSignal: options.abortSignal,
+                queryRequest: {
+                    queryType: "SQL",
+                    expression: query,
+                    inputSerialization: toQuerySerialization(options.inputTextConfiguration),
+                    outputSerialization: toQuerySerialization(options.outputTextConfiguration),
+                },
+                leaseAccessConditions: options.conditions,
+                modifiedAccessConditions: Object.assign(Object.assign({}, options.conditions), { ifTags: (_a = options.conditions) === null || _a === void 0 ? void 0 : _a.tagConditions }),
+                cpkInfo: options.customerProvidedKey,
+                tracingOptions: updatedOptions.tracingOptions,
+            }));
+            return new BlobQueryResponse(response, {
+                abortSignal: options.abortSignal,
+                onProgress: options.onProgress,
+                onError: options.onError,
+            });
+        });
+    }
+    /**
+     * Creates a new block blob, or updates the content of an existing block blob.
+     * Updating an existing block blob overwrites any existing metadata on the blob.
+     * Partial updates are not supported; the content of the existing blob is
+     * overwritten with the new content. To perform a partial update of a block blob's content,
+     * use {@link stageBlock} and {@link commitBlockList}.
+     *
+     * This is a non-parallel upload method; for better performance with concurrent
+     * uploads, use {@link uploadFile}, {@link uploadStream} or {@link uploadBrowserData}.
+     *
+     * @see https://docs.microsoft.com/rest/api/storageservices/put-blob
+     *
+     * @param body - Blob, string, ArrayBuffer, ArrayBufferView or a function
+     *                               which returns a new Readable stream whose offset is from data source beginning.
+     * @param contentLength - Length of body in bytes. Use Buffer.byteLength() to calculate body length for a
+     *                               string including non-Base64/Hex-encoded characters.
+     * @param options - Options to the Block Blob Upload operation.
+     * @returns Response data for the Block Blob Upload operation.
+     *
+     * Example usage:
+     *
+     * ```js
+     * const content = "Hello world!";
+     * const uploadBlobResponse = await blockBlobClient.upload(content, content.length);
+     * ```
+     */
+    async upload(body, contentLength, options = {}) {
+        options.conditions = options.conditions || {};
+        ensureCpkIfSpecified(options.customerProvidedKey, this.isHttps);
+        return tracingClient.withSpan("BlockBlobClient-upload", options, async (updatedOptions) => {
+            var _a, _b, _c;
+            return assertResponse(await this.blockBlobContext.upload(contentLength, body, {
+                abortSignal: options.abortSignal,
+                blobHttpHeaders: options.blobHTTPHeaders,
+                leaseAccessConditions: options.conditions,
+                metadata: options.metadata,
+                modifiedAccessConditions: Object.assign(Object.assign({}, options.conditions), { ifTags: (_a = options.conditions) === null || _a === void 0 ? void 0 : _a.tagConditions }),
+                requestOptions: {
+                    onUploadProgress: options.onProgress,
+                },
+                cpkInfo: options.customerProvidedKey,
+                encryptionScope: options.encryptionScope,
+                immutabilityPolicyExpiry: (_b = options.immutabilityPolicy) === null || _b === void 0 ? void 0 : _b.expiriesOn,
+                immutabilityPolicyMode: (_c = options.immutabilityPolicy) === null || _c === void 0 ? void 0 : _c.policyMode,
+                legalHold: options.legalHold,
+                tier: toAccessTier(options.tier),
+                blobTagsString: toBlobTagsString(options.tags),
+                tracingOptions: updatedOptions.tracingOptions,
+            }));
+        });
+    }
+    /**
+     * Creates a new Block Blob where the contents of the blob are read from a given URL.
+     * This API is supported beginning with the 2020-04-08 version. Partial updates
+     * are not supported with Put Blob from URL; the content of an existing blob is overwritten with
+     * the content of the new blob.  To perform partial updates to a block blob’s contents using a
+     * source URL, use {@link stageBlockFromURL} and {@link commitBlockList}.
+     *
+     * @param sourceURL - Specifies the URL of the blob. The value
+     *                           may be a URL of up to 2 KB in length that specifies a blob.
+     *                           The value should be URL-encoded as it would appear
+     *                           in a request URI. The source blob must either be public
+     *                           or must be authenticated via a shared access signature.
+     *                           If the source blob is public, no authentication is required
+     *                           to perform the operation. Here are some examples of source object URLs:
+     *                           - https://myaccount.blob.core.windows.net/mycontainer/myblob
+     *                           - https://myaccount.blob.core.windows.net/mycontainer/myblob?snapshot=<DateTime>
+     * @param options - Optional parameters.
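+     *
+     * Example usage (a sketch; `sourceUrl` is a placeholder for a publicly readable
+     * source URL or one carrying a SAS token):
+     *
+     * ```js
+     * await blockBlobClient.syncUploadFromURL(sourceUrl);
+     * ```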
+     */
+    async syncUploadFromURL(sourceURL, options = {}) {
+        options.conditions = options.conditions || {};
+        ensureCpkIfSpecified(options.customerProvidedKey, this.isHttps);
+        return tracingClient.withSpan("BlockBlobClient-syncUploadFromURL", options, async (updatedOptions) => {
+            var _a, _b, _c, _d, _e, _f;
+            return assertResponse(await this.blockBlobContext.putBlobFromUrl(0, sourceURL, Object.assign(Object.assign({}, options), { blobHttpHeaders: options.blobHTTPHeaders, leaseAccessConditions: options.conditions, modifiedAccessConditions: Object.assign(Object.assign({}, options.conditions), { ifTags: (_a = options.conditions) === null || _a === void 0 ? void 0 : _a.tagConditions }), sourceModifiedAccessConditions: {
+                    sourceIfMatch: (_b = options.sourceConditions) === null || _b === void 0 ? void 0 : _b.ifMatch,
+                    sourceIfModifiedSince: (_c = options.sourceConditions) === null || _c === void 0 ? void 0 : _c.ifModifiedSince,
+                    sourceIfNoneMatch: (_d = options.sourceConditions) === null || _d === void 0 ? void 0 : _d.ifNoneMatch,
+                    sourceIfUnmodifiedSince: (_e = options.sourceConditions) === null || _e === void 0 ? void 0 : _e.ifUnmodifiedSince,
+                    sourceIfTags: (_f = options.sourceConditions) === null || _f === void 0 ? void 0 : _f.tagConditions,
+                }, cpkInfo: options.customerProvidedKey, copySourceAuthorization: httpAuthorizationToString(options.sourceAuthorization), tier: toAccessTier(options.tier), blobTagsString: toBlobTagsString(options.tags), copySourceTags: options.copySourceTags, tracingOptions: updatedOptions.tracingOptions })));
+        });
+    }
+    /**
+     * Uploads the specified block to the block blob's "staging area" to be later
+     * committed by a call to commitBlockList.
+     * @see https://docs.microsoft.com/rest/api/storageservices/put-block
+     *
+     * @param blockId - A 64-byte value that is base64-encoded
+     * @param body - Data to upload to the staging area.
+     * @param contentLength - Number of bytes to upload.
+     * @param options - Options to the Block Blob Stage Block operation.
+     * @returns Response data for the Block Blob Stage Block operation.
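+     *
+     * Example usage (a sketch; block IDs must be base64-encoded strings of equal length):
+     *
+     * ```js
+     * const content = "Hello World!";
+     * const blockId = Buffer.from("block-00000").toString("base64");
+     * await blockBlobClient.stageBlock(blockId, content, content.length);
+     * // Commit the staged block(s) to create or overwrite the blob.
+     * await blockBlobClient.commitBlockList([blockId]);
+     * ```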
+     */
+    async stageBlock(blockId, body, contentLength, options = {}) {
+        ensureCpkIfSpecified(options.customerProvidedKey, this.isHttps);
+        return tracingClient.withSpan("BlockBlobClient-stageBlock", options, async (updatedOptions) => {
+            return assertResponse(await this.blockBlobContext.stageBlock(blockId, contentLength, body, {
+                abortSignal: options.abortSignal,
+                leaseAccessConditions: options.conditions,
+                requestOptions: {
+                    onUploadProgress: options.onProgress,
+                },
+                transactionalContentMD5: options.transactionalContentMD5,
+                transactionalContentCrc64: options.transactionalContentCrc64,
+                cpkInfo: options.customerProvidedKey,
+                encryptionScope: options.encryptionScope,
+                tracingOptions: updatedOptions.tracingOptions,
+            }));
+        });
+    }
+    /**
+     * The Stage Block From URL operation creates a new block to be committed as part
+     * of a blob where the contents are read from a URL.
+     * This API is available starting in version 2018-03-28.
+     * @see https://docs.microsoft.com/en-us/rest/api/storageservices/put-block-from-url
+     *
+     * @param blockId - A 64-byte value that is base64-encoded
+     * @param sourceURL - Specifies the URL of the blob. The value
+     *                           may be a URL of up to 2 KB in length that specifies a blob.
+     *                           The value should be URL-encoded as it would appear
+     *                           in a request URI. The source blob must either be public
+     *                           or must be authenticated via a shared access signature.
+     *                           If the source blob is public, no authentication is required
+     *                           to perform the operation. Here are some examples of source object URLs:
+     *                           - https://myaccount.blob.core.windows.net/mycontainer/myblob
+     *                           - https://myaccount.blob.core.windows.net/mycontainer/myblob?snapshot=<DateTime>
+     * @param offset - From which position of the blob to download, greater than or equal to 0
+     * @param count - How much data to be downloaded, greater than 0. Will download to the end when undefined
+     * @param options - Options to the Block Blob Stage Block From URL operation.
+     * @returns Response data for the Block Blob Stage Block From URL operation.
+     */
+    async stageBlockFromURL(blockId, sourceURL, offset = 0, count, options = {}) {
+        ensureCpkIfSpecified(options.customerProvidedKey, this.isHttps);
+        return tracingClient.withSpan("BlockBlobClient-stageBlockFromURL", options, async (updatedOptions) => {
+            return assertResponse(await this.blockBlobContext.stageBlockFromURL(blockId, 0, sourceURL, {
+                abortSignal: options.abortSignal,
+                leaseAccessConditions: options.conditions,
+                sourceContentMD5: options.sourceContentMD5,
+                sourceContentCrc64: options.sourceContentCrc64,
+                sourceRange: offset === 0 && !count ? undefined : rangeToString({ offset, count }),
+                cpkInfo: options.customerProvidedKey,
+                encryptionScope: options.encryptionScope,
+                copySourceAuthorization: httpAuthorizationToString(options.sourceAuthorization),
+                tracingOptions: updatedOptions.tracingOptions,
+            }));
+        });
+    }
+    /**
+     * Writes a blob by specifying the list of block IDs that make up the blob.
+     * In order to be written as part of a blob, a block must have been successfully written
+     * to the server in a prior {@link stageBlock} operation. You can call {@link commitBlockList} to
+     * update a blob by uploading only those blocks that have changed, then committing the new and existing
+     * blocks together. Any blocks not specified in the block list are permanently deleted.
+     * @see https://docs.microsoft.com/rest/api/storageservices/put-block-list
+     *
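+     * Example of staging two blocks and committing them (an illustrative sketch; assumes an existing
+     * `blockBlobClient`; Node.js shown):
+     *
+     * ```js
+     * // Block IDs must be base64-encoded and all of the same length.
+     * const blockIds = ["block-000", "block-001"].map((id) => Buffer.from(id).toString("base64"));
+     * await blockBlobClient.stageBlock(blockIds[0], "Hello ", 6);
+     * await blockBlobClient.stageBlock(blockIds[1], "World!", 6);
+     * // Commit in the desired order; staged blocks not listed here are discarded.
+     * await blockBlobClient.commitBlockList(blockIds);
+     * ```
+     *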
+     * @param blocks - Array of 64-byte values that are base64-encoded
+     * @param options - Options to the Block Blob Commit Block List operation.
+     * @returns Response data for the Block Blob Commit Block List operation.
+     */
+    async commitBlockList(blocks, options = {}) {
+        options.conditions = options.conditions || {};
+        ensureCpkIfSpecified(options.customerProvidedKey, this.isHttps);
+        return tracingClient.withSpan("BlockBlobClient-commitBlockList", options, async (updatedOptions) => {
+            var _a, _b, _c;
+            return assertResponse(await this.blockBlobContext.commitBlockList({ latest: blocks }, {
+                abortSignal: options.abortSignal,
+                blobHttpHeaders: options.blobHTTPHeaders,
+                leaseAccessConditions: options.conditions,
+                metadata: options.metadata,
+                modifiedAccessConditions: Object.assign(Object.assign({}, options.conditions), { ifTags: (_a = options.conditions) === null || _a === void 0 ? void 0 : _a.tagConditions }),
+                cpkInfo: options.customerProvidedKey,
+                encryptionScope: options.encryptionScope,
+                immutabilityPolicyExpiry: (_b = options.immutabilityPolicy) === null || _b === void 0 ? void 0 : _b.expiriesOn,
+                immutabilityPolicyMode: (_c = options.immutabilityPolicy) === null || _c === void 0 ? void 0 : _c.policyMode,
+                legalHold: options.legalHold,
+                tier: toAccessTier(options.tier),
+                blobTagsString: toBlobTagsString(options.tags),
+                tracingOptions: updatedOptions.tracingOptions,
+            }));
+        });
+    }
+    /**
+     * Returns the list of blocks that have been uploaded as part of a block blob
+     * using the specified block list filter.
+     * @see https://docs.microsoft.com/rest/api/storageservices/get-block-list
+     *
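+     * Example (an illustrative sketch; assumes an existing `blockBlobClient`):
+     *
+     * ```js
+     * const blockList = await blockBlobClient.getBlockList("all");
+     * console.log(`committed: ${blockList.committedBlocks.length}, uncommitted: ${blockList.uncommittedBlocks.length}`);
+     * ```
+     *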
+     * @param listType - Specifies whether to return the list of committed blocks,
+     *                                        the list of uncommitted blocks, or both lists together.
+     * @param options - Options to the Block Blob Get Block List operation.
+     * @returns Response data for the Block Blob Get Block List operation.
+     */
+    async getBlockList(listType, options = {}) {
+        return tracingClient.withSpan("BlockBlobClient-getBlockList", options, async (updatedOptions) => {
+            var _a;
+            const res = assertResponse(await this.blockBlobContext.getBlockList(listType, {
+                abortSignal: options.abortSignal,
+                leaseAccessConditions: options.conditions,
+                modifiedAccessConditions: Object.assign(Object.assign({}, options.conditions), { ifTags: (_a = options.conditions) === null || _a === void 0 ? void 0 : _a.tagConditions }),
+                tracingOptions: updatedOptions.tracingOptions,
+            }));
+            if (!res.committedBlocks) {
+                res.committedBlocks = [];
+            }
+            if (!res.uncommittedBlocks) {
+                res.uncommittedBlocks = [];
+            }
+            return res;
+        });
+    }
+    // High level functions
+    /**
+     * Uploads a Buffer(Node.js)/Blob(browsers)/ArrayBuffer/ArrayBufferView object to a BlockBlob.
+     *
+     * When data length is no more than the specified {@link BlockBlobParallelUploadOptions.maxSingleShotSize} (default is
+     * {@link BLOCK_BLOB_MAX_UPLOAD_BLOB_BYTES}), this method will use 1 {@link upload} call to finish the upload.
+     * Otherwise, this method will call {@link stageBlock} to upload blocks, and finally call {@link commitBlockList}
+     * to commit the block list.
+     *
+     * A common {@link BlockBlobParallelUploadOptions.blobHTTPHeaders} option to set is
+     * `blobContentType`, enabling the browser to provide
+     * functionality based on file type.
+     *
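+     * Example (an illustrative sketch; assumes an existing `blockBlobClient`; Node.js shown):
+     *
+     * ```js
+     * const content = Buffer.from("Hello, world!");
+     * await blockBlobClient.uploadData(content, {
+     *   blobHTTPHeaders: { blobContentType: "text/plain" },
+     *   onProgress: (ev) => console.log(`${ev.loadedBytes} bytes uploaded`),
+     * });
+     * ```
+     *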
+     * @param data - Buffer(Node.js), Blob, ArrayBuffer or ArrayBufferView
+     * @param options -
+     */
+    async uploadData(data, options = {}) {
+        return tracingClient.withSpan("BlockBlobClient-uploadData", options, async (updatedOptions) => {
+            if (coreUtil.isNode) {
+                let buffer;
+                if (data instanceof Buffer) {
+                    buffer = data;
+                }
+                else if (data instanceof ArrayBuffer) {
+                    buffer = Buffer.from(data);
+                }
+                else {
+                    data = data;
+                    buffer = Buffer.from(data.buffer, data.byteOffset, data.byteLength);
+                }
+                return this.uploadSeekableInternal((offset, size) => buffer.slice(offset, offset + size), buffer.byteLength, updatedOptions);
+            }
+            else {
+                const browserBlob = new Blob([data]);
+                return this.uploadSeekableInternal((offset, size) => browserBlob.slice(offset, offset + size), browserBlob.size, updatedOptions);
+            }
+        });
+    }
+    /**
+     * ONLY AVAILABLE IN BROWSERS.
+     *
+     * Uploads a browser Blob/File/ArrayBuffer/ArrayBufferView object to a block blob.
+     *
+     * When the buffer length is less than or equal to 256MB, this method will use 1 upload call to finish the upload.
+     * Otherwise, this method will call {@link stageBlock} to upload blocks, and finally call
+     * {@link commitBlockList} to commit the block list.
+     *
+     * A common {@link BlockBlobParallelUploadOptions.blobHTTPHeaders} option to set is
+     * `blobContentType`, enabling the browser to provide
+     * functionality based on file type.
+     *
+     * @deprecated Use {@link uploadData} instead.
+     *
+     * @param browserData - Blob, File, ArrayBuffer or ArrayBufferView
+     * @param options - Options to upload browser data.
+     * @returns Response data for the Blob Upload operation.
+     */
+    async uploadBrowserData(browserData, options = {}) {
+        return tracingClient.withSpan("BlockBlobClient-uploadBrowserData", options, async (updatedOptions) => {
+            const browserBlob = new Blob([browserData]);
+            return this.uploadSeekableInternal((offset, size) => browserBlob.slice(offset, offset + size), browserBlob.size, updatedOptions);
+        });
+    }
+    /**
+     *
+     * Uploads data to a block blob. Requires a bodyFactory as the data source,
+     * which needs to return a {@link HttpRequestBody} object with the offset and size provided.
+     *
+     * When data length is no more than the specified {@link BlockBlobParallelUploadOptions.maxSingleShotSize} (default is
+     * {@link BLOCK_BLOB_MAX_UPLOAD_BLOB_BYTES}), this method will use 1 {@link upload} call to finish the upload.
+     * Otherwise, this method will call {@link stageBlock} to upload blocks, and finally call {@link commitBlockList}
+     * to commit the block list.
+     *
+     * @param bodyFactory -
+     * @param size - size of the data to upload.
+     * @param options - Options to Upload to Block Blob operation.
+     * @returns Response data for the Blob Upload operation.
+     */
+    async uploadSeekableInternal(bodyFactory, size, options = {}) {
+        var _a, _b;
+        let blockSize = (_a = options.blockSize) !== null && _a !== void 0 ? _a : 0;
+        if (blockSize < 0 || blockSize > BLOCK_BLOB_MAX_STAGE_BLOCK_BYTES) {
+            throw new RangeError(`blockSize option must be >= 0 and <= ${BLOCK_BLOB_MAX_STAGE_BLOCK_BYTES}`);
+        }
+        const maxSingleShotSize = (_b = options.maxSingleShotSize) !== null && _b !== void 0 ? _b : BLOCK_BLOB_MAX_UPLOAD_BLOB_BYTES;
+        if (maxSingleShotSize < 0 || maxSingleShotSize > BLOCK_BLOB_MAX_UPLOAD_BLOB_BYTES) {
+            throw new RangeError(`maxSingleShotSize option must be >= 0 and <= ${BLOCK_BLOB_MAX_UPLOAD_BLOB_BYTES}`);
+        }
+        if (blockSize === 0) {
+            if (size > BLOCK_BLOB_MAX_STAGE_BLOCK_BYTES * BLOCK_BLOB_MAX_BLOCKS) {
+                throw new RangeError(`${size} is too large to upload to a block blob.`);
+            }
+            if (size > maxSingleShotSize) {
+                blockSize = Math.ceil(size / BLOCK_BLOB_MAX_BLOCKS);
+                if (blockSize < DEFAULT_BLOB_DOWNLOAD_BLOCK_BYTES) {
+                    blockSize = DEFAULT_BLOB_DOWNLOAD_BLOCK_BYTES;
+                }
+            }
+        }
+        if (!options.blobHTTPHeaders) {
+            options.blobHTTPHeaders = {};
+        }
+        if (!options.conditions) {
+            options.conditions = {};
+        }
+        return tracingClient.withSpan("BlockBlobClient-uploadSeekableInternal", options, async (updatedOptions) => {
+            if (size <= maxSingleShotSize) {
+                return assertResponse(await this.upload(bodyFactory(0, size), size, updatedOptions));
+            }
+            const numBlocks = Math.floor((size - 1) / blockSize) + 1;
+            if (numBlocks > BLOCK_BLOB_MAX_BLOCKS) {
+                throw new RangeError(`The buffer's size is too big or the BlockSize is too small; ` +
+                    `the number of blocks must be <= ${BLOCK_BLOB_MAX_BLOCKS}`);
+            }
+            const blockList = [];
+            const blockIDPrefix = coreUtil.randomUUID();
+            let transferProgress = 0;
+            const batch = new Batch(options.concurrency);
+            for (let i = 0; i < numBlocks; i++) {
+                batch.addOperation(async () => {
+                    const blockID = generateBlockID(blockIDPrefix, i);
+                    const start = blockSize * i;
+                    const end = i === numBlocks - 1 ? size : start + blockSize;
+                    const contentLength = end - start;
+                    blockList.push(blockID);
+                    await this.stageBlock(blockID, bodyFactory(start, contentLength), contentLength, {
+                        abortSignal: options.abortSignal,
+                        conditions: options.conditions,
+                        encryptionScope: options.encryptionScope,
+                        tracingOptions: updatedOptions.tracingOptions,
+                    });
+                    // Update progress after the block is successfully uploaded to the server, in case the block needs retrying.
+                    // TODO: Hook with convenience layer progress event in finer level
+                    transferProgress += contentLength;
+                    if (options.onProgress) {
+                        options.onProgress({
+                            loadedBytes: transferProgress,
+                        });
+                    }
+                });
+            }
+            await batch.do();
+            return this.commitBlockList(blockList, updatedOptions);
+        });
+    }
+    /**
+     * ONLY AVAILABLE IN NODE.JS RUNTIME.
+     *
+     * Uploads a local file in blocks to a block blob.
+     *
+     * When the file size is less than or equal to 256MB, this method will use 1 upload call to finish the upload.
+     * Otherwise, this method will call stageBlock to upload blocks, and finally call commitBlockList
+     * to commit the block list.
+     *
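+     * Example (an illustrative sketch; assumes an existing `blockBlobClient` and a local file at the shown path):
+     *
+     * ```js
+     * await blockBlobClient.uploadFile("./local-file.bin", {
+     *   blockSize: 4 * 1024 * 1024, // 4 MiB block size
+     *   concurrency: 20, // up to 20 blocks uploaded in parallel
+     *   onProgress: (ev) => console.log(`${ev.loadedBytes} bytes uploaded`),
+     * });
+     * ```
+     *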
+     * @param filePath - Full path of local file
+     * @param options - Options to Upload to Block Blob operation.
+     * @returns Response data for the Blob Upload operation.
+     */
+    async uploadFile(filePath, options = {}) {
+        return tracingClient.withSpan("BlockBlobClient-uploadFile", options, async (updatedOptions) => {
+            const size = (await fsStat(filePath)).size;
+            return this.uploadSeekableInternal((offset, count) => {
+                return () => fsCreateReadStream(filePath, {
+                    autoClose: true,
+                    end: count ? offset + count - 1 : Infinity,
+                    start: offset,
+                });
+            }, size, Object.assign(Object.assign({}, options), { tracingOptions: updatedOptions.tracingOptions }));
+        });
+    }
+    /**
+     * ONLY AVAILABLE IN NODE.JS RUNTIME.
+     *
+     * Uploads a Node.js Readable stream into a block blob.
+     *
+     * PERFORMANCE IMPROVEMENT TIPS:
+     * * Set the input stream's highWaterMark to the same value as the bufferSize
+     *    parameter to avoid Buffer.concat() operations.
+     *
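+     * Example (an illustrative sketch; assumes an existing `blockBlobClient` and a local file at the shown path):
+     *
+     * ```js
+     * const fs = require("fs");
+     * // Match highWaterMark to bufferSize to avoid extra Buffer.concat() work.
+     * const stream = fs.createReadStream("./local-file.bin", { highWaterMark: 4 * 1024 * 1024 });
+     * await blockBlobClient.uploadStream(stream, 4 * 1024 * 1024, 20, {
+     *   onProgress: (ev) => console.log(`${ev.loadedBytes} bytes uploaded`),
+     * });
+     * ```
+     *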
+     * @param stream - Node.js Readable stream
+     * @param bufferSize - Size of every buffer allocated, also the block size in the uploaded block blob. Default value is 8MB
+     * @param maxConcurrency - Max concurrency indicates the max number of buffers that can be allocated,
+     *                                 which correlates positively with the maximum upload concurrency. Default value is 5
+     * @param options - Options to Upload Stream to Block Blob operation.
+     * @returns Response data for the Blob Upload operation.
+     */
+    async uploadStream(stream, bufferSize = DEFAULT_BLOCK_BUFFER_SIZE_BYTES, maxConcurrency = 5, options = {}) {
+        if (!options.blobHTTPHeaders) {
+            options.blobHTTPHeaders = {};
+        }
+        if (!options.conditions) {
+            options.conditions = {};
+        }
+        return tracingClient.withSpan("BlockBlobClient-uploadStream", options, async (updatedOptions) => {
+            let blockNum = 0;
+            const blockIDPrefix = coreUtil.randomUUID();
+            let transferProgress = 0;
+            const blockList = [];
+            const scheduler = new BufferScheduler(stream, bufferSize, maxConcurrency, async (body, length) => {
+                const blockID = generateBlockID(blockIDPrefix, blockNum);
+                blockList.push(blockID);
+                blockNum++;
+                await this.stageBlock(blockID, body, length, {
+                    customerProvidedKey: options.customerProvidedKey,
+                    conditions: options.conditions,
+                    encryptionScope: options.encryptionScope,
+                    tracingOptions: updatedOptions.tracingOptions,
+                });
+                // Update progress after the block is successfully uploaded to the server, in case the block needs retrying.
+                transferProgress += length;
+                if (options.onProgress) {
+                    options.onProgress({ loadedBytes: transferProgress });
+                }
+            }, 
+            // concurrency should be set to a value smaller than maxConcurrency, which helps
+            // reduce the chance that an outgoing handler waits for stream data; in that
+            // situation, outgoing handlers are blocked.
+            // The outgoing queue shouldn't be empty.
+            Math.ceil((maxConcurrency / 4) * 3));
+            await scheduler.do();
+            return assertResponse(await this.commitBlockList(blockList, Object.assign(Object.assign({}, options), { tracingOptions: updatedOptions.tracingOptions })));
+        });
+    }
+}
+/**
+ * PageBlobClient defines a set of operations applicable to page blobs.
+ */
+class PageBlobClient extends BlobClient {
+    constructor(urlOrConnectionString, credentialOrPipelineOrContainerName, blobNameOrOptions, 
+    // Legacy, no fix for eslint error without breaking. Disable it for this interface.
+    /* eslint-disable-next-line @azure/azure-sdk/ts-naming-options*/
+    options) {
+        // In TypeScript we cannot simply pass all parameters to super() like below so have to duplicate the code instead.
+        //   super(s, credentialOrPipelineOrContainerNameOrOptions, blobNameOrOptions, options);
+        let pipeline;
+        let url;
+        options = options || {};
+        if (isPipelineLike(credentialOrPipelineOrContainerName)) {
+            // (url: string, pipeline: Pipeline)
+            url = urlOrConnectionString;
+            pipeline = credentialOrPipelineOrContainerName;
+        }
+        else if ((coreUtil.isNode && credentialOrPipelineOrContainerName instanceof StorageSharedKeyCredential) ||
+            credentialOrPipelineOrContainerName instanceof AnonymousCredential ||
+            coreAuth.isTokenCredential(credentialOrPipelineOrContainerName)) {
+            // (url: string, credential?: StorageSharedKeyCredential | AnonymousCredential | TokenCredential, options?: StoragePipelineOptions)
+            url = urlOrConnectionString;
+            options = blobNameOrOptions;
+            pipeline = newPipeline(credentialOrPipelineOrContainerName, options);
+        }
+        else if (!credentialOrPipelineOrContainerName &&
+            typeof credentialOrPipelineOrContainerName !== "string") {
+            // (url: string, credential?: StorageSharedKeyCredential | AnonymousCredential | TokenCredential, options?: StoragePipelineOptions)
+            // The second parameter is undefined. Use anonymous credential.
+            url = urlOrConnectionString;
+            pipeline = newPipeline(new AnonymousCredential(), options);
+        }
+        else if (credentialOrPipelineOrContainerName &&
+            typeof credentialOrPipelineOrContainerName === "string" &&
+            blobNameOrOptions &&
+            typeof blobNameOrOptions === "string") {
+            // (connectionString: string, containerName: string, blobName: string, options?: StoragePipelineOptions)
+            const containerName = credentialOrPipelineOrContainerName;
+            const blobName = blobNameOrOptions;
+            const extractedCreds = extractConnectionStringParts(urlOrConnectionString);
+            if (extractedCreds.kind === "AccountConnString") {
+                if (coreUtil.isNode) {
+                    const sharedKeyCredential = new StorageSharedKeyCredential(extractedCreds.accountName, extractedCreds.accountKey);
+                    url = appendToURLPath(appendToURLPath(extractedCreds.url, encodeURIComponent(containerName)), encodeURIComponent(blobName));
+                    if (!options.proxyOptions) {
+                        options.proxyOptions = coreRestPipeline.getDefaultProxySettings(extractedCreds.proxyUri);
+                    }
+                    pipeline = newPipeline(sharedKeyCredential, options);
+                }
+                else {
+                    throw new Error("Account connection string is only supported in Node.js environment");
+                }
+            }
+            else if (extractedCreds.kind === "SASConnString") {
+                url =
+                    appendToURLPath(appendToURLPath(extractedCreds.url, encodeURIComponent(containerName)), encodeURIComponent(blobName)) +
+                        "?" +
+                        extractedCreds.accountSas;
+                pipeline = newPipeline(new AnonymousCredential(), options);
+            }
+            else {
+                throw new Error("Connection string must be either an Account connection string or a SAS connection string");
+            }
+        }
+        else {
+            throw new Error("Expecting non-empty strings for containerName and blobName parameters");
+        }
+        super(url, pipeline);
+        this.pageBlobContext = this.storageClientContext.pageBlob;
+    }
+    /**
+     * Creates a new PageBlobClient object identical to the source but with the
+     * specified snapshot timestamp.
+     * Provide "" will remove the snapshot and return a Client to the base blob.
+     *
+     * @param snapshot - The snapshot timestamp.
+     * @returns A new PageBlobClient object identical to the source but with the specified snapshot timestamp.
+     */
+    withSnapshot(snapshot) {
+        return new PageBlobClient(setURLParameter(this.url, URLConstants.Parameters.SNAPSHOT, snapshot.length === 0 ? undefined : snapshot), this.pipeline);
+    }
+    /**
+     * Creates a page blob of the specified length. Call uploadPages to upload
+     * data to a page blob.
+     * @see https://docs.microsoft.com/rest/api/storageservices/put-blob
+     *
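+     * Example (an illustrative sketch; assumes an existing `pageBlobClient`):
+     *
+     * ```js
+     * // The size of a page blob must be a multiple of 512 bytes.
+     * await pageBlobClient.create(512 * 1024, {
+     *   blobHTTPHeaders: { blobContentType: "application/octet-stream" },
+     * });
+     * ```
+     *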
+     * @param size - size of the page blob.
+     * @param options - Options to the Page Blob Create operation.
+     * @returns Response data for the Page Blob Create operation.
+     */
+    async create(size, options = {}) {
+        options.conditions = options.conditions || {};
+        ensureCpkIfSpecified(options.customerProvidedKey, this.isHttps);
+        return tracingClient.withSpan("PageBlobClient-create", options, async (updatedOptions) => {
+            var _a, _b, _c;
+            return assertResponse(await this.pageBlobContext.create(0, size, {
+                abortSignal: options.abortSignal,
+                blobHttpHeaders: options.blobHTTPHeaders,
+                blobSequenceNumber: options.blobSequenceNumber,
+                leaseAccessConditions: options.conditions,
+                metadata: options.metadata,
+                modifiedAccessConditions: Object.assign(Object.assign({}, options.conditions), { ifTags: (_a = options.conditions) === null || _a === void 0 ? void 0 : _a.tagConditions }),
+                cpkInfo: options.customerProvidedKey,
+                encryptionScope: options.encryptionScope,
+                immutabilityPolicyExpiry: (_b = options.immutabilityPolicy) === null || _b === void 0 ? void 0 : _b.expiriesOn,
+                immutabilityPolicyMode: (_c = options.immutabilityPolicy) === null || _c === void 0 ? void 0 : _c.policyMode,
+                legalHold: options.legalHold,
+                tier: toAccessTier(options.tier),
+                blobTagsString: toBlobTagsString(options.tags),
+                tracingOptions: updatedOptions.tracingOptions,
+            }));
+        });
+    }
+    /**
+     * Creates a page blob of the specified length. Call uploadPages to upload
+     * data to a page blob. If a blob with the same name already exists, the content
+     * of the existing blob will remain unchanged.
+     * @see https://docs.microsoft.com/rest/api/storageservices/put-blob
+     *
+     * @param size - size of the page blob.
+     * @param options -
+     */
+    async createIfNotExists(size, options = {}) {
+        return tracingClient.withSpan("PageBlobClient-createIfNotExists", options, async (updatedOptions) => {
+            var _a, _b;
+            try {
+                const conditions = { ifNoneMatch: ETagAny };
+                const res = assertResponse(await this.create(size, Object.assign(Object.assign({}, options), { conditions, tracingOptions: updatedOptions.tracingOptions })));
+                return Object.assign(Object.assign({ succeeded: true }, res), { _response: res._response });
+            }
+            catch (e) {
+                if (((_a = e.details) === null || _a === void 0 ? void 0 : _a.errorCode) === "BlobAlreadyExists") {
+                    return Object.assign(Object.assign({ succeeded: false }, (_b = e.response) === null || _b === void 0 ? void 0 : _b.parsedHeaders), { _response: e.response });
+                }
+                throw e;
+            }
+        });
+    }
+    /**
+     * Writes 1 or more pages to the page blob. The start and end offsets must be a multiple of 512.
+     * @see https://docs.microsoft.com/rest/api/storageservices/put-page
+     *
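+     * Example (an illustrative sketch; assumes an existing `pageBlobClient` of at least 1024 bytes; Node.js shown):
+     *
+     * ```js
+     * // Offsets and counts must be multiples of 512.
+     * const page = Buffer.alloc(512, "a");
+     * await pageBlobClient.uploadPages(page, 512, page.length);
+     * ```
+     *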
+     * @param body - Data to upload
+     * @param offset - Offset of destination page blob
+     * @param count - Content length of the body, also number of bytes to be uploaded
+     * @param options - Options to the Page Blob Upload Pages operation.
+     * @returns Response data for the Page Blob Upload Pages operation.
+     */
+    async uploadPages(body, offset, count, options = {}) {
+        options.conditions = options.conditions || {};
+        ensureCpkIfSpecified(options.customerProvidedKey, this.isHttps);
+        return tracingClient.withSpan("PageBlobClient-uploadPages", options, async (updatedOptions) => {
+            var _a;
+            return assertResponse(await this.pageBlobContext.uploadPages(count, body, {
+                abortSignal: options.abortSignal,
+                leaseAccessConditions: options.conditions,
+                modifiedAccessConditions: Object.assign(Object.assign({}, options.conditions), { ifTags: (_a = options.conditions) === null || _a === void 0 ? void 0 : _a.tagConditions }),
+                requestOptions: {
+                    onUploadProgress: options.onProgress,
+                },
+                range: rangeToString({ offset, count }),
+                sequenceNumberAccessConditions: options.conditions,
+                transactionalContentMD5: options.transactionalContentMD5,
+                transactionalContentCrc64: options.transactionalContentCrc64,
+                cpkInfo: options.customerProvidedKey,
+                encryptionScope: options.encryptionScope,
+                tracingOptions: updatedOptions.tracingOptions,
+            }));
+        });
+    }
+    /**
+     * The Upload Pages operation writes a range of pages to a page blob where the
+     * contents are read from a URL.
+     * @see https://docs.microsoft.com/en-us/rest/api/storageservices/put-page-from-url
+     *
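+     * Example (an illustrative sketch; assumes an existing destination `pageBlobClient` and a
+     * `sourcePageBlobUrl` that is either public or includes a SAS token):
+     *
+     * ```js
+     * // Copy the first 512 bytes of the source page blob to offset 0 of this blob.
+     * await pageBlobClient.uploadPagesFromURL(sourcePageBlobUrl, 0, 0, 512);
+     * ```
+     *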
+     * @param sourceURL - Specify a URL to the copy source. A Shared Access Signature (SAS) may be needed for authentication.
+     * @param sourceOffset - The source offset to copy from. Pass 0 to copy from the beginning of source page blob
+     * @param destOffset - Offset of destination page blob
+     * @param count - Number of bytes to be uploaded from source page blob
+     * @param options -
+     */
+    async uploadPagesFromURL(sourceURL, sourceOffset, destOffset, count, options = {}) {
+        options.conditions = options.conditions || {};
+        options.sourceConditions = options.sourceConditions || {};
+        ensureCpkIfSpecified(options.customerProvidedKey, this.isHttps);
+        return tracingClient.withSpan("PageBlobClient-uploadPagesFromURL", options, async (updatedOptions) => {
+            var _a, _b, _c, _d, _e;
+            return assertResponse(await this.pageBlobContext.uploadPagesFromURL(sourceURL, rangeToString({ offset: sourceOffset, count }), 0, rangeToString({ offset: destOffset, count }), {
+                abortSignal: options.abortSignal,
+                sourceContentMD5: options.sourceContentMD5,
+                sourceContentCrc64: options.sourceContentCrc64,
+                leaseAccessConditions: options.conditions,
+                sequenceNumberAccessConditions: options.conditions,
+                modifiedAccessConditions: Object.assign(Object.assign({}, options.conditions), { ifTags: (_a = options.conditions) === null || _a === void 0 ? void 0 : _a.tagConditions }),
+                sourceModifiedAccessConditions: {
+                    sourceIfMatch: (_b = options.sourceConditions) === null || _b === void 0 ? void 0 : _b.ifMatch,
+                    sourceIfModifiedSince: (_c = options.sourceConditions) === null || _c === void 0 ? void 0 : _c.ifModifiedSince,
+                    sourceIfNoneMatch: (_d = options.sourceConditions) === null || _d === void 0 ? void 0 : _d.ifNoneMatch,
+                    sourceIfUnmodifiedSince: (_e = options.sourceConditions) === null || _e === void 0 ? void 0 : _e.ifUnmodifiedSince,
+                },
+                cpkInfo: options.customerProvidedKey,
+                encryptionScope: options.encryptionScope,
+                copySourceAuthorization: httpAuthorizationToString(options.sourceAuthorization),
+                tracingOptions: updatedOptions.tracingOptions,
+            }));
+        });
+    }
+    /**
+     * Frees the specified pages from the page blob.
+     * @see https://docs.microsoft.com/rest/api/storageservices/put-page
+     *
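+     * Example (an illustrative sketch; assumes an existing `pageBlobClient`):
+     *
+     * ```js
+     * // Zero out the first 512-byte page.
+     * await pageBlobClient.clearPages(0, 512);
+     * ```
+     *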
+     * @param offset - Starting byte position of the pages to clear.
+     * @param count - Number of bytes to clear.
+     * @param options - Options to the Page Blob Clear Pages operation.
+     * @returns Response data for the Page Blob Clear Pages operation.
+     */
+    async clearPages(offset = 0, count, options = {}) {
+        options.conditions = options.conditions || {};
+        return tracingClient.withSpan("PageBlobClient-clearPages", options, async (updatedOptions) => {
+            var _a;
+            return assertResponse(await this.pageBlobContext.clearPages(0, {
+                abortSignal: options.abortSignal,
+                leaseAccessConditions: options.conditions,
+                modifiedAccessConditions: Object.assign(Object.assign({}, options.conditions), { ifTags: (_a = options.conditions) === null || _a === void 0 ? void 0 : _a.tagConditions }),
+                range: rangeToString({ offset, count }),
+                sequenceNumberAccessConditions: options.conditions,
+                cpkInfo: options.customerProvidedKey,
+                encryptionScope: options.encryptionScope,
+                tracingOptions: updatedOptions.tracingOptions,
+            }));
+        });
+    }
+    /**
+     * Returns the list of valid page ranges for a page blob or snapshot of a page blob.
+     * @see https://docs.microsoft.com/rest/api/storageservices/get-page-ranges
+     *
+     * @param offset - Starting byte position of the page ranges.
+     * @param count - Number of bytes to get.
+     * @param options - Options to the Page Blob Get Ranges operation.
+     * @returns Response data for the Page Blob Get Ranges operation.
+     */
+    async getPageRanges(offset = 0, count, options = {}) {
+        options.conditions = options.conditions || {};
+        return tracingClient.withSpan("PageBlobClient-getPageRanges", options, async (updatedOptions) => {
+            var _a;
+            const response = assertResponse(await this.pageBlobContext.getPageRanges({
+                abortSignal: options.abortSignal,
+                leaseAccessConditions: options.conditions,
+                modifiedAccessConditions: Object.assign(Object.assign({}, options.conditions), { ifTags: (_a = options.conditions) === null || _a === void 0 ? void 0 : _a.tagConditions }),
+                range: rangeToString({ offset, count }),
+                tracingOptions: updatedOptions.tracingOptions,
+            }));
+            return rangeResponseFromModel(response);
+        });
+    }
+    /**
+     * getPageRangesSegment returns a single segment of page ranges starting from the
+     * specified Marker. Use an empty Marker to start enumeration from the beginning.
+     * After getting a segment, process it, and then call getPageRangesSegment again
+     * (passing the previously-returned Marker) to get the next segment.
+     * @see https://docs.microsoft.com/rest/api/storageservices/get-page-ranges
+     *
+     * @param offset - Starting byte position of the page ranges.
+     * @param count - Number of bytes to get.
+     * @param marker - A string value that identifies the portion of the list to be returned with the next list operation.
+     * @param options - Options to PageBlob Get Page Ranges Segment operation.
+     */
+    async listPageRangesSegment(offset = 0, count, marker, options = {}) {
+        return tracingClient.withSpan("PageBlobClient-getPageRangesSegment", options, async (updatedOptions) => {
+            var _a;
+            return assertResponse(await this.pageBlobContext.getPageRanges({
+                abortSignal: options.abortSignal,
+                leaseAccessConditions: options.conditions,
+                modifiedAccessConditions: Object.assign(Object.assign({}, options.conditions), { ifTags: (_a = options.conditions) === null || _a === void 0 ? void 0 : _a.tagConditions }),
+                range: rangeToString({ offset, count }),
+                marker: marker,
+                maxPageSize: options.maxPageSize,
+                tracingOptions: updatedOptions.tracingOptions,
+            }));
+        });
+    }
+    /**
+     * Returns an AsyncIterableIterator for {@link PageBlobGetPageRangesResponseModel}
+     *
+     * @param offset - Starting byte position of the page ranges.
+     * @param count - Number of bytes to get.
+     * @param marker - A string value that identifies the portion of
+     *                          the page ranges to be returned with the next operation. The
+     *                          operation returns the ContinuationToken value within the response body if it
+     *                          did not return all page ranges remaining within the current page.
+     *                          The ContinuationToken value can be used as the value for
+     *                          the marker parameter in a subsequent call to request the next page of
+     *                          items. The marker value is opaque to the client.
+     * @param options - Options to List Page Ranges operation.
+     */
+    listPageRangeItemSegments() {
+        return tslib.__asyncGenerator(this, arguments, function* listPageRangeItemSegments_1(offset = 0, count, marker, options = {}) {
+            let getPageRangeItemSegmentsResponse;
+            if (!!marker || marker === undefined) {
+                do {
+                    getPageRangeItemSegmentsResponse = yield tslib.__await(this.listPageRangesSegment(offset, count, marker, options));
+                    marker = getPageRangeItemSegmentsResponse.continuationToken;
+                    yield yield tslib.__await(yield tslib.__await(getPageRangeItemSegmentsResponse));
+                } while (marker);
+            }
+        });
+    }
+    /**
+     * Returns an AsyncIterableIterator of {@link PageRangeInfo} objects
+     *
+     * @param offset - Starting byte position of the page ranges.
+     * @param count - Number of bytes to get.
+     * @param options - Options to List Page Ranges operation.
+     */
+    listPageRangeItems() {
+        return tslib.__asyncGenerator(this, arguments, function* listPageRangeItems_1(offset = 0, count, options = {}) {
+            var _a, e_1, _b, _c;
+            let marker;
+            try {
+                for (var _d = true, _e = tslib.__asyncValues(this.listPageRangeItemSegments(offset, count, marker, options)), _f; _f = yield tslib.__await(_e.next()), _a = _f.done, !_a; _d = true) {
+                    _c = _f.value;
+                    _d = false;
+                    const getPageRangesSegment = _c;
+                    yield tslib.__await(yield* tslib.__asyncDelegator(tslib.__asyncValues(ExtractPageRangeInfoItems(getPageRangesSegment))));
+                }
+            }
+            catch (e_1_1) { e_1 = { error: e_1_1 }; }
+            finally {
+                try {
+                    if (!_d && !_a && (_b = _e.return)) yield tslib.__await(_b.call(_e));
+                }
+                finally { if (e_1) throw e_1.error; }
+            }
+        });
+    }
+    /**
+     * Returns an async iterable iterator to list the page ranges for a page blob.
+     * @see https://docs.microsoft.com/rest/api/storageservices/get-page-ranges
+     *
+     *  .byPage() returns an async iterable iterator to list the page ranges for a page blob.
+     *
+     * Example using `for await` syntax:
+     *
+     * ```js
+     * // Get the pageBlobClient before you run these snippets.
+     * // It can be obtained from `blobServiceClient.getContainerClient("<your-container-name>").getPageBlobClient("<your-blob-name>");`
+     * let i = 1;
+     * for await (const pageRange of pageBlobClient.listPageRanges()) {
+     *   console.log(`Page range ${i++}: ${pageRange.start} - ${pageRange.end}`);
+     * }
+     * ```
+     *
+     * Example using `iter.next()`:
+     *
+     * ```js
+     * let i = 1;
+     * let iter = pageBlobClient.listPageRanges();
+     * let pageRangeItem = await iter.next();
+     * while (!pageRangeItem.done) {
+     *   console.log(`Page range ${i++}: ${pageRangeItem.value.start} - ${pageRangeItem.value.end}, IsClear: ${pageRangeItem.value.isClear}`);
+     *   pageRangeItem = await iter.next();
+     * }
+     * ```
+     *
+     * Example using `byPage()`:
+     *
+     * ```js
+     * // passing optional maxPageSize in the page settings
+     * let i = 1;
+     * for await (const response of pageBlobClient.listPageRanges().byPage({ maxPageSize: 20 })) {
+     *   for (const pageRange of response) {
+     *     console.log(`Page range ${i++}: ${pageRange.start} - ${pageRange.end}`);
+     *   }
+     * }
+     * ```
+     *
+     * Example using paging with a marker:
+     *
+     * ```js
+     * let i = 1;
+     * let iterator = pageBlobClient.listPageRanges().byPage({ maxPageSize: 2 });
+     * let response = (await iterator.next()).value;
+     *
+     * // Prints 2 page ranges
+     * for (const pageRange of response) {
+     *   console.log(`Page range ${i++}: ${pageRange.start} - ${pageRange.end}`);
+     * }
+     *
+     * // Gets next marker
+     * let marker = response.continuationToken;
+     *
+     * // Passing next marker as continuationToken
+     *
+     * iterator = pageBlobClient.listPageRanges().byPage({ continuationToken: marker, maxPageSize: 10 });
+     * response = (await iterator.next()).value;
+     *
+     * // Prints 10 page ranges
+     * for (const pageRange of response) {
+     *   console.log(`Page range ${i++}: ${pageRange.start} - ${pageRange.end}`);
+     * }
+     * ```
+     * @param offset - Starting byte position of the page ranges.
+     * @param count - Number of bytes to get.
+     * @param options - Options to the Page Blob Get Ranges operation.
+     * @returns An asyncIterableIterator that supports paging.
+     */
+    listPageRanges(offset = 0, count, options = {}) {
+        options.conditions = options.conditions || {};
+        // AsyncIterableIterator to iterate over blobs
+        const iter = this.listPageRangeItems(offset, count, options);
+        return {
+            /**
+             * The next method, part of the iteration protocol
+             */
+            next() {
+                return iter.next();
+            },
+            /**
+             * The connection to the async iterator, part of the iteration protocol
+             */
+            [Symbol.asyncIterator]() {
+                return this;
+            },
+            /**
+             * Return an AsyncIterableIterator that works a page at a time
+             */
+            byPage: (settings = {}) => {
+                return this.listPageRangeItemSegments(offset, count, settings.continuationToken, Object.assign({ maxPageSize: settings.maxPageSize }, options));
+            },
+        };
+    }
+    /**
+     * Gets the collection of page ranges that differ between a specified snapshot and this page blob.
+     * @see https://docs.microsoft.com/rest/api/storageservices/get-page-ranges
+     *
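+     * Example (an illustrative sketch; assumes an existing `pageBlobClient`; Node.js shown):
+     *
+     * ```js
+     * // Take a snapshot, write more pages, then diff against the snapshot.
+     * const snapshotResponse = await pageBlobClient.createSnapshot();
+     * await pageBlobClient.uploadPages(Buffer.alloc(512, "b"), 0, 512);
+     * const diff = await pageBlobClient.getPageRangesDiff(0, 1024, snapshotResponse.snapshot);
+     * console.log(diff.pageRange, diff.clearRange);
+     * ```
+     *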
+     * @param offset - Starting byte position of the page blob
+     * @param count - Number of bytes to get ranges diff.
+     * @param prevSnapshot - Timestamp of snapshot to retrieve the difference.
+     * @param options - Options to the Page Blob Get Page Ranges Diff operation.
+     * @returns Response data for the Page Blob Get Page Range Diff operation.
+     */
+    async getPageRangesDiff(offset, count, prevSnapshot, options = {}) {
+        options.conditions = options.conditions || {};
+        return tracingClient.withSpan("PageBlobClient-getPageRangesDiff", options, async (updatedOptions) => {
+            var _a;
+            const result = assertResponse(await this.pageBlobContext.getPageRangesDiff({
+                abortSignal: options.abortSignal,
+                leaseAccessConditions: options.conditions,
+                modifiedAccessConditions: Object.assign(Object.assign({}, options.conditions), { ifTags: (_a = options.conditions) === null || _a === void 0 ? void 0 : _a.tagConditions }),
+                prevsnapshot: prevSnapshot,
+                range: rangeToString({ offset, count }),
+                tracingOptions: updatedOptions.tracingOptions,
+            }));
+            return rangeResponseFromModel(result);
+        });
+    }
+    /**
+     * getPageRangesDiffSegment returns a single segment of page ranges starting from the
+     * specified Marker for difference between previous snapshot and the target page blob.
+     * Use an empty Marker to start enumeration from the beginning.
+     * After getting a segment, process it, and then call getPageRangesDiffSegment again
+     * (passing the previously-returned Marker) to get the next segment.
+     * @see https://docs.microsoft.com/rest/api/storageservices/get-page-ranges
+     *
+     * @param offset - Starting byte position of the page ranges.
+     * @param count - Number of bytes to get.
+     * @param prevSnapshotOrUrl - Timestamp of snapshot to retrieve the difference or URL of snapshot to retrieve the difference.
+     * @param marker - A string value that identifies the portion of the list to be returned with the next get operation.
+     * @param options - Options to the Page Blob Get Page Ranges Diff operation.
+     */
+    async listPageRangesDiffSegment(offset, count, prevSnapshotOrUrl, marker, options = {}) {
+        return tracingClient.withSpan("PageBlobClient-getPageRangesDiffSegment", options, async (updatedOptions) => {
+            var _a;
+            return assertResponse(await this.pageBlobContext.getPageRangesDiff({
+                abortSignal: options === null || options === void 0 ? void 0 : options.abortSignal,
+                leaseAccessConditions: options === null || options === void 0 ? void 0 : options.conditions,
+                modifiedAccessConditions: Object.assign(Object.assign({}, options === null || options === void 0 ? void 0 : options.conditions), { ifTags: (_a = options === null || options === void 0 ? void 0 : options.conditions) === null || _a === void 0 ? void 0 : _a.tagConditions }),
+                prevsnapshot: prevSnapshotOrUrl,
+                range: rangeToString({
+                    offset: offset,
+                    count: count,
+                }),
+                marker: marker,
+                maxPageSize: options === null || options === void 0 ? void 0 : options.maxPageSize,
+                tracingOptions: updatedOptions.tracingOptions,
+            }));
+        });
+    }
+    /**
+     * Returns an AsyncIterableIterator for {@link PageBlobGetPageRangesDiffResponseModel}
+     *
+     *
+     * @param offset - Starting byte position of the page ranges.
+     * @param count - Number of bytes to get.
+     * @param prevSnapshotOrUrl - Timestamp of snapshot to retrieve the difference or URL of snapshot to retrieve the difference.
+     * @param marker - A string value that identifies the portion of
+     *                          the page ranges to be returned with the next operation. The
+     *                          operation returns the ContinuationToken value within the response body if it
+     *                          did not return all page ranges remaining within the current page.
+     *                          The ContinuationToken value can be used as the value for
+     *                          the marker parameter in a subsequent call to request the next page of
+     *                          items. The marker value is opaque to the client.
+     * @param options - Options to the Page Blob Get Page Ranges Diff operation.
+     */
+    listPageRangeDiffItemSegments(offset, count, prevSnapshotOrUrl, marker, options) {
+        return tslib.__asyncGenerator(this, arguments, function* listPageRangeDiffItemSegments_1() {
+            let getPageRangeItemSegmentsResponse;
+            if (!!marker || marker === undefined) {
+                do {
+                    getPageRangeItemSegmentsResponse = yield tslib.__await(this.listPageRangesDiffSegment(offset, count, prevSnapshotOrUrl, marker, options));
+                    marker = getPageRangeItemSegmentsResponse.continuationToken;
+                    yield yield tslib.__await(yield tslib.__await(getPageRangeItemSegmentsResponse));
+                } while (marker);
+            }
+        });
+    }
+    /**
+     * Returns an AsyncIterableIterator of {@link PageRangeInfo} objects
+     *
+     * @param offset - Starting byte position of the page ranges.
+     * @param count - Number of bytes to get.
+     * @param prevSnapshotOrUrl - Timestamp of snapshot to retrieve the difference or URL of snapshot to retrieve the difference.
+     * @param options - Options to the Page Blob Get Page Ranges Diff operation.
+     */
+    listPageRangeDiffItems(offset, count, prevSnapshotOrUrl, options) {
+        return tslib.__asyncGenerator(this, arguments, function* listPageRangeDiffItems_1() {
+            var _a, e_2, _b, _c;
+            let marker;
+            try {
+                for (var _d = true, _e = tslib.__asyncValues(this.listPageRangeDiffItemSegments(offset, count, prevSnapshotOrUrl, marker, options)), _f; _f = yield tslib.__await(_e.next()), _a = _f.done, !_a; _d = true) {
+                    _c = _f.value;
+                    _d = false;
+                    const getPageRangesSegment = _c;
+                    yield tslib.__await(yield* tslib.__asyncDelegator(tslib.__asyncValues(ExtractPageRangeInfoItems(getPageRangesSegment))));
+                }
+            }
+            catch (e_2_1) { e_2 = { error: e_2_1 }; }
+            finally {
+                try {
+                    if (!_d && !_a && (_b = _e.return)) yield tslib.__await(_b.call(_e));
+                }
+                finally { if (e_2) throw e_2.error; }
+            }
+        });
+    }
+    /**
+     * Returns an async iterable iterator to list the page ranges that differ between a specified snapshot and this page blob.
+     * @see https://docs.microsoft.com/rest/api/storageservices/get-page-ranges
+     *
+     *  .byPage() returns an async iterable iterator to list the page ranges that differ between a specified snapshot and this page blob.
+     *
+     * Example using `for await` syntax:
+     *
+     * ```js
+     * // Get the pageBlobClient before you run these snippets.
+     * // It can be obtained from `blobServiceClient.getContainerClient("<your-container-name>").getPageBlobClient("<your-blob-name>");`
+     * let i = 1;
+     * for await (const pageRange of pageBlobClient.listPageRangesDiff()) {
+     *   console.log(`Page range ${i++}: ${pageRange.start} - ${pageRange.end}`);
+     * }
+     * ```
+     *
+     * Example using `iter.next()`:
+     *
+     * ```js
+     * let i = 1;
+     * let iter = pageBlobClient.listPageRangesDiff();
+     * let pageRangeItem = await iter.next();
+     * while (!pageRangeItem.done) {
+     *   console.log(`Page range ${i++}: ${pageRangeItem.value.start} - ${pageRangeItem.value.end}, IsClear: ${pageRangeItem.value.isClear}`);
+     *   pageRangeItem = await iter.next();
+     * }
+     * ```
+     *
+     * Example using `byPage()`:
+     *
+     * ```js
+     * // passing optional maxPageSize in the page settings
+     * let i = 1;
+     * for await (const response of pageBlobClient.listPageRangesDiff().byPage({ maxPageSize: 20 })) {
+     *   for (const pageRange of response) {
+     *     console.log(`Page range ${i++}: ${pageRange.start} - ${pageRange.end}`);
+     *   }
+     * }
+     * ```
+     *
+     * Example using paging with a marker:
+     *
+     * ```js
+     * let i = 1;
+     * let iterator = pageBlobClient.listPageRangesDiff().byPage({ maxPageSize: 2 });
+     * let response = (await iterator.next()).value;
+     *
+     * // Prints 2 page ranges
+     * for (const pageRange of response) {
+     *   console.log(`Page range ${i++}: ${pageRange.start} - ${pageRange.end}`);
+     * }
+     *
+     * // Gets next marker
+     * let marker = response.continuationToken;
+     *
+     * // Passing next marker as continuationToken
+     *
+     * iterator = pageBlobClient.listPageRangesDiff().byPage({ continuationToken: marker, maxPageSize: 10 });
+     * response = (await iterator.next()).value;
+     *
+     * // Prints 10 page ranges
+     * for (const pageRange of response) {
+     *   console.log(`Page range ${i++}: ${pageRange.start} - ${pageRange.end}`);
+     * }
+     * ```
+     * @param offset - Starting byte position of the page ranges.
+     * @param count - Number of bytes to get.
+     * @param prevSnapshot - Timestamp of snapshot to retrieve the difference.
+     * @param options - Options to the Page Blob Get Ranges operation.
+     * @returns An asyncIterableIterator that supports paging.
+     */
+    listPageRangesDiff(offset, count, prevSnapshot, options = {}) {
+        options.conditions = options.conditions || {};
+        // AsyncIterableIterator to iterate over blobs
+        const iter = this.listPageRangeDiffItems(offset, count, prevSnapshot, Object.assign({}, options));
+        return {
+            /**
+             * The next method, part of the iteration protocol
+             */
+            next() {
+                return iter.next();
+            },
+            /**
+             * The connection to the async iterator, part of the iteration protocol
+             */
+            [Symbol.asyncIterator]() {
+                return this;
+            },
+            /**
+             * Return an AsyncIterableIterator that works a page at a time
+             */
+            byPage: (settings = {}) => {
+                return this.listPageRangeDiffItemSegments(offset, count, prevSnapshot, settings.continuationToken, Object.assign({ maxPageSize: settings.maxPageSize }, options));
+            },
+        };
+    }
+    /**
+     * Gets the collection of page ranges that differ between a specified snapshot and this page blob for managed disks.
+     * @see https://docs.microsoft.com/rest/api/storageservices/get-page-ranges
+     *
+     * @param offset - Starting byte position of the page blob
+     * @param count - Number of bytes to get ranges diff.
+     * @param prevSnapshotUrl - URL of snapshot to retrieve the difference.
+     * @param options - Options to the Page Blob Get Page Ranges Diff operation.
+     * @returns Response data for the Page Blob Get Page Range Diff operation.
+     */
+    async getPageRangesDiffForManagedDisks(offset, count, prevSnapshotUrl, options = {}) {
+        options.conditions = options.conditions || {};
+        return tracingClient.withSpan("PageBlobClient-GetPageRangesDiffForManagedDisks", options, async (updatedOptions) => {
+            var _a;
+            const response = assertResponse(await this.pageBlobContext.getPageRangesDiff({
+                abortSignal: options.abortSignal,
+                leaseAccessConditions: options.conditions,
+                modifiedAccessConditions: Object.assign(Object.assign({}, options.conditions), { ifTags: (_a = options.conditions) === null || _a === void 0 ? void 0 : _a.tagConditions }),
+                prevSnapshotUrl,
+                range: rangeToString({ offset, count }),
+                tracingOptions: updatedOptions.tracingOptions,
+            }));
+            return rangeResponseFromModel(response);
+        });
+    }
+    /**
+     * Resizes the page blob to the specified size (which must be a multiple of 512).
+     * @see https://docs.microsoft.com/rest/api/storageservices/set-blob-properties
+     *
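+     * Example (an illustrative sketch; assumes an existing `pageBlobClient`):
+     *
+     * ```js
+     * // The new size must be a multiple of 512 bytes.
+     * await pageBlobClient.resize(1024 * 1024);
+     * ```
+     *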
+     * @param size - Target size
+     * @param options - Options to the Page Blob Resize operation.
+     * @returns Response data for the Page Blob Resize operation.
+     */
+    async resize(size, options = {}) {
+        options.conditions = options.conditions || {};
+        return tracingClient.withSpan("PageBlobClient-resize", options, async (updatedOptions) => {
+            var _a;
+            return assertResponse(await this.pageBlobContext.resize(size, {
+                abortSignal: options.abortSignal,
+                leaseAccessConditions: options.conditions,
+                modifiedAccessConditions: Object.assign(Object.assign({}, options.conditions), { ifTags: (_a = options.conditions) === null || _a === void 0 ? void 0 : _a.tagConditions }),
+                encryptionScope: options.encryptionScope,
+                tracingOptions: updatedOptions.tracingOptions,
+            }));
+        });
+    }
+    /**
+     * Sets a page blob's sequence number.
+     * @see https://docs.microsoft.com/en-us/rest/api/storageservices/set-blob-properties
+     *
+     * @param sequenceNumberAction - Indicates how the service should modify the blob's sequence number.
+     * @param sequenceNumber - Required if sequenceNumberAction is max or update
+     * @param options - Options to the Page Blob Update Sequence Number operation.
+     * @returns Response data for the Page Blob Update Sequence Number operation.
+     */
+    async updateSequenceNumber(sequenceNumberAction, sequenceNumber, options = {}) {
+        options.conditions = options.conditions || {};
+        return tracingClient.withSpan("PageBlobClient-updateSequenceNumber", options, async (updatedOptions) => {
+            var _a;
+            return assertResponse(await this.pageBlobContext.updateSequenceNumber(sequenceNumberAction, {
+                abortSignal: options.abortSignal,
+                blobSequenceNumber: sequenceNumber,
+                leaseAccessConditions: options.conditions,
+                modifiedAccessConditions: Object.assign(Object.assign({}, options.conditions), { ifTags: (_a = options.conditions) === null || _a === void 0 ? void 0 : _a.tagConditions }),
+                tracingOptions: updatedOptions.tracingOptions,
+            }));
+        });
+    }
+    /**
+     * Begins an operation to start an incremental copy from one page blob's snapshot to this page blob.
+     * The snapshot is copied such that only the differential changes since the previously
+     * copied snapshot are transferred to the destination.
+     * The copied snapshots are complete copies of the original snapshot and can be read or copied from as usual.
+     * @see https://docs.microsoft.com/rest/api/storageservices/incremental-copy-blob
+     * @see https://docs.microsoft.com/en-us/azure/virtual-machines/windows/incremental-snapshots
+     *
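+     * Example (an illustrative sketch; assumes an existing destination `pageBlobClient` and a
+     * `sourceSnapshotUrl` of the form shown for `copySource` below):
+     *
+     * ```js
+     * await pageBlobClient.startCopyIncremental(sourceSnapshotUrl);
+     * // The copy completes asynchronously; poll the destination's properties to track it.
+     * const properties = await pageBlobClient.getProperties();
+     * console.log(properties.copyStatus); // e.g. "pending" or "success"
+     * ```
+     *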
+     * @param copySource - Specifies the name of the source page blob snapshot. For example,
+     *                            https://myaccount.blob.core.windows.net/mycontainer/myblob?snapshot=<DateTime>
+     * @param options - Options to the Page Blob Copy Incremental operation.
+     * @returns Response data for the Page Blob Copy Incremental operation.
+     */
+    async startCopyIncremental(copySource, options = {}) {
+        return tracingClient.withSpan("PageBlobClient-startCopyIncremental", options, async (updatedOptions) => {
+            var _a;
+            return assertResponse(await this.pageBlobContext.copyIncremental(copySource, {
+                abortSignal: options.abortSignal,
+                modifiedAccessConditions: Object.assign(Object.assign({}, options.conditions), { ifTags: (_a = options.conditions) === null || _a === void 0 ? void 0 : _a.tagConditions }),
+                tracingOptions: updatedOptions.tracingOptions,
+            }));
+        });
+    }
+}
+
+// Copyright (c) Microsoft Corporation.
+// Licensed under the MIT License.
+async function getBodyAsText(batchResponse) {
+    let buffer = Buffer.alloc(BATCH_MAX_PAYLOAD_IN_BYTES);
+    const responseLength = await streamToBuffer2(batchResponse.readableStreamBody, buffer);
+    // Slice the buffer to trim the empty ending.
+    buffer = buffer.slice(0, responseLength);
+    return buffer.toString();
+}
+function utf8ByteLength(str) {
+    return Buffer.byteLength(str);
+}
+
+// Copyright (c) Microsoft Corporation.
+// Licensed under the MIT License.
+const HTTP_HEADER_DELIMITER = ": ";
+const SPACE_DELIMITER = " ";
+const NOT_FOUND = -1;
+/**
+ * Util class for parsing batch response.
+ */
+class BatchResponseParser {
+    constructor(batchResponse, subRequests) {
+        if (!batchResponse || !batchResponse.contentType) {
+            // In a special (reported) case, the server may return an invalid content-type that cannot be parsed.
+            throw new RangeError("batchResponse is malformed or doesn't contain valid content-type.");
+        }
+        if (!subRequests || subRequests.size === 0) {
+            // This should be prevented during coding.
+            throw new RangeError("Invalid state: subRequests is not provided or size is 0.");
+        }
+        this.batchResponse = batchResponse;
+        this.subRequests = subRequests;
+        this.responseBatchBoundary = this.batchResponse.contentType.split("=")[1];
+        this.perResponsePrefix = `--${this.responseBatchBoundary}${HTTP_LINE_ENDING}`;
+        this.batchResponseEnding = `--${this.responseBatchBoundary}--`;
+    }
+    // For example of response, please refer to https://docs.microsoft.com/en-us/rest/api/storageservices/blob-batch#response
+    async parseBatchResponse() {
+        // When logic reaches here, the batch request should have already succeeded with 202, so we can further
+        // parse the sub requests' responses.
+        if (this.batchResponse._response.status !== HTTPURLConnection.HTTP_ACCEPTED) {
+            throw new Error(`Invalid state: batch request failed with status: '${this.batchResponse._response.status}'.`);
+        }
+        const responseBodyAsText = await getBodyAsText(this.batchResponse);
+        const subResponses = responseBodyAsText
+            .split(this.batchResponseEnding)[0] // string after ending is useless
+            .split(this.perResponsePrefix)
+            .slice(1); // string before first response boundary is useless
+        const subResponseCount = subResponses.length;
+        // Defensive coding in case of potential error parsing.
+        // Note: subResponseCount == 1 is a special case where the sub request is invalid.
+        // We try to prevent such cases through early validation, e.g. validating sub request count >= 1.
+        // In the unexpected case of an invalid sub request, we still allow the sub response to be parsed and returned to the user.
+        if (subResponseCount !== this.subRequests.size && subResponseCount !== 1) {
+            throw new Error("Invalid state: sub responses' count is not equal to sub requests' count.");
+        }
+        const deserializedSubResponses = new Array(subResponseCount);
+        let subResponsesSucceededCount = 0;
+        let subResponsesFailedCount = 0;
+        // Parse sub responses.
+        for (let index = 0; index < subResponseCount; index++) {
+            const subResponse = subResponses[index];
+            const deserializedSubResponse = {};
+            deserializedSubResponse.headers = coreHttpCompat.toHttpHeadersLike(coreRestPipeline.createHttpHeaders());
+            const responseLines = subResponse.split(`${HTTP_LINE_ENDING}`);
+            let subRespHeaderStartFound = false;
+            let subRespHeaderEndFound = false;
+            let subRespFailed = false;
+            let contentId = NOT_FOUND;
+            for (const responseLine of responseLines) {
+                if (!subRespHeaderStartFound) {
+                    // Convention line to indicate content ID
+                    if (responseLine.startsWith(HeaderConstants.CONTENT_ID)) {
+                        contentId = parseInt(responseLine.split(HTTP_HEADER_DELIMITER)[1]);
+                    }
+                    // Http version line with status code indicates the start of sub request's response.
+                    // Example: HTTP/1.1 202 Accepted
+                    if (responseLine.startsWith(HTTP_VERSION_1_1)) {
+                        subRespHeaderStartFound = true;
+                        const tokens = responseLine.split(SPACE_DELIMITER);
+                        deserializedSubResponse.status = parseInt(tokens[1]);
+                        deserializedSubResponse.statusMessage = tokens.slice(2).join(SPACE_DELIMITER);
+                    }
+                    continue; // Skip convention headers not specifically for sub request i.e. Content-Type: application/http and Content-ID: *
+                }
+                if (responseLine.trim() === "") {
+                    // Sub response's header start line already found, and the first empty line indicates header end line found.
+                    if (!subRespHeaderEndFound) {
+                        subRespHeaderEndFound = true;
+                    }
+                    continue; // Skip empty line
+                }
+                // Note: when code reaches here, it indicates subRespHeaderStartFound == true
+                if (!subRespHeaderEndFound) {
+                    if (responseLine.indexOf(HTTP_HEADER_DELIMITER) === -1) {
+                        // Defensive coding to prevent missing valuable lines.
+                        throw new Error(`Invalid state: find non-empty line '${responseLine}' without HTTP header delimiter '${HTTP_HEADER_DELIMITER}'.`);
+                    }
+                    // Parse headers of sub response.
+                    const tokens = responseLine.split(HTTP_HEADER_DELIMITER);
+                    deserializedSubResponse.headers.set(tokens[0], tokens[1]);
+                    if (tokens[0] === HeaderConstants.X_MS_ERROR_CODE) {
+                        deserializedSubResponse.errorCode = tokens[1];
+                        subRespFailed = true;
+                    }
+                }
+                else {
+                    // Assemble body of sub response.
+                    if (!deserializedSubResponse.bodyAsText) {
+                        deserializedSubResponse.bodyAsText = "";
+                    }
+                    deserializedSubResponse.bodyAsText += responseLine;
+                }
+            } // Inner for end
+            // The response will contain the Content-ID header for each corresponding subrequest response to use for tracking.
+            // The Content-IDs are set to a valid index in the subrequests we sent. In the status code 202 path, we expect a
+            // 1-1 mapping from [0, subRequests.size) to the Content-IDs returned. If not, we simply don't return that
+            // unexpected subResponse in the parsed response; it can always be looked up in the raw response for debugging purposes.
+            if (contentId !== NOT_FOUND &&
+                Number.isInteger(contentId) &&
+                contentId >= 0 &&
+                contentId < this.subRequests.size &&
+                deserializedSubResponses[contentId] === undefined) {
+                deserializedSubResponse._request = this.subRequests.get(contentId);
+                deserializedSubResponses[contentId] = deserializedSubResponse;
+            }
+            else {
+                logger.error(`subResponses[${index}] is dropped as the Content-ID is not found or invalid, Content-ID: ${contentId}`);
+            }
+            if (subRespFailed) {
+                subResponsesFailedCount++;
+            }
+            else {
+                subResponsesSucceededCount++;
+            }
+        }
+        return {
+            subResponses: deserializedSubResponses,
+            subResponsesSucceededCount: subResponsesSucceededCount,
+            subResponsesFailedCount: subResponsesFailedCount,
+        };
+    }
+}
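+/*
+ * Usage sketch: this parser is driven by `BlobBatchClient.submitBatch` below. Assuming
+ * `rawBatchResponse` is the 202 response returned by the service's submitBatch call and
+ * `batchRequest` is the originating BlobBatch:
+ *
+ * ```js
+ * const parser = new BatchResponseParser(rawBatchResponse, batchRequest.getSubRequests());
+ * const summary = await parser.parseBatchResponse();
+ * console.log(summary.subResponsesSucceededCount, summary.subResponsesFailedCount);
+ * ```
+ */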
+
+// Copyright (c) Microsoft Corporation.
+// Licensed under the MIT License.
+var MutexLockStatus;
+(function (MutexLockStatus) {
+    MutexLockStatus[MutexLockStatus["LOCKED"] = 0] = "LOCKED";
+    MutexLockStatus[MutexLockStatus["UNLOCKED"] = 1] = "UNLOCKED";
+})(MutexLockStatus || (MutexLockStatus = {}));
+/**
+ * An async mutex lock.
+ */
+class Mutex {
+    /**
+     * Lock for a specific key. If the lock has already been acquired by another caller,
+     * this will wait until the lock is released and then acquire it.
+     *
+     * @param key - lock key
+     */
+    static async lock(key) {
+        return new Promise((resolve) => {
+            if (this.keys[key] === undefined || this.keys[key] === MutexLockStatus.UNLOCKED) {
+                this.keys[key] = MutexLockStatus.LOCKED;
+                resolve();
+            }
+            else {
+                this.onUnlockEvent(key, () => {
+                    this.keys[key] = MutexLockStatus.LOCKED;
+                    resolve();
+                });
+            }
+        });
+    }
+    /**
+     * Unlock a key.
+     *
+     * @param key - lock key
+     */
+    static async unlock(key) {
+        return new Promise((resolve) => {
+            if (this.keys[key] === MutexLockStatus.LOCKED) {
+                this.emitUnlockEvent(key);
+            }
+            delete this.keys[key];
+            resolve();
+        });
+    }
+    static onUnlockEvent(key, handler) {
+        if (this.listeners[key] === undefined) {
+            this.listeners[key] = [handler];
+        }
+        else {
+            this.listeners[key].push(handler);
+        }
+    }
+    static emitUnlockEvent(key) {
+        if (this.listeners[key] !== undefined && this.listeners[key].length > 0) {
+            const handler = this.listeners[key].shift();
+            setImmediate(() => {
+                handler.call(this);
+            });
+        }
+    }
+}
+Mutex.keys = {};
+Mutex.listeners = {};
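+/*
+ * Usage sketch of the keyed lock, mirroring how `BlobBatch.addSubRequestInternal` uses it
+ * below (the key "batch" is just an example):
+ *
+ * ```js
+ * await Mutex.lock("batch");
+ * try {
+ *   // critical section: only one caller per key at a time
+ * } finally {
+ *   await Mutex.unlock("batch");
+ * }
+ * ```
+ */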
+
+// Copyright (c) Microsoft Corporation.
+// Licensed under the MIT License.
+/**
+ * A BlobBatch represents an aggregated set of operations on blobs.
+ * Currently, only `delete` and `setAccessTier` are supported.
+ */
+class BlobBatch {
+    constructor() {
+        this.batch = "batch";
+        this.batchRequest = new InnerBatchRequest();
+    }
+    /**
+     * Get the value of Content-Type for a batch request.
+     * The value must be multipart/mixed with a batch boundary.
+     * Example: multipart/mixed; boundary=batch_a81786c8-e301-4e42-a729-a32ca24ae252
+     */
+    getMultiPartContentType() {
+        return this.batchRequest.getMultipartContentType();
+    }
+    /**
+     * Get assembled HTTP request body for sub requests.
+     */
+    getHttpRequestBody() {
+        return this.batchRequest.getHttpRequestBody();
+    }
+    /**
+     * Get sub requests that are added into the batch request.
+     */
+    getSubRequests() {
+        return this.batchRequest.getSubRequests();
+    }
+    async addSubRequestInternal(subRequest, assembleSubRequestFunc) {
+        await Mutex.lock(this.batch);
+        try {
+            this.batchRequest.preAddSubRequest(subRequest);
+            await assembleSubRequestFunc();
+            this.batchRequest.postAddSubRequest(subRequest);
+        }
+        finally {
+            await Mutex.unlock(this.batch);
+        }
+    }
+    setBatchType(batchType) {
+        if (!this.batchType) {
+            this.batchType = batchType;
+        }
+        if (this.batchType !== batchType) {
+            throw new RangeError(`BlobBatch only supports one operation type per batch and it already is being used for ${this.batchType} operations.`);
+        }
+    }
+    async deleteBlob(urlOrBlobClient, credentialOrOptions, options) {
+        let url;
+        let credential;
+        if (typeof urlOrBlobClient === "string" &&
+            ((coreUtil.isNode && credentialOrOptions instanceof StorageSharedKeyCredential) ||
+                credentialOrOptions instanceof AnonymousCredential ||
+                coreAuth.isTokenCredential(credentialOrOptions))) {
+            // First overload
+            url = urlOrBlobClient;
+            credential = credentialOrOptions;
+        }
+        else if (urlOrBlobClient instanceof BlobClient) {
+            // Second overload
+            url = urlOrBlobClient.url;
+            credential = urlOrBlobClient.credential;
+            options = credentialOrOptions;
+        }
+        else {
+            throw new RangeError("Invalid arguments. Either url and credential, or BlobClient need be provided.");
+        }
+        if (!options) {
+            options = {};
+        }
+        return tracingClient.withSpan("BatchDeleteRequest-addSubRequest", options, async (updatedOptions) => {
+            this.setBatchType("delete");
+            await this.addSubRequestInternal({
+                url: url,
+                credential: credential,
+            }, async () => {
+                await new BlobClient(url, this.batchRequest.createPipeline(credential)).delete(updatedOptions);
+            });
+        });
+    }
+    async setBlobAccessTier(urlOrBlobClient, credentialOrTier, tierOrOptions, options) {
+        let url;
+        let credential;
+        let tier;
+        if (typeof urlOrBlobClient === "string" &&
+            ((coreUtil.isNode && credentialOrTier instanceof StorageSharedKeyCredential) ||
+                credentialOrTier instanceof AnonymousCredential ||
+                coreAuth.isTokenCredential(credentialOrTier))) {
+            // First overload
+            url = urlOrBlobClient;
+            credential = credentialOrTier;
+            tier = tierOrOptions;
+        }
+        else if (urlOrBlobClient instanceof BlobClient) {
+            // Second overload
+            url = urlOrBlobClient.url;
+            credential = urlOrBlobClient.credential;
+            tier = credentialOrTier;
+            options = tierOrOptions;
+        }
+        else {
+            throw new RangeError("Invalid arguments. Either url and credential, or BlobClient need be provided.");
+        }
+        if (!options) {
+            options = {};
+        }
+        return tracingClient.withSpan("BatchSetTierRequest-addSubRequest", options, async (updatedOptions) => {
+            this.setBatchType("setAccessTier");
+            await this.addSubRequestInternal({
+                url: url,
+                credential: credential,
+            }, async () => {
+                await new BlobClient(url, this.batchRequest.createPipeline(credential)).setAccessTier(tier, updatedOptions);
+            });
+        });
+    }
+}
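+/*
+ * Minimal usage sketch (assumes `blobUrl0`/`blobUrl1` and `credential` exist, and that a
+ * `blobBatchClient` was obtained, e.g. via `blobServiceClient.getBlobBatchClient()`):
+ *
+ * ```js
+ * const batch = new BlobBatch();
+ * await batch.deleteBlob(blobUrl0, credential);
+ * await batch.deleteBlob(blobUrl1, credential, { deleteSnapshots: "include" });
+ * const response = await blobBatchClient.submitBatch(batch);
+ * console.log(response.subResponsesSucceededCount);
+ * ```
+ */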
+/**
+ * Inner batch request class which is responsible for assembling and serializing sub requests.
+ * See https://docs.microsoft.com/en-us/rest/api/storageservices/blob-batch#request-body for how requests are assembled.
+ */
+class InnerBatchRequest {
+    constructor() {
+        this.operationCount = 0;
+        this.body = "";
+        const tempGuid = coreUtil.randomUUID();
+        // batch_{batchid}
+        this.boundary = `batch_${tempGuid}`;
+        // --batch_{batchid}
+        // Content-Type: application/http
+        // Content-Transfer-Encoding: binary
+        this.subRequestPrefix = `--${this.boundary}${HTTP_LINE_ENDING}${HeaderConstants.CONTENT_TYPE}: application/http${HTTP_LINE_ENDING}${HeaderConstants.CONTENT_TRANSFER_ENCODING}: binary`;
+        // multipart/mixed; boundary=batch_{batchid}
+        this.multipartContentType = `multipart/mixed; boundary=${this.boundary}`;
+        // --batch_{batchid}--
+        this.batchRequestEnding = `--${this.boundary}--`;
+        this.subRequests = new Map();
+    }
+    /**
+     * Create pipeline to assemble sub requests. The idea here is to use existing
+     * credential and serialization/deserialization components, with additional policies to
+     * filter unnecessary headers, assemble sub requests into the request's body,
+     * and intercept the request from going to the wire.
+     * @param credential -  Such as AnonymousCredential, StorageSharedKeyCredential or any credential from the `@azure/identity` package to authenticate requests to the service. You can also provide an object that implements the TokenCredential interface. If not specified, AnonymousCredential is used.
+     */
+    createPipeline(credential) {
+        const corePipeline = coreRestPipeline.createEmptyPipeline();
+        corePipeline.addPolicy(coreClient.serializationPolicy({
+            stringifyXML: coreXml.stringifyXML,
+            serializerOptions: {
+                xml: {
+                    xmlCharKey: "#",
+                },
+            },
+        }), { phase: "Serialize" });
+        // Use batch header filter policy to exclude unnecessary headers
+        corePipeline.addPolicy(batchHeaderFilterPolicy());
+        // Use batch assemble policy to assemble the request and intercept it from going to the wire
+        corePipeline.addPolicy(batchRequestAssemblePolicy(this), { afterPhase: "Sign" });
+        if (coreAuth.isTokenCredential(credential)) {
+            corePipeline.addPolicy(coreRestPipeline.bearerTokenAuthenticationPolicy({
+                credential,
+                scopes: StorageOAuthScopes,
+                challengeCallbacks: { authorizeRequestOnChallenge: coreClient.authorizeRequestOnTenantChallenge },
+            }), { phase: "Sign" });
+        }
+        else if (credential instanceof StorageSharedKeyCredential) {
+            corePipeline.addPolicy(storageSharedKeyCredentialPolicy({
+                accountName: credential.accountName,
+                accountKey: credential.accountKey,
+            }), { phase: "Sign" });
+        }
+        const pipeline = new Pipeline([]);
+        // attach the v2 pipeline to this one
+        pipeline._credential = credential;
+        pipeline._corePipeline = corePipeline;
+        return pipeline;
+    }
+    appendSubRequestToBody(request) {
+        // Start to assemble sub request
+        this.body += [
+            this.subRequestPrefix, // sub request constant prefix
+            `${HeaderConstants.CONTENT_ID}: ${this.operationCount}`, // sub request's content ID
+            "", // empty line after sub request's content ID
+            `${request.method.toString()} ${getURLPathAndQuery(request.url)} ${HTTP_VERSION_1_1}${HTTP_LINE_ENDING}`, // sub request start line with method
+        ].join(HTTP_LINE_ENDING);
+        for (const [name, value] of request.headers) {
+            this.body += `${name}: ${value}${HTTP_LINE_ENDING}`;
+        }
+        this.body += HTTP_LINE_ENDING; // sub request's headers need to end with an empty line
+        // No body to assemble for current batch request support
+        // End to assemble sub request
+    }
+    preAddSubRequest(subRequest) {
+        if (this.operationCount >= BATCH_MAX_REQUEST) {
+            throw new RangeError(`Cannot exceed ${BATCH_MAX_REQUEST} sub requests in a single batch`);
+        }
+        // Fast fail if url for sub request is invalid
+        const path = getURLPath(subRequest.url);
+        if (!path || path === "") {
+            throw new RangeError(`Invalid url for sub request: '${subRequest.url}'`);
+        }
+    }
+    postAddSubRequest(subRequest) {
+        this.subRequests.set(this.operationCount, subRequest);
+        this.operationCount++;
+    }
+    // Return the http request body with assembling the ending line to the sub request body.
+    getHttpRequestBody() {
+        return `${this.body}${this.batchRequestEnding}${HTTP_LINE_ENDING}`;
+    }
+    getMultipartContentType() {
+        return this.multipartContentType;
+    }
+    getSubRequests() {
+        return this.subRequests;
+    }
+}
+function batchRequestAssemblePolicy(batchRequest) {
+    return {
+        name: "batchRequestAssemblePolicy",
+        async sendRequest(request) {
+            batchRequest.appendSubRequestToBody(request);
+            return {
+                request,
+                status: 200,
+                headers: coreRestPipeline.createHttpHeaders(),
+            };
+        },
+    };
+}
+function batchHeaderFilterPolicy() {
+    return {
+        name: "batchHeaderFilterPolicy",
+        async sendRequest(request, next) {
+            let xMsHeaderName = "";
+            for (const [name] of request.headers) {
+                if (iEqual(name, HeaderConstants.X_MS_VERSION)) {
+                    xMsHeaderName = name;
+                }
+            }
+            if (xMsHeaderName !== "") {
+                request.headers.delete(xMsHeaderName); // The subrequests should not have the x-ms-version header.
+            }
+            return next(request);
+        },
+    };
+}
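+/*
+ * The two policies above follow the core-rest-pipeline policy shape: an object with a `name`
+ * and an async `sendRequest(request, next)`. A sketch of a similar header-dropping policy
+ * (the `dropHeaderPolicy` name and header value are illustrative only):
+ *
+ * ```js
+ * function dropHeaderPolicy(headerName) {
+ *   return {
+ *     name: "dropHeaderPolicy",
+ *     async sendRequest(request, next) {
+ *       // Remove the named header before the request continues down the pipeline.
+ *       request.headers.delete(headerName);
+ *       return next(request);
+ *     },
+ *   };
+ * }
+ * // e.g. corePipeline.addPolicy(dropHeaderPolicy("x-ms-example"));
+ * ```
+ */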
+
+// Copyright (c) Microsoft Corporation.
+// Licensed under the MIT License.
+/**
+ * A BlobBatchClient allows you to make batched requests to the Azure Storage Blob service.
+ *
+ * @see https://docs.microsoft.com/en-us/rest/api/storageservices/blob-batch
+ */
+class BlobBatchClient {
+    constructor(url, credentialOrPipeline, 
+    // Legacy, no fix for eslint error without breaking. Disable it for this interface.
+    /* eslint-disable-next-line @azure/azure-sdk/ts-naming-options*/
+    options) {
+        let pipeline;
+        if (isPipelineLike(credentialOrPipeline)) {
+            pipeline = credentialOrPipeline;
+        }
+        else if (!credentialOrPipeline) {
+            // no credential provided
+            pipeline = newPipeline(new AnonymousCredential(), options);
+        }
+        else {
+            pipeline = newPipeline(credentialOrPipeline, options);
+        }
+        const storageClientContext = new StorageContextClient(url, getCoreClientOptions(pipeline));
+        const path = getURLPath(url);
+        if (path && path !== "/") {
+            // Container scoped.
+            this.serviceOrContainerContext = storageClientContext.container;
+        }
+        else {
+            this.serviceOrContainerContext = storageClientContext.service;
+        }
+    }
+    /**
+     * Creates a {@link BlobBatch}.
+     * A BlobBatch represents an aggregated set of operations on blobs.
+     */
+    createBatch() {
+        return new BlobBatch();
+    }
+    async deleteBlobs(urlsOrBlobClients, credentialOrOptions, 
+    // Legacy, no fix for eslint error without breaking. Disable it for this interface.
+    /* eslint-disable-next-line @azure/azure-sdk/ts-naming-options*/
+    options) {
+        const batch = new BlobBatch();
+        for (const urlOrBlobClient of urlsOrBlobClients) {
+            if (typeof urlOrBlobClient === "string") {
+                await batch.deleteBlob(urlOrBlobClient, credentialOrOptions, options);
+            }
+            else {
+                await batch.deleteBlob(urlOrBlobClient, credentialOrOptions);
+            }
+        }
+        return this.submitBatch(batch);
+    }
+    async setBlobsAccessTier(urlsOrBlobClients, credentialOrTier, tierOrOptions, 
+    // Legacy, no fix for eslint error without breaking. Disable it for this interface.
+    /* eslint-disable-next-line @azure/azure-sdk/ts-naming-options*/
+    options) {
+        const batch = new BlobBatch();
+        for (const urlOrBlobClient of urlsOrBlobClients) {
+            if (typeof urlOrBlobClient === "string") {
+                await batch.setBlobAccessTier(urlOrBlobClient, credentialOrTier, tierOrOptions, options);
+            }
+            else {
+                await batch.setBlobAccessTier(urlOrBlobClient, credentialOrTier, tierOrOptions);
+            }
+        }
+        return this.submitBatch(batch);
+    }
+    /**
+     * Submit a batch request which consists of multiple subrequests.
+     *
+     * Get `blobBatchClient` and other details before running the snippets.
+     * `blobServiceClient.getBlobBatchClient()` gives the `blobBatchClient`
+     *
+     * Example usage:
+     *
+     * ```js
+     * let batchRequest = new BlobBatch();
+     * await batchRequest.deleteBlob(urlInString0, credential0);
+     * await batchRequest.deleteBlob(urlInString1, credential1, {
+     *  deleteSnapshots: "include"
+     * });
+     * const batchResp = await blobBatchClient.submitBatch(batchRequest);
+     * console.log(batchResp.subResponsesSucceededCount);
+     * ```
+     *
+     * Example using a lease:
+     *
+     * ```js
+     * let batchRequest = new BlobBatch();
+     * await batchRequest.setBlobAccessTier(blockBlobClient0, "Cool");
+     * await batchRequest.setBlobAccessTier(blockBlobClient1, "Cool", {
+     *  conditions: { leaseId: leaseId }
+     * });
+     * const batchResp = await blobBatchClient.submitBatch(batchRequest);
+     * console.log(batchResp.subResponsesSucceededCount);
+     * ```
+     *
+     * @see https://docs.microsoft.com/en-us/rest/api/storageservices/blob-batch
+     *
+     * @param batchRequest - A set of Delete or SetTier operations.
+     * @param options -
+     */
+    async submitBatch(batchRequest, options = {}) {
+        if (!batchRequest || batchRequest.getSubRequests().size === 0) {
+            throw new RangeError("Batch request should contain one or more sub requests.");
+        }
+        return tracingClient.withSpan("BlobBatchClient-submitBatch", options, async (updatedOptions) => {
+            const batchRequestBody = batchRequest.getHttpRequestBody();
+            // ServiceSubmitBatchResponseModel and ContainerSubmitBatchResponse are compatible for now.
+            const rawBatchResponse = assertResponse(await this.serviceOrContainerContext.submitBatch(utf8ByteLength(batchRequestBody), batchRequest.getMultiPartContentType(), batchRequestBody, Object.assign({}, updatedOptions)));
+            // Parse the sub responses' results if logic reaches here (i.e. the batch request succeeded with status code 202).
+            const batchResponseParser = new BatchResponseParser(rawBatchResponse, batchRequest.getSubRequests());
+            const responseSummary = await batchResponseParser.parseBatchResponse();
+            const res = {
+                _response: rawBatchResponse._response,
+                contentType: rawBatchResponse.contentType,
+                errorCode: rawBatchResponse.errorCode,
+                requestId: rawBatchResponse.requestId,
+                clientRequestId: rawBatchResponse.clientRequestId,
+                version: rawBatchResponse.version,
+                subResponses: responseSummary.subResponses,
+                subResponsesSucceededCount: responseSummary.subResponsesSucceededCount,
+                subResponsesFailedCount: responseSummary.subResponsesFailedCount,
+            };
+            return res;
+        });
+    }
+}
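+/*
+ * Convenience-method sketch (assumes `blobServiceClient`, the blob URLs and `credential`
+ * were created elsewhere):
+ *
+ * ```js
+ * const blobBatchClient = blobServiceClient.getBlobBatchClient();
+ * const result = await blobBatchClient.deleteBlobs([blobUrl0, blobUrl1], credential);
+ * console.log(result.subResponsesSucceededCount, result.subResponsesFailedCount);
+ * ```
+ */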
+
+/**
+ * A ContainerClient represents a URL to the Azure Storage container allowing you to manipulate its blobs.
+ */
+class ContainerClient extends StorageClient {
+    /**
+     * The name of the container.
+     */
+    get containerName() {
+        return this._containerName;
+    }
+    constructor(urlOrConnectionString, credentialOrPipelineOrContainerName, 
+    // Legacy, no fix for eslint error without breaking. Disable it for this interface.
+    /* eslint-disable-next-line @azure/azure-sdk/ts-naming-options*/
+    options) {
+        let pipeline;
+        let url;
+        options = options || {};
+        if (isPipelineLike(credentialOrPipelineOrContainerName)) {
+            // (url: string, pipeline: Pipeline)
+            url = urlOrConnectionString;
+            pipeline = credentialOrPipelineOrContainerName;
+        }
+        else if ((coreUtil.isNode && credentialOrPipelineOrContainerName instanceof StorageSharedKeyCredential) ||
+            credentialOrPipelineOrContainerName instanceof AnonymousCredential ||
+            coreAuth.isTokenCredential(credentialOrPipelineOrContainerName)) {
+            // (url: string, credential?: StorageSharedKeyCredential | AnonymousCredential | TokenCredential, options?: StoragePipelineOptions)
+            url = urlOrConnectionString;
+            pipeline = newPipeline(credentialOrPipelineOrContainerName, options);
+        }
+        else if (!credentialOrPipelineOrContainerName &&
+            typeof credentialOrPipelineOrContainerName !== "string") {
+            // (url: string, credential?: StorageSharedKeyCredential | AnonymousCredential | TokenCredential, options?: StoragePipelineOptions)
+            // The second parameter is undefined. Use anonymous credential.
+            url = urlOrConnectionString;
+            pipeline = newPipeline(new AnonymousCredential(), options);
+        }
+        else if (credentialOrPipelineOrContainerName &&
+            typeof credentialOrPipelineOrContainerName === "string") {
+            // (connectionString: string, containerName: string, blobName: string, options?: StoragePipelineOptions)
+            const containerName = credentialOrPipelineOrContainerName;
+            const extractedCreds = extractConnectionStringParts(urlOrConnectionString);
+            if (extractedCreds.kind === "AccountConnString") {
+                if (coreUtil.isNode) {
+                    const sharedKeyCredential = new StorageSharedKeyCredential(extractedCreds.accountName, extractedCreds.accountKey);
+                    url = appendToURLPath(extractedCreds.url, encodeURIComponent(containerName));
+                    if (!options.proxyOptions) {
+                        options.proxyOptions = coreRestPipeline.getDefaultProxySettings(extractedCreds.proxyUri);
+                    }
+                    pipeline = newPipeline(sharedKeyCredential, options);
+                }
+                else {
+                    throw new Error("Account connection string is only supported in Node.js environment");
+                }
+            }
+            else if (extractedCreds.kind === "SASConnString") {
+                url =
+                    appendToURLPath(extractedCreds.url, encodeURIComponent(containerName)) +
+                        "?" +
+                        extractedCreds.accountSas;
+                pipeline = newPipeline(new AnonymousCredential(), options);
+            }
+            else {
+                throw new Error("Connection string must be either an Account connection string or a SAS connection string");
+            }
+        }
+        else {
+            throw new Error("Expecting non-empty strings for containerName parameter");
+        }
+        super(url, pipeline);
+        this._containerName = this.getContainerNameFromUrl();
+        this.containerContext = this.storageClientContext.container;
+    }
+    /**
+     * Creates a new container under the specified account. If the container with
+     * the same name already exists, the operation fails.
+     * @see https://docs.microsoft.com/en-us/rest/api/storageservices/create-container
+     * Naming rules: @see https://learn.microsoft.com/rest/api/storageservices/naming-and-referencing-containers--blobs--and-metadata
+     *
+     * @param options - Options to Container Create operation.
+     *
+     *
+     * Example usage:
+     *
+     * ```js
+     * const containerClient = blobServiceClient.getContainerClient("<container name>");
+     * const createContainerResponse = await containerClient.create();
+     * console.log("Container was created successfully", createContainerResponse.requestId);
+     * ```
+     */
+    async create(options = {}) {
+        return tracingClient.withSpan("ContainerClient-create", options, async (updatedOptions) => {
+            return assertResponse(await this.containerContext.create(updatedOptions));
+        });
+    }
+    /**
+     * Creates a new container under the specified account. If the container with
+     * the same name already exists, it is not changed.
+     * @see https://docs.microsoft.com/en-us/rest/api/storageservices/create-container
+     * Naming rules: @see https://learn.microsoft.com/rest/api/storageservices/naming-and-referencing-containers--blobs--and-metadata
+     *
+     * @param options -
+     */
+    async createIfNotExists(options = {}) {
+        return tracingClient.withSpan("ContainerClient-createIfNotExists", options, async (updatedOptions) => {
+            var _a, _b;
+            try {
+                const res = await this.create(updatedOptions);
+                return Object.assign(Object.assign({ succeeded: true }, res), { _response: res._response });
+            }
+            catch (e) {
+                if (((_a = e.details) === null || _a === void 0 ? void 0 : _a.errorCode) === "ContainerAlreadyExists") {
+                    return Object.assign(Object.assign({ succeeded: false }, (_b = e.response) === null || _b === void 0 ? void 0 : _b.parsedHeaders), { _response: e.response });
+                }
+                else {
+                    throw e;
+                }
+            }
+        });
+    }
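+    /*
+     * Usage sketch (assumes `containerClient` was obtained elsewhere):
+     *
+     * ```js
+     * const res = await containerClient.createIfNotExists();
+     * if (res.succeeded) {
+     *   console.log(`Created container ${containerClient.containerName}`);
+     * } else {
+     *   console.log("Container already existed");
+     * }
+     * ```
+     */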
+    /**
+     * Returns true if the Azure container resource represented by this client exists; false otherwise.
+     *
+     * NOTE: use this function with care since an existing container might be deleted by other clients or
+     * applications. Vice versa, new containers with the same name might be added by other clients or
+     * applications after this function completes.
+     *
+     * @param options -
+     */
+    async exists(options = {}) {
+        return tracingClient.withSpan("ContainerClient-exists", options, async (updatedOptions) => {
+            try {
+                await this.getProperties({
+                    abortSignal: options.abortSignal,
+                    tracingOptions: updatedOptions.tracingOptions,
+                });
+                return true;
+            }
+            catch (e) {
+                if (e.statusCode === 404) {
+                    return false;
+                }
+                throw e;
+            }
+        });
+    }
+    /**
+     * Creates a {@link BlobClient}
+     *
+     * @param blobName - A blob name
+     * @returns A new BlobClient object for the given blob name.
+     */
+    getBlobClient(blobName) {
+        return new BlobClient(appendToURLPath(this.url, EscapePath(blobName)), this.pipeline);
+    }
+    /**
+     * Creates an {@link AppendBlobClient}
+     *
+     * @param blobName - An append blob name
+     */
+    getAppendBlobClient(blobName) {
+        return new AppendBlobClient(appendToURLPath(this.url, EscapePath(blobName)), this.pipeline);
+    }
+    /**
+     * Creates a {@link BlockBlobClient}
+     *
+     * @param blobName - A block blob name
+     *
+     *
+     * Example usage:
+     *
+     * ```js
+     * const content = "Hello world!";
+     *
+     * const blockBlobClient = containerClient.getBlockBlobClient("<blob name>");
+     * const uploadBlobResponse = await blockBlobClient.upload(content, content.length);
+     * ```
+     */
+    getBlockBlobClient(blobName) {
+        return new BlockBlobClient(appendToURLPath(this.url, EscapePath(blobName)), this.pipeline);
+    }
+    /**
+     * Creates a {@link PageBlobClient}
+     *
+     * @param blobName - A page blob name
+     */
+    getPageBlobClient(blobName) {
+        return new PageBlobClient(appendToURLPath(this.url, EscapePath(blobName)), this.pipeline);
+    }
+    /**
+     * Returns all user-defined metadata and system properties for the specified
+     * container. The data returned does not include the container's list of blobs.
+     * @see https://docs.microsoft.com/en-us/rest/api/storageservices/get-container-properties
+     *
+     * WARNING: The `metadata` object returned in the response will have its keys in lowercase, even if
+     * they originally contained uppercase characters. This differs from the metadata keys returned by
+     * the `listContainers` method of {@link BlobServiceClient} using the `includeMetadata` option, which
+     * will retain their original casing.
+     *
+     * @param options - Options to Container Get Properties operation.
+     */
+    async getProperties(options = {}) {
+        if (!options.conditions) {
+            options.conditions = {};
+        }
+        return tracingClient.withSpan("ContainerClient-getProperties", options, async (updatedOptions) => {
+            return assertResponse(await this.containerContext.getProperties(Object.assign(Object.assign({ abortSignal: options.abortSignal }, options.conditions), { tracingOptions: updatedOptions.tracingOptions })));
+        });
+    }
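+    /*
+     * Usage sketch tied to the warning above (metadata keys are returned lowercased):
+     *
+     * ```js
+     * const props = await containerClient.getProperties();
+     * // A key originally set as "MyKey" is surfaced here as "mykey".
+     * console.log(props.metadata, props.etag, props.lastModified);
+     * ```
+     */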
+    /**
+     * Marks the specified container for deletion. The container and any blobs
+     * contained within it are later deleted during garbage collection.
+     * @see https://docs.microsoft.com/en-us/rest/api/storageservices/delete-container
+     *
+     * @param options - Options to Container Delete operation.
+     */
+    async delete(options = {}) {
+        if (!options.conditions) {
+            options.conditions = {};
+        }
+        return tracingClient.withSpan("ContainerClient-delete", options, async (updatedOptions) => {
+            return assertResponse(await this.containerContext.delete({
+                abortSignal: options.abortSignal,
+                leaseAccessConditions: options.conditions,
+                modifiedAccessConditions: options.conditions,
+                tracingOptions: updatedOptions.tracingOptions,
+            }));
+        });
+    }
+    /**
+     * Marks the specified container for deletion if it exists. The container and any blobs
+     * contained within it are later deleted during garbage collection.
+     * @see https://docs.microsoft.com/en-us/rest/api/storageservices/delete-container
+     *
+     * @param options - Options to Container Delete operation.
+     */
+    async deleteIfExists(options = {}) {
+        return tracingClient.withSpan("ContainerClient-deleteIfExists", options, async (updatedOptions) => {
+            var _a, _b;
+            try {
+                const res = await this.delete(updatedOptions);
+                return Object.assign(Object.assign({ succeeded: true }, res), { _response: res._response });
+            }
+            catch (e) {
+                if (((_a = e.details) === null || _a === void 0 ? void 0 : _a.errorCode) === "ContainerNotFound") {
+                    return Object.assign(Object.assign({ succeeded: false }, (_b = e.response) === null || _b === void 0 ? void 0 : _b.parsedHeaders), { _response: e.response });
+                }
+                throw e;
+            }
+        });
+    }
+    /**
+     * Sets one or more user-defined name-value pairs for the specified container.
+     *
+     * If no option is provided, or no metadata is defined in the parameter, the container
+     * metadata will be removed.
+     *
+     * @see https://docs.microsoft.com/en-us/rest/api/storageservices/set-container-metadata
+     *
+     * @param metadata - Replace existing metadata with this value.
+     *                            If no value is provided, the existing metadata will be removed.
+     * @param options - Options to Container Set Metadata operation.
+     */
+    async setMetadata(metadata, options = {}) {
+        if (!options.conditions) {
+            options.conditions = {};
+        }
+        if (options.conditions.ifUnmodifiedSince) {
+            throw new RangeError("the IfUnmodifiedSince must have their default values because they are ignored by the blob service");
+        }
+        return tracingClient.withSpan("ContainerClient-setMetadata", options, async (updatedOptions) => {
+            return assertResponse(await this.containerContext.setMetadata({
+                abortSignal: options.abortSignal,
+                leaseAccessConditions: options.conditions,
+                metadata,
+                modifiedAccessConditions: options.conditions,
+                tracingOptions: updatedOptions.tracingOptions,
+            }));
+        });
+    }
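+    /*
+     * Usage sketch (assumes `containerClient` exists; the key/value pair is illustrative):
+     *
+     * ```js
+     * // Replace the container's metadata with a single key/value pair.
+     * await containerClient.setMetadata({ project: "demo" });
+     * // Calling without metadata removes any existing container metadata.
+     * await containerClient.setMetadata();
+     * ```
+     */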
+    /**
+     * Gets the permissions for the specified container. The permissions indicate
+     * whether container data may be accessed publicly.
+     *
+     * WARNING: JavaScript Date will potentially lose precision when parsing startsOn and expiresOn strings.
+     * For example, new Date("2018-12-31T03:44:23.8827891Z").toISOString() will get "2018-12-31T03:44:23.882Z".
+     *
+     * @see https://docs.microsoft.com/en-us/rest/api/storageservices/get-container-acl
+     *
+     * @param options - Options to Container Get Access Policy operation.
+     */
+    async getAccessPolicy(options = {}) {
+        if (!options.conditions) {
+            options.conditions = {};
+        }
+        return tracingClient.withSpan("ContainerClient-getAccessPolicy", options, async (updatedOptions) => {
+            const response = assertResponse(await this.containerContext.getAccessPolicy({
+                abortSignal: options.abortSignal,
+                leaseAccessConditions: options.conditions,
+                tracingOptions: updatedOptions.tracingOptions,
+            }));
+            const res = {
+                _response: response._response,
+                blobPublicAccess: response.blobPublicAccess,
+                date: response.date,
+                etag: response.etag,
+                errorCode: response.errorCode,
+                lastModified: response.lastModified,
+                requestId: response.requestId,
+                clientRequestId: response.clientRequestId,
+                signedIdentifiers: [],
+                version: response.version,
+            };
+            for (const identifier of response) {
+                let accessPolicy = undefined;
+                if (identifier.accessPolicy) {
+                    accessPolicy = {
+                        permissions: identifier.accessPolicy.permissions,
+                    };
+                    if (identifier.accessPolicy.expiresOn) {
+                        accessPolicy.expiresOn = new Date(identifier.accessPolicy.expiresOn);
+                    }
+                    if (identifier.accessPolicy.startsOn) {
+                        accessPolicy.startsOn = new Date(identifier.accessPolicy.startsOn);
+                    }
+                }
+                res.signedIdentifiers.push({
+                    accessPolicy,
+                    id: identifier.id,
+                });
+            }
+            return res;
+        });
+    }
+    /**
+     * Sets the permissions for the specified container. The permissions indicate
+     * whether blobs in a container may be accessed publicly.
+     *
+     * When you set permissions for a container, the existing permissions are replaced.
+     * If no access or containerAcl is provided, the existing container ACL will be
+     * removed.
+     *
+     * When you establish a stored access policy on a container, it may take up to 30 seconds to take effect.
+     * During this interval, a shared access signature that is associated with the stored access policy will
+     * fail with status code 403 (Forbidden), until the access policy becomes active.
+     * @see https://docs.microsoft.com/en-us/rest/api/storageservices/set-container-acl
+     *
+     * @param access - The level of public access to data in the container.
+     * @param containerAcl - Array of elements each having a unique Id and details of the access policy.
+     * @param options - Options to Container Set Access Policy operation.
+     */
+    async setAccessPolicy(access, containerAcl, options = {}) {
+        options.conditions = options.conditions || {};
+        return tracingClient.withSpan("ContainerClient-setAccessPolicy", options, async (updatedOptions) => {
+            const acl = [];
+            for (const identifier of containerAcl || []) {
+                acl.push({
+                    accessPolicy: {
+                        expiresOn: identifier.accessPolicy.expiresOn
+                            ? truncatedISO8061Date(identifier.accessPolicy.expiresOn)
+                            : "",
+                        permissions: identifier.accessPolicy.permissions,
+                        startsOn: identifier.accessPolicy.startsOn
+                            ? truncatedISO8061Date(identifier.accessPolicy.startsOn)
+                            : "",
+                    },
+                    id: identifier.id,
+                });
+            }
+            return assertResponse(await this.containerContext.setAccessPolicy({
+                abortSignal: options.abortSignal,
+                access,
+                containerAcl: acl,
+                leaseAccessConditions: options.conditions,
+                modifiedAccessConditions: options.conditions,
+                tracingOptions: updatedOptions.tracingOptions,
+            }));
+        });
+    }
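+    /*
+     * Usage sketch: keep the container private (no `access` argument) while publishing one
+     * stored access policy. The policy id and the "r" (read) permissions string are illustrative.
+     *
+     * ```js
+     * await containerClient.setAccessPolicy(undefined, [
+     *   {
+     *     id: "read-policy-1",
+     *     accessPolicy: {
+     *       startsOn: new Date(),
+     *       expiresOn: new Date(Date.now() + 24 * 60 * 60 * 1000),
+     *       permissions: "r",
+     *     },
+     *   },
+     * ]);
+     * ```
+     */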
+    /**
+     * Get a {@link BlobLeaseClient} that manages leases on the container.
+     *
+     * @param proposeLeaseId - Initial proposed lease Id.
+     * @returns A new BlobLeaseClient object for managing leases on the container.
+     */
+    getBlobLeaseClient(proposeLeaseId) {
+        return new BlobLeaseClient(this, proposeLeaseId);
+    }
+    /**
+     * Creates a new block blob, or updates the content of an existing block blob.
+     *
+     * Updating an existing block blob overwrites any existing metadata on the blob.
+     * Partial updates are not supported; the content of the existing blob is
+     * overwritten with the new content. To perform a partial update of a block blob,
+     * use {@link BlockBlobClient.stageBlock} and {@link BlockBlobClient.commitBlockList}.
+     *
+     * This is a non-parallel uploading method; please use {@link BlockBlobClient.uploadFile},
+     * {@link BlockBlobClient.uploadStream} or {@link BlockBlobClient.uploadBrowserData} for better
+     * performance with concurrent uploading.
+     *
+     * @see https://docs.microsoft.com/rest/api/storageservices/put-blob
+     *
+     * @param blobName - Name of the block blob to create or update.
+     * @param body - Blob, string, ArrayBuffer, ArrayBufferView or a function
+     *                               which returns a new Readable stream whose offset is from data source beginning.
+     * @param contentLength - Length of body in bytes. Use Buffer.byteLength() to calculate body length for a
+     *                               string including non-Base64/Hex-encoded characters.
+     * @param options - Options to configure the Block Blob Upload operation.
+     * @returns Block Blob upload response data and the corresponding BlockBlobClient instance.
+     */
+    async uploadBlockBlob(blobName, body, contentLength, options = {}) {
+        return tracingClient.withSpan("ContainerClient-uploadBlockBlob", options, async (updatedOptions) => {
+            const blockBlobClient = this.getBlockBlobClient(blobName);
+            const response = await blockBlobClient.upload(body, contentLength, updatedOptions);
+            return {
+                blockBlobClient,
+                response,
+            };
+        });
+    }
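+    /*
+     * Usage sketch (assumes `containerClient` exists; the blob name is illustrative):
+     *
+     * ```js
+     * const content = "Hello world!";
+     * const { blockBlobClient, response } = await containerClient.uploadBlockBlob(
+     *   "hello.txt",
+     *   content,
+     *   Buffer.byteLength(content)
+     * );
+     * console.log(response.requestId, blockBlobClient.url);
+     * ```
+     */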
+    /**
+     * Marks the specified blob or snapshot for deletion. The blob is later deleted
+     * during garbage collection. Note that in order to delete a blob, you must delete
+     * all of its snapshots. You can delete both at the same time with the Delete
+     * Blob operation.
+     * @see https://docs.microsoft.com/en-us/rest/api/storageservices/delete-blob
+     *
+     * @param blobName -
+     * @param options - Options to Blob Delete operation.
+     * @returns Block blob deletion response data.
+     */
+    async deleteBlob(blobName, options = {}) {
+        return tracingClient.withSpan("ContainerClient-deleteBlob", options, async (updatedOptions) => {
+            let blobClient = this.getBlobClient(blobName);
+            if (options.versionId) {
+                blobClient = blobClient.withVersion(options.versionId);
+            }
+            return blobClient.delete(updatedOptions);
+        });
+    }
+    /**
+     * listBlobFlatSegment returns a single segment of blobs starting from the
+     * specified Marker. Use an empty Marker to start enumeration from the beginning.
+     * After getting a segment, process it, and then call listBlobFlatSegment again
+     * (passing the previously-returned Marker) to get the next segment.
+     * @see https://docs.microsoft.com/rest/api/storageservices/list-blobs
+     *
+     * @param marker - A string value that identifies the portion of the list to be returned with the next list operation.
+     * @param options - Options to Container List Blob Flat Segment operation.
+     */
+    async listBlobFlatSegment(marker, options = {}) {
+        return tracingClient.withSpan("ContainerClient-listBlobFlatSegment", options, async (updatedOptions) => {
+            const response = assertResponse(await this.containerContext.listBlobFlatSegment(Object.assign(Object.assign({ marker }, options), { tracingOptions: updatedOptions.tracingOptions })));
+            const wrappedResponse = Object.assign(Object.assign({}, response), { _response: Object.assign(Object.assign({}, response._response), { parsedBody: ConvertInternalResponseOfListBlobFlat(response._response.parsedBody) }), segment: Object.assign(Object.assign({}, response.segment), { blobItems: response.segment.blobItems.map((blobItemInternal) => {
+                        const blobItem = Object.assign(Object.assign({}, blobItemInternal), { name: BlobNameToString(blobItemInternal.name), tags: toTags(blobItemInternal.blobTags), objectReplicationSourceProperties: parseObjectReplicationRecord(blobItemInternal.objectReplicationMetadata) });
+                        return blobItem;
+                    }) }) });
+            return wrappedResponse;
+        });
+    }
+    /**
+     * listBlobHierarchySegment returns a single segment of blobs starting from
+     * the specified Marker. Use an empty Marker to start enumeration from the
+     * beginning. After getting a segment, process it, and then call listBlobHierarchySegment
+     * again (passing the previously-returned Marker) to get the next segment.
+     * @see https://docs.microsoft.com/rest/api/storageservices/list-blobs
+     *
+     * @param delimiter - The character or string used to define the virtual hierarchy
+     * @param marker - A string value that identifies the portion of the list to be returned with the next list operation.
+     * @param options - Options to Container List Blob Hierarchy Segment operation.
+     */
+    async listBlobHierarchySegment(delimiter, marker, options = {}) {
+        return tracingClient.withSpan("ContainerClient-listBlobHierarchySegment", options, async (updatedOptions) => {
+            var _a;
+            const response = assertResponse(await this.containerContext.listBlobHierarchySegment(delimiter, Object.assign(Object.assign({ marker }, options), { tracingOptions: updatedOptions.tracingOptions })));
+            const wrappedResponse = Object.assign(Object.assign({}, response), { _response: Object.assign(Object.assign({}, response._response), { parsedBody: ConvertInternalResponseOfListBlobHierarchy(response._response.parsedBody) }), segment: Object.assign(Object.assign({}, response.segment), { blobItems: response.segment.blobItems.map((blobItemInternal) => {
+                        const blobItem = Object.assign(Object.assign({}, blobItemInternal), { name: BlobNameToString(blobItemInternal.name), tags: toTags(blobItemInternal.blobTags), objectReplicationSourceProperties: parseObjectReplicationRecord(blobItemInternal.objectReplicationMetadata) });
+                        return blobItem;
+                    }), blobPrefixes: (_a = response.segment.blobPrefixes) === null || _a === void 0 ? void 0 : _a.map((blobPrefixInternal) => {
+                        const blobPrefix = Object.assign(Object.assign({}, blobPrefixInternal), { name: BlobNameToString(blobPrefixInternal.name) });
+                        return blobPrefix;
+                    }) }) });
+            return wrappedResponse;
+        });
+    }
+    /**
+     * Returns an AsyncIterableIterator for ContainerListBlobFlatSegmentResponse
+     *
+     * @param marker - A string value that identifies the portion of
+     *                          the list of blobs to be returned with the next listing operation. The
+     *                          operation returns the ContinuationToken value within the response body if the
+     *                          listing operation did not return all blobs remaining to be listed
+     *                          with the current page. The ContinuationToken value can be used as the value for
+     *                          the marker parameter in a subsequent call to request the next page of list
+     *                          items. The marker value is opaque to the client.
+     * @param options - Options to list blobs operation.
+     */
+    listSegments(marker_1) {
+        return tslib.__asyncGenerator(this, arguments, function* listSegments_1(marker, options = {}) {
+            let listBlobsFlatSegmentResponse;
+            if (!!marker || marker === undefined) {
+                do {
+                    listBlobsFlatSegmentResponse = yield tslib.__await(this.listBlobFlatSegment(marker, options));
+                    marker = listBlobsFlatSegmentResponse.continuationToken;
+                    yield yield tslib.__await(yield tslib.__await(listBlobsFlatSegmentResponse));
+                } while (marker);
+            }
+        });
+    }
+    /**
+     * Returns an AsyncIterableIterator of {@link BlobItem} objects
+     *
+     * @param options - Options to list blobs operation.
+     */
+    listItems() {
+        return tslib.__asyncGenerator(this, arguments, function* listItems_1(options = {}) {
+            var _a, e_1, _b, _c;
+            let marker;
+            try {
+                for (var _d = true, _e = tslib.__asyncValues(this.listSegments(marker, options)), _f; _f = yield tslib.__await(_e.next()), _a = _f.done, !_a; _d = true) {
+                    _c = _f.value;
+                    _d = false;
+                    const listBlobsFlatSegmentResponse = _c;
+                    yield tslib.__await(yield* tslib.__asyncDelegator(tslib.__asyncValues(listBlobsFlatSegmentResponse.segment.blobItems)));
+                }
+            }
+            catch (e_1_1) { e_1 = { error: e_1_1 }; }
+            finally {
+                try {
+                    if (!_d && !_a && (_b = _e.return)) yield tslib.__await(_b.call(_e));
+                }
+                finally { if (e_1) throw e_1.error; }
+            }
+        });
+    }
+    /**
+     * Returns an async iterable iterator to list all the blobs
+     * under the specified account.
+     *
+     * .byPage() returns an async iterable iterator to list the blobs in pages.
+     *
+     * Example using `for await` syntax:
+     *
+     * ```js
+     * // Get the containerClient before you run these snippets,
+     * // Can be obtained from `blobServiceClient.getContainerClient("<your-container-name>");`
+     * let i = 1;
+     * for await (const blob of containerClient.listBlobsFlat()) {
+     *   console.log(`Blob ${i++}: ${blob.name}`);
+     * }
+     * ```
+     *
+     * Example using `iter.next()`:
+     *
+     * ```js
+     * let i = 1;
+     * let iter = containerClient.listBlobsFlat();
+     * let blobItem = await iter.next();
+     * while (!blobItem.done) {
+     *   console.log(`Blob ${i++}: ${blobItem.value.name}`);
+     *   blobItem = await iter.next();
+     * }
+     * ```
+     *
+     * Example using `byPage()`:
+     *
+     * ```js
+     * // passing optional maxPageSize in the page settings
+     * let i = 1;
+     * for await (const response of containerClient.listBlobsFlat().byPage({ maxPageSize: 20 })) {
+     *   for (const blob of response.segment.blobItems) {
+     *     console.log(`Blob ${i++}: ${blob.name}`);
+     *   }
+     * }
+     * ```
+     *
+     * Example using paging with a marker:
+     *
+     * ```js
+     * let i = 1;
+     * let iterator = containerClient.listBlobsFlat().byPage({ maxPageSize: 2 });
+     * let response = (await iterator.next()).value;
+     *
+     * // Prints 2 blob names
+     * for (const blob of response.segment.blobItems) {
+     *   console.log(`Blob ${i++}: ${blob.name}`);
+     * }
+     *
+     * // Gets next marker
+     * let marker = response.continuationToken;
+     *
+     * // Passing next marker as continuationToken
+     *
+     * iterator = containerClient.listBlobsFlat().byPage({ continuationToken: marker, maxPageSize: 10 });
+     * response = (await iterator.next()).value;
+     *
+     * // Prints 10 blob names
+     * for (const blob of response.segment.blobItems) {
+     *   console.log(`Blob ${i++}: ${blob.name}`);
+     * }
+     * ```
+     *
+     * @param options - Options to list blobs.
+     * @returns An asyncIterableIterator that supports paging.
+     */
+    listBlobsFlat(options = {}) {
+        const include = [];
+        if (options.includeCopy) {
+            include.push("copy");
+        }
+        if (options.includeDeleted) {
+            include.push("deleted");
+        }
+        if (options.includeMetadata) {
+            include.push("metadata");
+        }
+        if (options.includeSnapshots) {
+            include.push("snapshots");
+        }
+        if (options.includeVersions) {
+            include.push("versions");
+        }
+        if (options.includeUncommitedBlobs) {
+            include.push("uncommittedblobs");
+        }
+        if (options.includeTags) {
+            include.push("tags");
+        }
+        if (options.includeDeletedWithVersions) {
+            include.push("deletedwithversions");
+        }
+        if (options.includeImmutabilityPolicy) {
+            include.push("immutabilitypolicy");
+        }
+        if (options.includeLegalHold) {
+            include.push("legalhold");
+        }
+        if (options.prefix === "") {
+            options.prefix = undefined;
+        }
+        const updatedOptions = Object.assign(Object.assign({}, options), (include.length > 0 ? { include: include } : {}));
+        // AsyncIterableIterator to iterate over blobs
+        const iter = this.listItems(updatedOptions);
+        return {
+            /**
+             * The next method, part of the iteration protocol
+             */
+            next() {
+                return iter.next();
+            },
+            /**
+             * The connection to the async iterator, part of the iteration protocol
+             */
+            [Symbol.asyncIterator]() {
+                return this;
+            },
+            /**
+             * Return an AsyncIterableIterator that works a page at a time
+             */
+            byPage: (settings = {}) => {
+                return this.listSegments(settings.continuationToken, Object.assign({ maxPageSize: settings.maxPageSize }, updatedOptions));
+            },
+        };
+    }
+    /**
+     * Returns an AsyncIterableIterator for ContainerListBlobHierarchySegmentResponse
+     *
+     * @param delimiter - The character or string used to define the virtual hierarchy
+     * @param marker - A string value that identifies the portion of
+     *                          the list of blobs to be returned with the next listing operation. The
+     *                          operation returns the ContinuationToken value within the response body if the
+     *                          listing operation did not return all blobs remaining to be listed
+     *                          with the current page. The ContinuationToken value can be used as the value for
+     *                          the marker parameter in a subsequent call to request the next page of list
+     *                          items. The marker value is opaque to the client.
+     * @param options - Options to list blobs operation.
+     */
+    listHierarchySegments(delimiter_1, marker_1) {
+        return tslib.__asyncGenerator(this, arguments, function* listHierarchySegments_1(delimiter, marker, options = {}) {
+            let listBlobsHierarchySegmentResponse;
+            if (!!marker || marker === undefined) {
+                do {
+                    listBlobsHierarchySegmentResponse = yield tslib.__await(this.listBlobHierarchySegment(delimiter, marker, options));
+                    marker = listBlobsHierarchySegmentResponse.continuationToken;
+                    yield yield tslib.__await(yield tslib.__await(listBlobsHierarchySegmentResponse));
+                } while (marker);
+            }
+        });
+    }
+    /**
+     * Returns an AsyncIterableIterator for {@link BlobPrefix} and {@link BlobItem} objects.
+     *
+     * @param delimiter - The character or string used to define the virtual hierarchy
+     * @param options - Options to list blobs operation.
+     */
+    listItemsByHierarchy(delimiter_1) {
+        return tslib.__asyncGenerator(this, arguments, function* listItemsByHierarchy_1(delimiter, options = {}) {
+            var _a, e_2, _b, _c;
+            let marker;
+            try {
+                for (var _d = true, _e = tslib.__asyncValues(this.listHierarchySegments(delimiter, marker, options)), _f; _f = yield tslib.__await(_e.next()), _a = _f.done, !_a; _d = true) {
+                    _c = _f.value;
+                    _d = false;
+                    const listBlobsHierarchySegmentResponse = _c;
+                    const segment = listBlobsHierarchySegmentResponse.segment;
+                    if (segment.blobPrefixes) {
+                        for (const prefix of segment.blobPrefixes) {
+                            yield yield tslib.__await(Object.assign({ kind: "prefix" }, prefix));
+                        }
+                    }
+                    for (const blob of segment.blobItems) {
+                        yield yield tslib.__await(Object.assign({ kind: "blob" }, blob));
+                    }
+                }
+            }
+            catch (e_2_1) { e_2 = { error: e_2_1 }; }
+            finally {
+                try {
+                    if (!_d && !_a && (_b = _e.return)) yield tslib.__await(_b.call(_e));
+                }
+                finally { if (e_2) throw e_2.error; }
+            }
+        });
+    }
+    /**
+     * Returns an async iterable iterator to list all the blobs by hierarchy
+     * under the specified container.
+     *
+     * .byPage() returns an async iterable iterator to list the blobs by hierarchy in pages.
+     *
+     * Example using `for await` syntax:
+     *
+     * ```js
+     * for await (const item of containerClient.listBlobsByHierarchy("/")) {
+     *   if (item.kind === "prefix") {
+     *     console.log(`\tBlobPrefix: ${item.name}`);
+     *   } else {
+     *     console.log(`\tBlobItem: name - ${item.name}`);
+     *   }
+     * }
+     * ```
+     *
+     * Example using `iter.next()`:
+     *
+     * ```js
+     * let iter = containerClient.listBlobsByHierarchy("/", { prefix: "prefix1/" });
+     * let entity = await iter.next();
+     * while (!entity.done) {
+     *   let item = entity.value;
+     *   if (item.kind === "prefix") {
+     *     console.log(`\tBlobPrefix: ${item.name}`);
+     *   } else {
+     *     console.log(`\tBlobItem: name - ${item.name}`);
+     *   }
+     *   entity = await iter.next();
+     * }
+     * ```
+     *
+     * Example using `byPage()`:
+     *
+     * ```js
+     * console.log("Listing blobs by hierarchy by page");
+     * for await (const response of containerClient.listBlobsByHierarchy("/").byPage()) {
+     *   const segment = response.segment;
+     *   if (segment.blobPrefixes) {
+     *     for (const prefix of segment.blobPrefixes) {
+     *       console.log(`\tBlobPrefix: ${prefix.name}`);
+     *     }
+     *   }
+     *   for (const blob of response.segment.blobItems) {
+     *     console.log(`\tBlobItem: name - ${blob.name}`);
+     *   }
+     * }
+     * ```
+     *
+     * Example using paging with a max page size:
+     *
+     * ```js
+     * console.log("Listing blobs by hierarchy by page, specifying a prefix and a max page size");
+     *
+     * let i = 1;
+     * for await (const response of containerClient
+     *   .listBlobsByHierarchy("/", { prefix: "prefix2/sub1/" })
+     *   .byPage({ maxPageSize: 2 })) {
+     *   console.log(`Page ${i++}`);
+     *   const segment = response.segment;
+     *
+     *   if (segment.blobPrefixes) {
+     *     for (const prefix of segment.blobPrefixes) {
+     *       console.log(`\tBlobPrefix: ${prefix.name}`);
+     *     }
+     *   }
+     *
+     *   for (const blob of response.segment.blobItems) {
+     *     console.log(`\tBlobItem: name - ${blob.name}`);
+     *   }
+     * }
+     * ```
+     *
+     * @param delimiter - The character or string used to define the virtual hierarchy
+     * @param options - Options to list blobs operation.
+     */
+    listBlobsByHierarchy(delimiter, options = {}) {
+        if (delimiter === "") {
+            throw new RangeError("delimiter should contain one or more characters");
+        }
+        const include = [];
+        if (options.includeCopy) {
+            include.push("copy");
+        }
+        if (options.includeDeleted) {
+            include.push("deleted");
+        }
+        if (options.includeMetadata) {
+            include.push("metadata");
+        }
+        if (options.includeSnapshots) {
+            include.push("snapshots");
+        }
+        if (options.includeVersions) {
+            include.push("versions");
+        }
+        if (options.includeUncommitedBlobs) {
+            include.push("uncommittedblobs");
+        }
+        if (options.includeTags) {
+            include.push("tags");
+        }
+        if (options.includeDeletedWithVersions) {
+            include.push("deletedwithversions");
+        }
+        if (options.includeImmutabilityPolicy) {
+            include.push("immutabilitypolicy");
+        }
+        if (options.includeLegalHold) {
+            include.push("legalhold");
+        }
+        if (options.prefix === "") {
+            options.prefix = undefined;
+        }
+        const updatedOptions = Object.assign(Object.assign({}, options), (include.length > 0 ? { include: include } : {}));
+        // AsyncIterableIterator to iterate over blob prefixes and blobs
+        const iter = this.listItemsByHierarchy(delimiter, updatedOptions);
+        return {
+            /**
+             * The next method, part of the iteration protocol
+             */
+            async next() {
+                return iter.next();
+            },
+            /**
+             * The connection to the async iterator, part of the iteration protocol
+             */
+            [Symbol.asyncIterator]() {
+                return this;
+            },
+            /**
+             * Return an AsyncIterableIterator that works a page at a time
+             */
+            byPage: (settings = {}) => {
+                return this.listHierarchySegments(delimiter, settings.continuationToken, Object.assign({ maxPageSize: settings.maxPageSize }, updatedOptions));
+            },
+        };
+    }
+    /**
+     * The Filter Blobs operation enables callers to list blobs in the container whose tags
+     * match a given search expression.
+     *
+     * @param tagFilterSqlExpression - The where parameter enables the caller to query blobs whose tags match a given expression.
+     *                                        The given expression must evaluate to true for a blob to be returned in the results.
+     *                                        The [OData - ABNF] filter syntax rule defines the formal grammar for the value of the where query parameter;
+     *                                        however, only a subset of the OData filter syntax is supported in the Blob service.
+     * @param marker - A string value that identifies the portion of
+     *                          the list of blobs to be returned with the next listing operation. The
+     *                          operation returns the continuationToken value within the response body if the
+     *                          listing operation did not return all blobs remaining to be listed
+     *                          with the current page. The continuationToken value can be used as the value for
+     *                          the marker parameter in a subsequent call to request the next page of list
+     *                          items. The marker value is opaque to the client.
+     * @param options - Options to find blobs by tags.
+     */
+    async findBlobsByTagsSegment(tagFilterSqlExpression, marker, options = {}) {
+        return tracingClient.withSpan("ContainerClient-findBlobsByTagsSegment", options, async (updatedOptions) => {
+            const response = assertResponse(await this.containerContext.filterBlobs({
+                abortSignal: options.abortSignal,
+                where: tagFilterSqlExpression,
+                marker,
+                maxPageSize: options.maxPageSize,
+                tracingOptions: updatedOptions.tracingOptions,
+            }));
+            const wrappedResponse = Object.assign(Object.assign({}, response), { _response: response._response, blobs: response.blobs.map((blob) => {
+                    var _a;
+                    let tagValue = "";
+                    if (((_a = blob.tags) === null || _a === void 0 ? void 0 : _a.blobTagSet.length) === 1) {
+                        tagValue = blob.tags.blobTagSet[0].value;
+                    }
+                    return Object.assign(Object.assign({}, blob), { tags: toTags(blob.tags), tagValue });
+                }) });
+            return wrappedResponse;
+        });
+    }
+    /**
+     * Returns an AsyncIterableIterator for ContainerFindBlobsByTagsSegmentResponse.
+     *
+     * @param tagFilterSqlExpression -  The where parameter enables the caller to query blobs whose tags match a given expression.
+     *                                         The given expression must evaluate to true for a blob to be returned in the results.
+     *                                         The [OData - ABNF] filter syntax rule defines the formal grammar for the value of the where query parameter;
+     *                                         however, only a subset of the OData filter syntax is supported in the Blob service.
+     * @param marker - A string value that identifies the portion of
+     *                          the list of blobs to be returned with the next listing operation. The
+     *                          operation returns the continuationToken value within the response body if the
+     *                          listing operation did not return all blobs remaining to be listed
+     *                          with the current page. The continuationToken value can be used as the value for
+     *                          the marker parameter in a subsequent call to request the next page of list
+     *                          items. The marker value is opaque to the client.
+     * @param options - Options to find blobs by tags.
+     */
+    findBlobsByTagsSegments(tagFilterSqlExpression_1, marker_1) {
+        return tslib.__asyncGenerator(this, arguments, function* findBlobsByTagsSegments_1(tagFilterSqlExpression, marker, options = {}) {
+            let response;
+            if (!!marker || marker === undefined) {
+                do {
+                    response = yield tslib.__await(this.findBlobsByTagsSegment(tagFilterSqlExpression, marker, options));
+                    response.blobs = response.blobs || [];
+                    marker = response.continuationToken;
+                    yield yield tslib.__await(response);
+                } while (marker);
+            }
+        });
+    }
+    /**
+     * Returns an AsyncIterableIterator for blobs.
+     *
+     * @param tagFilterSqlExpression -  The where parameter enables the caller to query blobs whose tags match a given expression.
+     *                                         The given expression must evaluate to true for a blob to be returned in the results.
+     *                                         The [OData - ABNF] filter syntax rule defines the formal grammar for the value of the where query parameter;
+     *                                         however, only a subset of the OData filter syntax is supported in the Blob service.
+     * @param options - Options to findBlobsByTagsItems.
+     */
+    findBlobsByTagsItems(tagFilterSqlExpression_1) {
+        return tslib.__asyncGenerator(this, arguments, function* findBlobsByTagsItems_1(tagFilterSqlExpression, options = {}) {
+            var _a, e_3, _b, _c;
+            let marker;
+            try {
+                for (var _d = true, _e = tslib.__asyncValues(this.findBlobsByTagsSegments(tagFilterSqlExpression, marker, options)), _f; _f = yield tslib.__await(_e.next()), _a = _f.done, !_a; _d = true) {
+                    _c = _f.value;
+                    _d = false;
+                    const segment = _c;
+                    yield tslib.__await(yield* tslib.__asyncDelegator(tslib.__asyncValues(segment.blobs)));
+                }
+            }
+            catch (e_3_1) { e_3 = { error: e_3_1 }; }
+            finally {
+                try {
+                    if (!_d && !_a && (_b = _e.return)) yield tslib.__await(_b.call(_e));
+                }
+                finally { if (e_3) throw e_3.error; }
+            }
+        });
+    }
+    /**
+     * Returns an async iterable iterator to find all blobs with the specified tag
+     * under the specified container.
+     *
+     * .byPage() returns an async iterable iterator to list the blobs in pages.
+     *
+     * Example using `for await` syntax:
+     *
+     * ```js
+     * let i = 1;
+     * for await (const blob of containerClient.findBlobsByTags("tagkey='tagvalue'")) {
+     *   console.log(`Blob ${i++}: ${blob.name}`);
+     * }
+     * ```
+     *
+     * Example using `iter.next()`:
+     *
+     * ```js
+     * let i = 1;
+     * const iter = containerClient.findBlobsByTags("tagkey='tagvalue'");
+     * let blobItem = await iter.next();
+     * while (!blobItem.done) {
+     *   console.log(`Blob ${i++}: ${blobItem.value.name}`);
+     *   blobItem = await iter.next();
+     * }
+     * ```
+     *
+     * Example using `byPage()`:
+     *
+     * ```js
+     * // passing optional maxPageSize in the page settings
+     * let i = 1;
+     * for await (const response of containerClient.findBlobsByTags("tagkey='tagvalue'").byPage({ maxPageSize: 20 })) {
+     *   if (response.blobs) {
+     *     for (const blob of response.blobs) {
+     *       console.log(`Blob ${i++}: ${blob.name}`);
+     *     }
+     *   }
+     * }
+     * ```
+     *
+     * Example using paging with a marker:
+     *
+     * ```js
+     * let i = 1;
+     * let iterator = containerClient.findBlobsByTags("tagkey='tagvalue'").byPage({ maxPageSize: 2 });
+     * let response = (await iterator.next()).value;
+     *
+     * // Prints 2 blob names
+     * if (response.blobs) {
+     *   for (const blob of response.blobs) {
+     *     console.log(`Blob ${i++}: ${blob.name}`);
+     *   }
+     * }
+     *
+     * // Gets next marker
+     * let marker = response.continuationToken;
+     * // Passing next marker as continuationToken
+     * iterator = containerClient
+     *   .findBlobsByTags("tagkey='tagvalue'")
+     *   .byPage({ continuationToken: marker, maxPageSize: 10 });
+     * response = (await iterator.next()).value;
+     *
+     * // Prints blob names
+     * if (response.blobs) {
+     *   for (const blob of response.blobs) {
+     *      console.log(`Blob ${i++}: ${blob.name}`);
+     *   }
+     * }
+     * ```
+     *
+     * @param tagFilterSqlExpression -  The where parameter enables the caller to query blobs whose tags match a given expression.
+     *                                         The given expression must evaluate to true for a blob to be returned in the results.
+     *                                         The [OData - ABNF] filter syntax rule defines the formal grammar for the value of the where query parameter;
+     *                                         however, only a subset of the OData filter syntax is supported in the Blob service.
+     * @param options - Options to find blobs by tags.
+     */
+    findBlobsByTags(tagFilterSqlExpression, options = {}) {
+        // AsyncIterableIterator to iterate over blobs
+        const listSegmentOptions = Object.assign({}, options);
+        const iter = this.findBlobsByTagsItems(tagFilterSqlExpression, listSegmentOptions);
+        return {
+            /**
+             * The next method, part of the iteration protocol
+             */
+            next() {
+                return iter.next();
+            },
+            /**
+             * The connection to the async iterator, part of the iteration protocol
+             */
+            [Symbol.asyncIterator]() {
+                return this;
+            },
+            /**
+             * Return an AsyncIterableIterator that works a page at a time
+             */
+            byPage: (settings = {}) => {
+                return this.findBlobsByTagsSegments(tagFilterSqlExpression, settings.continuationToken, Object.assign({ maxPageSize: settings.maxPageSize }, listSegmentOptions));
+            },
+        };
+    }
+    /**
+     * The Get Account Information operation returns the sku name and account kind
+     * for the specified account.
+     * The Get Account Information operation is available on service versions beginning
+     * with version 2018-03-28.
+     * @see https://docs.microsoft.com/en-us/rest/api/storageservices/get-account-information
+     *
+     * @param options - Options to the Service Get Account Info operation.
+     * @returns Response data for the Service Get Account Info operation.
+     */
+    async getAccountInfo(options = {}) {
+        return tracingClient.withSpan("ContainerClient-getAccountInfo", options, async (updatedOptions) => {
+            return assertResponse(await this.containerContext.getAccountInfo({
+                abortSignal: options.abortSignal,
+                tracingOptions: updatedOptions.tracingOptions,
+            }));
+        });
+    }
+    getContainerNameFromUrl() {
+        let containerName;
+        try {
+            //  URL may look like the following
+            // "https://myaccount.blob.core.windows.net/mycontainer?sasString";
+            // "https://myaccount.blob.core.windows.net/mycontainer";
+            // IPv4/IPv6 address hosts, Endpoints - `http://127.0.0.1:10000/devstoreaccount1/containername`
+            // http://localhost:10001/devstoreaccount1/containername
+            const parsedUrl = new URL(this.url);
+            if (parsedUrl.hostname.split(".")[1] === "blob") {
+                // "https://myaccount.blob.core.windows.net/containername".
+                // "https://customdomain.com/containername".
+                // .getPath() -> /containername
+                containerName = parsedUrl.pathname.split("/")[1];
+            }
+            else if (isIpEndpointStyle(parsedUrl)) {
+                // IPv4/IPv6 address hosts... Example - http://192.0.0.10:10001/devstoreaccount1/containername
+                // Single word domain without a [dot] in the endpoint... Example - http://localhost:10001/devstoreaccount1/containername
+                // .getPath() -> /devstoreaccount1/containername
+                containerName = parsedUrl.pathname.split("/")[2];
+            }
+            else {
+                // "https://customdomain.com/containername".
+                // .getPath() -> /containername
+                containerName = parsedUrl.pathname.split("/")[1];
+            }
+            // decode the encoded containerName - to get all the special characters that might be present in it
+            containerName = decodeURIComponent(containerName);
+            if (!containerName) {
+                throw new Error("Provided containerName is invalid.");
+            }
+            return containerName;
+        }
+        catch (error) {
+            throw new Error("Unable to extract containerName with provided information.");
+        }
+    }
+    /**
+     * Only available for ContainerClient constructed with a shared key credential.
+     *
+     * Generates a Blob Container Service Shared Access Signature (SAS) URI based on the client properties
+     * and parameters passed in. The SAS is signed by the shared key credential of the client.
+     *
+     * @see https://docs.microsoft.com/en-us/rest/api/storageservices/constructing-a-service-sas
+     *
+     * @param options - Optional parameters.
+     * @returns The SAS URI consisting of the URI to the resource represented by this client, followed by the generated SAS token.
+     */
+    generateSasUrl(options) {
+        return new Promise((resolve) => {
+            if (!(this.credential instanceof StorageSharedKeyCredential)) {
+                throw new RangeError("Can only generate the SAS when the client is initialized with a shared key credential");
+            }
+            const sas = generateBlobSASQueryParameters(Object.assign({ containerName: this._containerName }, options), this.credential).toString();
+            resolve(appendToURLQuery(this.url, sas));
+        });
+    }
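+    /*
+     * Minimal usage sketch for `generateSasUrl`, assuming `containerClient` was
+     * constructed with a StorageSharedKeyCredential; the permission string and
+     * expiry below are illustrative values, not defaults.
+     *
+     * ```js
+     * const sasUrl = await containerClient.generateSasUrl({
+     *   permissions: ContainerSASPermissions.parse("r"), // read-only
+     *   expiresOn: new Date(Date.now() + 60 * 60 * 1000), // one hour from now
+     * });
+     * ```
+     */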
+    /**
+     * Only available for ContainerClient constructed with a shared key credential.
+     *
+     * Generates string to sign for a Blob Container Service Shared Access Signature (SAS) URI
+     * based on the client properties and parameters passed in. The SAS is signed by the shared key credential of the client.
+     *
+     * @see https://docs.microsoft.com/en-us/rest/api/storageservices/constructing-a-service-sas
+     *
+     * @param options - Optional parameters.
+     * @returns The string to sign for the SAS token.
+     */
+    /* eslint-disable-next-line @azure/azure-sdk/ts-naming-options*/
+    generateSasStringToSign(options) {
+        if (!(this.credential instanceof StorageSharedKeyCredential)) {
+            throw new RangeError("Can only generate the SAS when the client is initialized with a shared key credential");
+        }
+        return generateBlobSASQueryParametersInternal(Object.assign({ containerName: this._containerName }, options), this.credential).stringToSign;
+    }
+    /**
+     * Generates a Blob Container Service Shared Access Signature (SAS) URI based on the client properties
+     * and parameters passed in. The SAS is signed by the input user delegation key.
+     *
+     * @see https://docs.microsoft.com/en-us/rest/api/storageservices/constructing-a-service-sas
+     *
+     * @param options - Optional parameters.
+     * @param userDelegationKey -  Return value of `blobServiceClient.getUserDelegationKey()`
+     * @returns The SAS URI consisting of the URI to the resource represented by this client, followed by the generated SAS token.
+     */
+    generateUserDelegationSasUrl(options, userDelegationKey) {
+        return new Promise((resolve) => {
+            const sas = generateBlobSASQueryParameters(Object.assign({ containerName: this._containerName }, options), userDelegationKey, this.accountName).toString();
+            resolve(appendToURLQuery(this.url, sas));
+        });
+    }
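+    /*
+     * Sketch of the user delegation SAS flow, assuming `blobServiceClient` was
+     * constructed with a TokenCredential and `containerClient` targets the same
+     * account; the time window and permissions are illustrative.
+     *
+     * ```js
+     * const startsOn = new Date();
+     * const expiresOn = new Date(Date.now() + 60 * 60 * 1000);
+     * const userDelegationKey = await blobServiceClient.getUserDelegationKey(startsOn, expiresOn);
+     * const sasUrl = await containerClient.generateUserDelegationSasUrl(
+     *   { permissions: ContainerSASPermissions.parse("r"), expiresOn },
+     *   userDelegationKey
+     * );
+     * ```
+     */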
+    /**
+     * Generates string to sign for a Blob Container Service Shared Access Signature (SAS) URI
+     * based on the client properties and parameters passed in. The SAS is signed by the input user delegation key.
+     *
+     * @see https://docs.microsoft.com/en-us/rest/api/storageservices/constructing-a-service-sas
+     *
+     * @param options - Optional parameters.
+     * @param userDelegationKey -  Return value of `blobServiceClient.getUserDelegationKey()`
+     * @returns The string to sign for the SAS token.
+     */
+    generateUserDelegationSasStringToSign(options, userDelegationKey) {
+        return generateBlobSASQueryParametersInternal(Object.assign({ containerName: this._containerName }, options), userDelegationKey, this.accountName).stringToSign;
+    }
+    /**
+     * Creates a BlobBatchClient object to conduct batch operations.
+     *
+     * @see https://docs.microsoft.com/en-us/rest/api/storageservices/blob-batch
+     *
+     * @returns A new BlobBatchClient object for this container.
+     */
+    getBlobBatchClient() {
+        return new BlobBatchClient(this.url, this.pipeline);
+    }
+}
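+/*
+ * Sketch of batch usage via `getBlobBatchClient`, assuming `containerClient` and a
+ * `blobClient` for a blob inside that container already exist; error handling omitted.
+ *
+ * ```js
+ * const blobBatchClient = containerClient.getBlobBatchClient();
+ * const batch = blobBatchClient.createBatch();
+ * await batch.deleteBlob(blobClient);
+ * await blobBatchClient.submitBatch(batch);
+ * ```
+ */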
+
+// Copyright (c) Microsoft Corporation.
+// Licensed under the MIT License.
+/**
+ * ONLY AVAILABLE IN NODE.JS RUNTIME.
+ *
+ * This is a helper class to construct a string representing the permissions granted by an AccountSAS. Setting a value
+ * to true means that any SAS which uses these permissions will grant permissions for that operation. Once all the
+ * values are set, this should be serialized with toString and set as the permissions field on an
+ * {@link AccountSASSignatureValues} object. It is possible to construct the permissions string without this class, but
+ * the order of the permissions is particular and this class guarantees correctness.
+ */
+class AccountSASPermissions {
+    constructor() {
+        /**
+         * Permission to read resources and list queues and tables granted.
+         */
+        this.read = false;
+        /**
+         * Permission to write resources granted.
+         */
+        this.write = false;
+        /**
+         * Permission to delete blobs and files granted.
+         */
+        this.delete = false;
+        /**
+         * Permission to delete versions granted.
+         */
+        this.deleteVersion = false;
+        /**
+         * Permission to list blob containers, blobs, shares, directories, and files granted.
+         */
+        this.list = false;
+        /**
+         * Permission to add messages, table entities, and append to blobs granted.
+         */
+        this.add = false;
+        /**
+         * Permission to create blobs and files granted.
+         */
+        this.create = false;
+        /**
+         * Permission to update messages and table entities granted.
+         */
+        this.update = false;
+        /**
+         * Permission to get and delete messages granted.
+         */
+        this.process = false;
+        /**
+         * Specifies Tag access granted.
+         */
+        this.tag = false;
+        /**
+         * Permission to filter blobs.
+         */
+        this.filter = false;
+        /**
+         * Permission to set immutability policy.
+         */
+        this.setImmutabilityPolicy = false;
+        /**
+         * Specifies that Permanent Delete is permitted.
+         */
+        this.permanentDelete = false;
+    }
+    /**
+     * Parse initializes the AccountSASPermissions fields from a string.
+     *
+     * @param permissions -
+     */
+    static parse(permissions) {
+        const accountSASPermissions = new AccountSASPermissions();
+        for (const c of permissions) {
+            switch (c) {
+                case "r":
+                    accountSASPermissions.read = true;
+                    break;
+                case "w":
+                    accountSASPermissions.write = true;
+                    break;
+                case "d":
+                    accountSASPermissions.delete = true;
+                    break;
+                case "x":
+                    accountSASPermissions.deleteVersion = true;
+                    break;
+                case "l":
+                    accountSASPermissions.list = true;
+                    break;
+                case "a":
+                    accountSASPermissions.add = true;
+                    break;
+                case "c":
+                    accountSASPermissions.create = true;
+                    break;
+                case "u":
+                    accountSASPermissions.update = true;
+                    break;
+                case "p":
+                    accountSASPermissions.process = true;
+                    break;
+                case "t":
+                    accountSASPermissions.tag = true;
+                    break;
+                case "f":
+                    accountSASPermissions.filter = true;
+                    break;
+                case "i":
+                    accountSASPermissions.setImmutabilityPolicy = true;
+                    break;
+                case "y":
+                    accountSASPermissions.permanentDelete = true;
+                    break;
+                default:
+                    throw new RangeError(`Invalid permission character: ${c}`);
+            }
+        }
+        return accountSASPermissions;
+    }
+    /**
+     * Creates a {@link AccountSASPermissions} from a raw object which contains the same keys as it
+     * and boolean values for them.
+     *
+     * @param permissionLike -
+     */
+    static from(permissionLike) {
+        const accountSASPermissions = new AccountSASPermissions();
+        if (permissionLike.read) {
+            accountSASPermissions.read = true;
+        }
+        if (permissionLike.write) {
+            accountSASPermissions.write = true;
+        }
+        if (permissionLike.delete) {
+            accountSASPermissions.delete = true;
+        }
+        if (permissionLike.deleteVersion) {
+            accountSASPermissions.deleteVersion = true;
+        }
+        if (permissionLike.filter) {
+            accountSASPermissions.filter = true;
+        }
+        if (permissionLike.tag) {
+            accountSASPermissions.tag = true;
+        }
+        if (permissionLike.list) {
+            accountSASPermissions.list = true;
+        }
+        if (permissionLike.add) {
+            accountSASPermissions.add = true;
+        }
+        if (permissionLike.create) {
+            accountSASPermissions.create = true;
+        }
+        if (permissionLike.update) {
+            accountSASPermissions.update = true;
+        }
+        if (permissionLike.process) {
+            accountSASPermissions.process = true;
+        }
+        if (permissionLike.setImmutabilityPolicy) {
+            accountSASPermissions.setImmutabilityPolicy = true;
+        }
+        if (permissionLike.permanentDelete) {
+            accountSASPermissions.permanentDelete = true;
+        }
+        return accountSASPermissions;
+    }
+    /**
+     * Produces the SAS permissions string for an Azure Storage account.
+     * Call this method to set AccountSASSignatureValues Permissions field.
+     *
+     * Using this method will guarantee the permissions are in
+     * an order accepted by the service.
+     *
+     * @see https://docs.microsoft.com/en-us/rest/api/storageservices/constructing-an-account-sas
+     *
+     */
+    toString() {
+        // The order of the characters should be as specified here to ensure correctness:
+        // https://docs.microsoft.com/en-us/rest/api/storageservices/constructing-an-account-sas
+        // Use a string array instead of string concatenating += operator for performance
+        const permissions = [];
+        if (this.read) {
+            permissions.push("r");
+        }
+        if (this.write) {
+            permissions.push("w");
+        }
+        if (this.delete) {
+            permissions.push("d");
+        }
+        if (this.deleteVersion) {
+            permissions.push("x");
+        }
+        if (this.filter) {
+            permissions.push("f");
+        }
+        if (this.tag) {
+            permissions.push("t");
+        }
+        if (this.list) {
+            permissions.push("l");
+        }
+        if (this.add) {
+            permissions.push("a");
+        }
+        if (this.create) {
+            permissions.push("c");
+        }
+        if (this.update) {
+            permissions.push("u");
+        }
+        if (this.process) {
+            permissions.push("p");
+        }
+        if (this.setImmutabilityPolicy) {
+            permissions.push("i");
+        }
+        if (this.permanentDelete) {
+            permissions.push("y");
+        }
+        return permissions.join("");
+    }
+}
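+/*
+ * Round-trip sketch for AccountSASPermissions; the permission letters are
+ * illustrative, and toString() re-emits them in the order the service expects.
+ *
+ * ```js
+ * const permissions = AccountSASPermissions.parse("rwdl");
+ * permissions.add = true;
+ * console.log(permissions.toString()); // "rwdla"
+ * ```
+ */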
+
+// Copyright (c) Microsoft Corporation.
+// Licensed under the MIT License.
+/**
+ * ONLY AVAILABLE IN NODE.JS RUNTIME.
+ *
+ * This is a helper class to construct a string representing the resources accessible by an AccountSAS. Setting a value
+ * to true means that any SAS which uses these permissions will grant access to that resource type. Once all the
+ * values are set, this should be serialized with toString and set as the resources field on an
+ * {@link AccountSASSignatureValues} object. It is possible to construct the resources string without this class, but
+ * the order of the resources is particular and this class guarantees correctness.
+ */
+class AccountSASResourceTypes {
+    constructor() {
+        /**
+         * Permission to access service level APIs granted.
+         */
+        this.service = false;
+        /**
+         * Permission to access container level APIs (Blob Containers, Tables, Queues, File Shares) granted.
+         */
+        this.container = false;
+        /**
+         * Permission to access object level APIs (Blobs, Table Entities, Queue Messages, Files) granted.
+         */
+        this.object = false;
+    }
+    /**
+     * Creates an {@link AccountSASResourceTypes} from the specified resource types string. This method will throw an
+     * Error if it encounters a character that does not correspond to a valid resource type.
+     *
+     * @param resourceTypes -
+     */
+    static parse(resourceTypes) {
+        const accountSASResourceTypes = new AccountSASResourceTypes();
+        for (const c of resourceTypes) {
+            switch (c) {
+                case "s":
+                    accountSASResourceTypes.service = true;
+                    break;
+                case "c":
+                    accountSASResourceTypes.container = true;
+                    break;
+                case "o":
+                    accountSASResourceTypes.object = true;
+                    break;
+                default:
+                    throw new RangeError(`Invalid resource type: ${c}`);
+            }
+        }
+        return accountSASResourceTypes;
+    }
+    /**
+     * Converts the given resource types to a string.
+     *
+     * @see https://docs.microsoft.com/en-us/rest/api/storageservices/constructing-an-account-sas
+     *
+     */
+    toString() {
+        const resourceTypes = [];
+        if (this.service) {
+            resourceTypes.push("s");
+        }
+        if (this.container) {
+            resourceTypes.push("c");
+        }
+        if (this.object) {
+            resourceTypes.push("o");
+        }
+        return resourceTypes.join("");
+    }
+}
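+/*
+ * Sketch: the three resource-type flags serialize to the "sco" letters used in an
+ * account SAS; parse() and toString() are inverses.
+ *
+ * ```js
+ * const resourceTypes = AccountSASResourceTypes.parse("co");
+ * resourceTypes.service = true;
+ * console.log(resourceTypes.toString()); // "sco"
+ * ```
+ */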
+
+// Copyright (c) Microsoft Corporation.
+// Licensed under the MIT License.
+/**
+ * ONLY AVAILABLE IN NODE.JS RUNTIME.
+ *
+ * This is a helper class to construct a string representing the services accessible by an AccountSAS. Setting a value
+ * to true means that any SAS which uses these permissions will grant access to that service. Once all the
+ * values are set, this should be serialized with toString and set as the services field on an
+ * {@link AccountSASSignatureValues} object. It is possible to construct the services string without this class, but
+ * the order of the services is particular and this class guarantees correctness.
+ */
+class AccountSASServices {
+    constructor() {
+        /**
+         * Permission to access blob resources granted.
+         */
+        this.blob = false;
+        /**
+         * Permission to access file resources granted.
+         */
+        this.file = false;
+        /**
+         * Permission to access queue resources granted.
+         */
+        this.queue = false;
+        /**
+         * Permission to access table resources granted.
+         */
+        this.table = false;
+    }
+    /**
+     * Creates an {@link AccountSASServices} from the specified services string. This method will throw an
+     * Error if it encounters a character that does not correspond to a valid service.
+     *
+     * @param services -
+     */
+    static parse(services) {
+        const accountSASServices = new AccountSASServices();
+        for (const c of services) {
+            switch (c) {
+                case "b":
+                    accountSASServices.blob = true;
+                    break;
+                case "f":
+                    accountSASServices.file = true;
+                    break;
+                case "q":
+                    accountSASServices.queue = true;
+                    break;
+                case "t":
+                    accountSASServices.table = true;
+                    break;
+                default:
+                    throw new RangeError(`Invalid service character: ${c}`);
+            }
+        }
+        return accountSASServices;
+    }
+    /**
+     * Converts the given services to a string.
+     *
+     */
+    toString() {
+        const services = [];
+        if (this.blob) {
+            services.push("b");
+        }
+        if (this.table) {
+            services.push("t");
+        }
+        if (this.queue) {
+            services.push("q");
+        }
+        if (this.file) {
+            services.push("f");
+        }
+        return services.join("");
+    }
+}
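+/*
+ * Sketch: AccountSASServices maps the blob/file/queue/table flags to the services
+ * string; toString() always emits b, t, q, f in that fixed order.
+ *
+ * ```js
+ * const services = AccountSASServices.parse("fqb");
+ * console.log(services.toString()); // "bqf"
+ * ```
+ */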
+
+// Copyright (c) Microsoft Corporation.
+// Licensed under the MIT License.
+/**
+ * ONLY AVAILABLE IN NODE.JS RUNTIME.
+ *
+ * Generates a {@link SASQueryParameters} object which contains all SAS query parameters needed to make an actual
+ * REST request.
+ *
+ * @see https://docs.microsoft.com/en-us/rest/api/storageservices/constructing-an-account-sas
+ *
+ * @param accountSASSignatureValues -
+ * @param sharedKeyCredential -
+ */
+function generateAccountSASQueryParameters(accountSASSignatureValues, sharedKeyCredential) {
+    return generateAccountSASQueryParametersInternal(accountSASSignatureValues, sharedKeyCredential)
+        .sasQueryParameters;
+}
+function generateAccountSASQueryParametersInternal(accountSASSignatureValues, sharedKeyCredential) {
+    const version = accountSASSignatureValues.version
+        ? accountSASSignatureValues.version
+        : SERVICE_VERSION;
+    if (accountSASSignatureValues.permissions &&
+        accountSASSignatureValues.permissions.setImmutabilityPolicy &&
+        version < "2020-08-04") {
+        throw RangeError("'version' must be >= '2020-08-04' when provided 'i' permission.");
+    }
+    if (accountSASSignatureValues.permissions &&
+        accountSASSignatureValues.permissions.deleteVersion &&
+        version < "2019-10-10") {
+        throw RangeError("'version' must be >= '2019-10-10' when provided 'x' permission.");
+    }
+    if (accountSASSignatureValues.permissions &&
+        accountSASSignatureValues.permissions.permanentDelete &&
+        version < "2019-10-10") {
+        throw RangeError("'version' must be >= '2019-10-10' when provided 'y' permission.");
+    }
+    if (accountSASSignatureValues.permissions &&
+        accountSASSignatureValues.permissions.tag &&
+        version < "2019-12-12") {
+        throw RangeError("'version' must be >= '2019-12-12' when provided 't' permission.");
+    }
+    if (accountSASSignatureValues.permissions &&
+        accountSASSignatureValues.permissions.filter &&
+        version < "2019-12-12") {
+        throw RangeError("'version' must be >= '2019-12-12' when provided 'f' permission.");
+    }
+    if (accountSASSignatureValues.encryptionScope && version < "2020-12-06") {
+        throw RangeError("'version' must be >= '2020-12-06' when provided 'encryptionScope' in SAS.");
+    }
+    const parsedPermissions = AccountSASPermissions.parse(accountSASSignatureValues.permissions.toString());
+    const parsedServices = AccountSASServices.parse(accountSASSignatureValues.services).toString();
+    const parsedResourceTypes = AccountSASResourceTypes.parse(accountSASSignatureValues.resourceTypes).toString();
+    let stringToSign;
+    if (version >= "2020-12-06") {
+        stringToSign = [
+            sharedKeyCredential.accountName,
+            parsedPermissions,
+            parsedServices,
+            parsedResourceTypes,
+            accountSASSignatureValues.startsOn
+                ? truncatedISO8061Date(accountSASSignatureValues.startsOn, false)
+                : "",
+            truncatedISO8061Date(accountSASSignatureValues.expiresOn, false),
+            accountSASSignatureValues.ipRange ? ipRangeToString(accountSASSignatureValues.ipRange) : "",
+            accountSASSignatureValues.protocol ? accountSASSignatureValues.protocol : "",
+            version,
+            accountSASSignatureValues.encryptionScope ? accountSASSignatureValues.encryptionScope : "",
+            "", // Account SAS requires an additional newline character
+        ].join("\n");
+    }
+    else {
+        stringToSign = [
+            sharedKeyCredential.accountName,
+            parsedPermissions,
+            parsedServices,
+            parsedResourceTypes,
+            accountSASSignatureValues.startsOn
+                ? truncatedISO8061Date(accountSASSignatureValues.startsOn, false)
+                : "",
+            truncatedISO8061Date(accountSASSignatureValues.expiresOn, false),
+            accountSASSignatureValues.ipRange ? ipRangeToString(accountSASSignatureValues.ipRange) : "",
+            accountSASSignatureValues.protocol ? accountSASSignatureValues.protocol : "",
+            version,
+            "", // Account SAS requires an additional newline character
+        ].join("\n");
+    }
+    const signature = sharedKeyCredential.computeHMACSHA256(stringToSign);
+    return {
+        sasQueryParameters: new SASQueryParameters(version, signature, parsedPermissions.toString(), parsedServices, parsedResourceTypes, accountSASSignatureValues.protocol, accountSASSignatureValues.startsOn, accountSASSignatureValues.expiresOn, accountSASSignatureValues.ipRange, undefined, undefined, undefined, undefined, undefined, undefined, undefined, undefined, undefined, undefined, accountSASSignatureValues.encryptionScope),
+        stringToSign: stringToSign,
+    };
+}
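+/*
+ * Sketch of building an account SAS token with the helpers above, assuming a
+ * StorageSharedKeyCredential named `sharedKeyCredential`; the permissions,
+ * services, resource types, and expiry shown are illustrative.
+ *
+ * ```js
+ * const sasToken = generateAccountSASQueryParameters(
+ *   {
+ *     permissions: AccountSASPermissions.parse("rl"),
+ *     services: AccountSASServices.parse("b").toString(),
+ *     resourceTypes: AccountSASResourceTypes.parse("sco").toString(),
+ *     expiresOn: new Date(Date.now() + 60 * 60 * 1000),
+ *   },
+ *   sharedKeyCredential
+ * ).toString();
+ * ```
+ */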
+
+/**
+ * A BlobServiceClient represents a Client to the Azure Storage Blob service allowing you
+ * to manipulate blob containers.
+ */
+class BlobServiceClient extends StorageClient {
+    /**
+     *
+     * Creates an instance of BlobServiceClient from connection string.
+     *
+     * @param connectionString - Account connection string or a SAS connection string of an Azure storage account.
+     *                                  [ Note - Account connection string can only be used in NODE.JS runtime. ]
+     *                                  Account connection string example -
+     *                                  `DefaultEndpointsProtocol=https;AccountName=myaccount;AccountKey=accountKey;EndpointSuffix=core.windows.net`
+     *                                  SAS connection string example -
+     *                                  `BlobEndpoint=https://myaccount.blob.core.windows.net/;QueueEndpoint=https://myaccount.queue.core.windows.net/;FileEndpoint=https://myaccount.file.core.windows.net/;TableEndpoint=https://myaccount.table.core.windows.net/;SharedAccessSignature=sasString`
+     * @param options - Optional. Options to configure the HTTP pipeline.
+     */
+    static fromConnectionString(connectionString, 
+    // Legacy, no fix for eslint error without breaking. Disable it for this interface.
+    /* eslint-disable-next-line @azure/azure-sdk/ts-naming-options*/
+    options) {
+        options = options || {};
+        const extractedCreds = extractConnectionStringParts(connectionString);
+        if (extractedCreds.kind === "AccountConnString") {
+            if (coreUtil.isNode) {
+                const sharedKeyCredential = new StorageSharedKeyCredential(extractedCreds.accountName, extractedCreds.accountKey);
+                if (!options.proxyOptions) {
+                    options.proxyOptions = coreRestPipeline.getDefaultProxySettings(extractedCreds.proxyUri);
+                }
+                const pipeline = newPipeline(sharedKeyCredential, options);
+                return new BlobServiceClient(extractedCreds.url, pipeline);
+            }
+            else {
+                throw new Error("Account connection string is only supported in Node.js environment");
+            }
+        }
+        else if (extractedCreds.kind === "SASConnString") {
+            const pipeline = newPipeline(new AnonymousCredential(), options);
+            return new BlobServiceClient(extractedCreds.url + "?" + extractedCreds.accountSas, pipeline);
+        }
+        else {
+            throw new Error("Connection string must be either an Account connection string or a SAS connection string");
+        }
+    }
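+    /*
+     * Sketch of constructing the client from a connection string (account connection
+     * strings work in Node.js only); the environment variable name is an assumption,
+     * not a requirement of the SDK.
+     *
+     * ```js
+     * const blobServiceClient = BlobServiceClient.fromConnectionString(
+     *   process.env.AZURE_STORAGE_CONNECTION_STRING
+     * );
+     * ```
+     */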
+    constructor(url, credentialOrPipeline, 
+    // Legacy, no fix for eslint error without breaking. Disable it for this interface.
+    /* eslint-disable-next-line @azure/azure-sdk/ts-naming-options*/
+    options) {
+        let pipeline;
+        if (isPipelineLike(credentialOrPipeline)) {
+            pipeline = credentialOrPipeline;
+        }
+        else if ((coreUtil.isNode && credentialOrPipeline instanceof StorageSharedKeyCredential) ||
+            credentialOrPipeline instanceof AnonymousCredential ||
+            coreAuth.isTokenCredential(credentialOrPipeline)) {
+            pipeline = newPipeline(credentialOrPipeline, options);
+        }
+        else {
+            // The second parameter is undefined. Use anonymous credential
+            pipeline = newPipeline(new AnonymousCredential(), options);
+        }
+        super(url, pipeline);
+        this.serviceContext = this.storageClientContext.service;
+    }
+    /**
+     * Creates a {@link ContainerClient} object
+     *
+     * @param containerName - A container name
+     * @returns A new ContainerClient object for the given container name.
+     *
+     * Example usage:
+     *
+     * ```js
+     * const containerClient = blobServiceClient.getContainerClient("<container name>");
+     * ```
+     */
+    getContainerClient(containerName) {
+        return new ContainerClient(appendToURLPath(this.url, encodeURIComponent(containerName)), this.pipeline);
+    }
+    /**
+     * Create a Blob container. @see https://docs.microsoft.com/en-us/rest/api/storageservices/create-container
+     *
+     * @param containerName - Name of the container to create.
+     * @param options - Options to configure Container Create operation.
+     * @returns Container creation response and the corresponding container client.
+     */
+    async createContainer(containerName, options = {}) {
+        return tracingClient.withSpan("BlobServiceClient-createContainer", options, async (updatedOptions) => {
+            const containerClient = this.getContainerClient(containerName);
+            const containerCreateResponse = await containerClient.create(updatedOptions);
+            return {
+                containerClient,
+                containerCreateResponse,
+            };
+        });
+    }
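+    /*
+     * Minimal sketch of createContainer, assuming `blobServiceClient` is already
+     * constructed; the container name is illustrative.
+     *
+     * ```js
+     * const { containerClient, containerCreateResponse } =
+     *   await blobServiceClient.createContainer("my-new-container");
+     * console.log(`Created container, request id: ${containerCreateResponse.requestId}`);
+     * ```
+     */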
+    /**
+     * Deletes a Blob container.
+     *
+     * @param containerName - Name of the container to delete.
+     * @param options - Options to configure Container Delete operation.
+     * @returns Container deletion response.
+     */
+    async deleteContainer(containerName, options = {}) {
+        return tracingClient.withSpan("BlobServiceClient-deleteContainer", options, async (updatedOptions) => {
+            const containerClient = this.getContainerClient(containerName);
+            return containerClient.delete(updatedOptions);
+        });
+    }
+    /**
+     * Restore a previously deleted Blob container.
+     * This API is only functional if Container Soft Delete is enabled for the storage account associated with the container.
+     *
+     * @param deletedContainerName - Name of the previously deleted container.
+     * @param deletedContainerVersion - Version of the previously deleted container, used to uniquely identify the deleted container.
+     * @param options - Options to configure Container Restore operation.
+     * @returns Container undelete response and the corresponding container client.
+     */
+    async undeleteContainer(deletedContainerName, deletedContainerVersion, options = {}) {
+        return tracingClient.withSpan("BlobServiceClient-undeleteContainer", options, async (updatedOptions) => {
+            const containerClient = this.getContainerClient(options.destinationContainerName || deletedContainerName);
+            // Hack to access a protected member.
+            const containerContext = containerClient["storageClientContext"].container;
+            const containerUndeleteResponse = assertResponse(await containerContext.restore({
+                deletedContainerName,
+                deletedContainerVersion,
+                tracingOptions: updatedOptions.tracingOptions,
+            }));
+            return { containerClient, containerUndeleteResponse };
+        });
+    }
+    /**
+     * Rename an existing Blob Container.
+     *
+     * @param sourceContainerName - The name of the source container.
+     * @param destinationContainerName - The new name of the container.
+     * @param options - Options to configure Container Rename operation.
+     */
+    /* eslint-disable-next-line @typescript-eslint/ban-ts-comment */
+    // @ts-ignore Need to hide this interface for now. Make it public and turn on the live tests for it when the service is ready.
+    async renameContainer(sourceContainerName, destinationContainerName, options = {}) {
+        return tracingClient.withSpan("BlobServiceClient-renameContainer", options, async (updatedOptions) => {
+            var _a;
+            const containerClient = this.getContainerClient(destinationContainerName);
+            // Hack to access a protected member.
+            const containerContext = containerClient["storageClientContext"].container;
+            const containerRenameResponse = assertResponse(await containerContext.rename(sourceContainerName, Object.assign(Object.assign({}, updatedOptions), { sourceLeaseId: (_a = options.sourceCondition) === null || _a === void 0 ? void 0 : _a.leaseId })));
+            return { containerClient, containerRenameResponse };
+        });
+    }
+    /**
+     * Gets the properties of a storage account’s Blob service, including properties
+     * for Storage Analytics and CORS (Cross-Origin Resource Sharing) rules.
+     * @see https://docs.microsoft.com/en-us/rest/api/storageservices/get-blob-service-properties
+     *
+     * @param options - Options to the Service Get Properties operation.
+     * @returns Response data for the Service Get Properties operation.
+     */
+    async getProperties(options = {}) {
+        return tracingClient.withSpan("BlobServiceClient-getProperties", options, async (updatedOptions) => {
+            return assertResponse(await this.serviceContext.getProperties({
+                abortSignal: options.abortSignal,
+                tracingOptions: updatedOptions.tracingOptions,
+            }));
+        });
+    }
+    /**
+     * Sets properties for a storage account’s Blob service endpoint, including properties
+     * for Storage Analytics, CORS (Cross-Origin Resource Sharing) rules and soft delete settings.
+     * @see https://docs.microsoft.com/en-us/rest/api/storageservices/set-blob-service-properties
+     *
+     * @param properties -
+     * @param options - Options to the Service Set Properties operation.
+     * @returns Response data for the Service Set Properties operation.
+     */
+    async setProperties(properties, options = {}) {
+        return tracingClient.withSpan("BlobServiceClient-setProperties", options, async (updatedOptions) => {
+            return assertResponse(await this.serviceContext.setProperties(properties, {
+                abortSignal: options.abortSignal,
+                tracingOptions: updatedOptions.tracingOptions,
+            }));
+        });
+    }
+    /**
+     * Retrieves statistics related to replication for the Blob service. It is only
+     * available on the secondary location endpoint when read-access geo-redundant
+     * replication is enabled for the storage account.
+     * @see https://docs.microsoft.com/en-us/rest/api/storageservices/get-blob-service-stats
+     *
+     * @param options - Options to the Service Get Statistics operation.
+     * @returns Response data for the Service Get Statistics operation.
+     */
+    async getStatistics(options = {}) {
+        return tracingClient.withSpan("BlobServiceClient-getStatistics", options, async (updatedOptions) => {
+            return assertResponse(await this.serviceContext.getStatistics({
+                abortSignal: options.abortSignal,
+                tracingOptions: updatedOptions.tracingOptions,
+            }));
+        });
+    }
+    /**
+     * The Get Account Information operation returns the sku name and account kind
+     * for the specified account.
+     * The Get Account Information operation is available on service versions beginning
+     * with version 2018-03-28.
+     * @see https://docs.microsoft.com/en-us/rest/api/storageservices/get-account-information
+     *
+     * @param options - Options to the Service Get Account Info operation.
+     * @returns Response data for the Service Get Account Info operation.
+     */
+    async getAccountInfo(options = {}) {
+        return tracingClient.withSpan("BlobServiceClient-getAccountInfo", options, async (updatedOptions) => {
+            return assertResponse(await this.serviceContext.getAccountInfo({
+                abortSignal: options.abortSignal,
+                tracingOptions: updatedOptions.tracingOptions,
+            }));
+        });
+    }
+    /**
+     * Returns a list of the containers under the specified account.
+     * @see https://docs.microsoft.com/en-us/rest/api/storageservices/list-containers2
+     *
+     * @param marker - A string value that identifies the portion of
+     *                        the list of containers to be returned with the next listing operation. The
+     *                        operation returns the continuationToken value within the response body if the
+     *                        listing operation did not return all containers remaining to be listed
+     *                        with the current page. The continuationToken value can be used as the value for
+     *                        the marker parameter in a subsequent call to request the next page of list
+     *                        items. The marker value is opaque to the client.
+     * @param options - Options to the Service List Container Segment operation.
+     * @returns Response data for the Service List Container Segment operation.
+     */
+    async listContainersSegment(marker, options = {}) {
+        return tracingClient.withSpan("BlobServiceClient-listContainersSegment", options, async (updatedOptions) => {
+            return assertResponse(await this.serviceContext.listContainersSegment(Object.assign(Object.assign({ abortSignal: options.abortSignal, marker }, options), { include: typeof options.include === "string" ? [options.include] : options.include, tracingOptions: updatedOptions.tracingOptions })));
+        });
+    }
+    /**
+     * The Filter Blobs operation enables callers to list blobs across all containers whose tags
+     * match a given search expression. Filter blobs searches across all containers within a
+     * storage account but can be scoped within the expression to a single container.
+     *
+     * @param tagFilterSqlExpression - The where parameter enables the caller to query blobs whose tags match a given expression.
+     *                                        The given expression must evaluate to true for a blob to be returned in the results.
+     *                                 The [OData - ABNF] filter syntax rule defines the formal grammar for the value of the where query parameter;
+     *                                        however, only a subset of the OData filter syntax is supported in the Blob service.
+     * @param marker - A string value that identifies the portion of
+     *                          the list of blobs to be returned with the next listing operation. The
+     *                          operation returns the continuationToken value within the response body if the
+     *                          listing operation did not return all blobs remaining to be listed
+     *                          with the current page. The continuationToken value can be used as the value for
+     *                          the marker parameter in a subsequent call to request the next page of list
+     *                          items. The marker value is opaque to the client.
+     * @param options - Options to find blobs by tags.
+     */
+    async findBlobsByTagsSegment(tagFilterSqlExpression, marker, options = {}) {
+        return tracingClient.withSpan("BlobServiceClient-findBlobsByTagsSegment", options, async (updatedOptions) => {
+            const response = assertResponse(await this.serviceContext.filterBlobs({
+                abortSignal: options.abortSignal,
+                where: tagFilterSqlExpression,
+                marker,
+                maxPageSize: options.maxPageSize,
+                tracingOptions: updatedOptions.tracingOptions,
+            }));
+            const wrappedResponse = Object.assign(Object.assign({}, response), { _response: response._response, blobs: response.blobs.map((blob) => {
+                    var _a;
+                    let tagValue = "";
+                    if (((_a = blob.tags) === null || _a === void 0 ? void 0 : _a.blobTagSet.length) === 1) {
+                        tagValue = blob.tags.blobTagSet[0].value;
+                    }
+                    return Object.assign(Object.assign({}, blob), { tags: toTags(blob.tags), tagValue });
+                }) });
+            return wrappedResponse;
+        });
+    }
+    /**
+     * Returns an AsyncIterableIterator for ServiceFindBlobsByTagsSegmentResponse.
+     *
+     * @param tagFilterSqlExpression -  The where parameter enables the caller to query blobs whose tags match a given expression.
+     *                                         The given expression must evaluate to true for a blob to be returned in the results.
+     *                                  The [OData - ABNF] filter syntax rule defines the formal grammar for the value of the where query parameter;
+     *                                         however, only a subset of the OData filter syntax is supported in the Blob service.
+     * @param marker - A string value that identifies the portion of
+     *                          the list of blobs to be returned with the next listing operation. The
+     *                          operation returns the continuationToken value within the response body if the
+     *                          listing operation did not return all blobs remaining to be listed
+     *                          with the current page. The continuationToken value can be used as the value for
+     *                          the marker parameter in a subsequent call to request the next page of list
+     *                          items. The marker value is opaque to the client.
+     * @param options - Options to find blobs by tags.
+     */
+    findBlobsByTagsSegments(tagFilterSqlExpression_1, marker_1) {
+        return tslib.__asyncGenerator(this, arguments, function* findBlobsByTagsSegments_1(tagFilterSqlExpression, marker, options = {}) {
+            let response;
+            if (!!marker || marker === undefined) {
+                do {
+                    response = yield tslib.__await(this.findBlobsByTagsSegment(tagFilterSqlExpression, marker, options));
+                    response.blobs = response.blobs || [];
+                    marker = response.continuationToken;
+                    yield yield tslib.__await(response);
+                } while (marker);
+            }
+        });
+    }
+    /**
+     * Returns an AsyncIterableIterator for blobs.
+     *
+     * @param tagFilterSqlExpression -  The where parameter enables the caller to query blobs whose tags match a given expression.
+     *                                         The given expression must evaluate to true for a blob to be returned in the results.
+     *                                  The [OData - ABNF] filter syntax rule defines the formal grammar for the value of the where query parameter;
+     *                                         however, only a subset of the OData filter syntax is supported in the Blob service.
+     * @param options - Options to findBlobsByTagsItems.
+     */
+    findBlobsByTagsItems(tagFilterSqlExpression_1) {
+        return tslib.__asyncGenerator(this, arguments, function* findBlobsByTagsItems_1(tagFilterSqlExpression, options = {}) {
+            var _a, e_1, _b, _c;
+            let marker;
+            try {
+                for (var _d = true, _e = tslib.__asyncValues(this.findBlobsByTagsSegments(tagFilterSqlExpression, marker, options)), _f; _f = yield tslib.__await(_e.next()), _a = _f.done, !_a; _d = true) {
+                    _c = _f.value;
+                    _d = false;
+                    const segment = _c;
+                    yield tslib.__await(yield* tslib.__asyncDelegator(tslib.__asyncValues(segment.blobs)));
+                }
+            }
+            catch (e_1_1) { e_1 = { error: e_1_1 }; }
+            finally {
+                try {
+                    if (!_d && !_a && (_b = _e.return)) yield tslib.__await(_b.call(_e));
+                }
+                finally { if (e_1) throw e_1.error; }
+            }
+        });
+    }
+    /**
+     * Returns an async iterable iterator to find all blobs with specified tag
+     * under the specified account.
+     *
+     * .byPage() returns an async iterable iterator to list the blobs in pages.
+     *
+     * @see https://docs.microsoft.com/en-us/rest/api/storageservices/find-blobs-by-tags
+     *
+     * Example using `for await` syntax:
+     *
+     * ```js
+     * let i = 1;
+     * for await (const blob of blobServiceClient.findBlobsByTags("tagkey='tagvalue'")) {
+     *   console.log(`Blob ${i++}: ${blob.name}`);
+     * }
+     * ```
+     *
+     * Example using `iter.next()`:
+     *
+     * ```js
+     * let i = 1;
+     * const iter = blobServiceClient.findBlobsByTags("tagkey='tagvalue'");
+     * let blobItem = await iter.next();
+     * while (!blobItem.done) {
+     *   console.log(`Blob ${i++}: ${blobItem.value.name}`);
+     *   blobItem = await iter.next();
+     * }
+     * ```
+     *
+     * Example using `byPage()`:
+     *
+     * ```js
+     * // passing optional maxPageSize in the page settings
+     * let i = 1;
+     * for await (const response of blobServiceClient.findBlobsByTags("tagkey='tagvalue'").byPage({ maxPageSize: 20 })) {
+     *   if (response.blobs) {
+     *     for (const blob of response.blobs) {
+     *       console.log(`Blob ${i++}: ${blob.name}`);
+     *     }
+     *   }
+     * }
+     * ```
+     *
+     * Example using paging with a marker:
+     *
+     * ```js
+     * let i = 1;
+     * let iterator = blobServiceClient.findBlobsByTags("tagkey='tagvalue'").byPage({ maxPageSize: 2 });
+     * let response = (await iterator.next()).value;
+     *
+     * // Prints 2 blob names
+     * if (response.blobs) {
+     *   for (const blob of response.blobs) {
+     *     console.log(`Blob ${i++}: ${blob.name}`);
+     *   }
+     * }
+     *
+     * // Gets next marker
+     * let marker = response.continuationToken;
+     * // Passing next marker as continuationToken
+     * iterator = blobServiceClient
+     *   .findBlobsByTags("tagkey='tagvalue'")
+     *   .byPage({ continuationToken: marker, maxPageSize: 10 });
+     * response = (await iterator.next()).value;
+     *
+     * // Prints blob names
+     * if (response.blobs) {
+     *   for (const blob of response.blobs) {
+     *      console.log(`Blob ${i++}: ${blob.name}`);
+     *   }
+     * }
+     * ```
+     *
+     * @param tagFilterSqlExpression -  The where parameter enables the caller to query blobs whose tags match a given expression.
+     *                                         The given expression must evaluate to true for a blob to be returned in the results.
+     *                                  The [OData - ABNF] filter syntax rule defines the formal grammar for the value of the where query parameter;
+     *                                         however, only a subset of the OData filter syntax is supported in the Blob service.
+     * @param options - Options to find blobs by tags.
+     */
+    findBlobsByTags(tagFilterSqlExpression, options = {}) {
+        // AsyncIterableIterator to iterate over blobs
+        const listSegmentOptions = Object.assign({}, options);
+        const iter = this.findBlobsByTagsItems(tagFilterSqlExpression, listSegmentOptions);
+        return {
+            /**
+             * The next method, part of the iteration protocol
+             */
+            next() {
+                return iter.next();
+            },
+            /**
+             * The connection to the async iterator, part of the iteration protocol
+             */
+            [Symbol.asyncIterator]() {
+                return this;
+            },
+            /**
+             * Return an AsyncIterableIterator that works a page at a time
+             */
+            byPage: (settings = {}) => {
+                return this.findBlobsByTagsSegments(tagFilterSqlExpression, settings.continuationToken, Object.assign({ maxPageSize: settings.maxPageSize }, listSegmentOptions));
+            },
+        };
+    }
+    /**
+     * Returns an AsyncIterableIterator for ServiceListContainersSegmentResponses
+     *
+     * @param marker - A string value that identifies the portion of
+     *                        the list of containers to be returned with the next listing operation. The
+     *                        operation returns the continuationToken value within the response body if the
+     *                        listing operation did not return all containers remaining to be listed
+     *                        with the current page. The continuationToken value can be used as the value for
+     *                        the marker parameter in a subsequent call to request the next page of list
+     *                        items. The marker value is opaque to the client.
+     * @param options - Options to list containers operation.
+     */
+    listSegments(marker_1) {
+        return tslib.__asyncGenerator(this, arguments, function* listSegments_1(marker, options = {}) {
+            let listContainersSegmentResponse;
+            if (!!marker || marker === undefined) {
+                do {
+                    listContainersSegmentResponse = yield tslib.__await(this.listContainersSegment(marker, options));
+                    listContainersSegmentResponse.containerItems =
+                        listContainersSegmentResponse.containerItems || [];
+                    marker = listContainersSegmentResponse.continuationToken;
+                    yield yield tslib.__await(yield tslib.__await(listContainersSegmentResponse));
+                } while (marker);
+            }
+        });
+    }
+    /**
+     * Returns an AsyncIterableIterator for Container Items
+     *
+     * @param options - Options to list containers operation.
+     */
+    listItems() {
+        return tslib.__asyncGenerator(this, arguments, function* listItems_1(options = {}) {
+            var _a, e_2, _b, _c;
+            let marker;
+            try {
+                for (var _d = true, _e = tslib.__asyncValues(this.listSegments(marker, options)), _f; _f = yield tslib.__await(_e.next()), _a = _f.done, !_a; _d = true) {
+                    _c = _f.value;
+                    _d = false;
+                    const segment = _c;
+                    yield tslib.__await(yield* tslib.__asyncDelegator(tslib.__asyncValues(segment.containerItems)));
+                }
+            }
+            catch (e_2_1) { e_2 = { error: e_2_1 }; }
+            finally {
+                try {
+                    if (!_d && !_a && (_b = _e.return)) yield tslib.__await(_b.call(_e));
+                }
+                finally { if (e_2) throw e_2.error; }
+            }
+        });
+    }
+    /**
+     * Returns an async iterable iterator to list all the containers
+     * under the specified account.
+     *
+     * .byPage() returns an async iterable iterator to list the containers in pages.
+     *
+     * Example using `for await` syntax:
+     *
+     * ```js
+     * let i = 1;
+     * for await (const container of blobServiceClient.listContainers()) {
+     *   console.log(`Container ${i++}: ${container.name}`);
+     * }
+     * ```
+     *
+     * Example using `iter.next()`:
+     *
+     * ```js
+     * let i = 1;
+     * const iter = blobServiceClient.listContainers();
+     * let containerItem = await iter.next();
+     * while (!containerItem.done) {
+     *   console.log(`Container ${i++}: ${containerItem.value.name}`);
+     *   containerItem = await iter.next();
+     * }
+     * ```
+     *
+     * Example using `byPage()`:
+     *
+     * ```js
+     * // passing optional maxPageSize in the page settings
+     * let i = 1;
+     * for await (const response of blobServiceClient.listContainers().byPage({ maxPageSize: 20 })) {
+     *   if (response.containerItems) {
+     *     for (const container of response.containerItems) {
+     *       console.log(`Container ${i++}: ${container.name}`);
+     *     }
+     *   }
+     * }
+     * ```
+     *
+     * Example using paging with a marker:
+     *
+     * ```js
+     * let i = 1;
+     * let iterator = blobServiceClient.listContainers().byPage({ maxPageSize: 2 });
+     * let response = (await iterator.next()).value;
+     *
+     * // Prints 2 container names
+     * if (response.containerItems) {
+     *   for (const container of response.containerItems) {
+     *     console.log(`Container ${i++}: ${container.name}`);
+     *   }
+     * }
+     *
+     * // Gets next marker
+     * let marker = response.continuationToken;
+     * // Passing next marker as continuationToken
+     * iterator = blobServiceClient
+     *   .listContainers()
+     *   .byPage({ continuationToken: marker, maxPageSize: 10 });
+     * response = (await iterator.next()).value;
+     *
+     * // Prints 10 container names
+     * if (response.containerItems) {
+     *   for (const container of response.containerItems) {
+     *      console.log(`Container ${i++}: ${container.name}`);
+     *   }
+     * }
+     * ```
+     *
+     * @param options - Options to list containers.
+     * @returns An asyncIterableIterator that supports paging.
+     */
+    listContainers(options = {}) {
+        if (options.prefix === "") {
+            options.prefix = undefined;
+        }
+        const include = [];
+        if (options.includeDeleted) {
+            include.push("deleted");
+        }
+        if (options.includeMetadata) {
+            include.push("metadata");
+        }
+        if (options.includeSystem) {
+            include.push("system");
+        }
+        // AsyncIterableIterator to iterate over containers
+        const listSegmentOptions = Object.assign(Object.assign({}, options), (include.length > 0 ? { include } : {}));
+        const iter = this.listItems(listSegmentOptions);
+        return {
+            /**
+             * The next method, part of the iteration protocol
+             */
+            next() {
+                return iter.next();
+            },
+            /**
+             * The connection to the async iterator, part of the iteration protocol
+             */
+            [Symbol.asyncIterator]() {
+                return this;
+            },
+            /**
+             * Return an AsyncIterableIterator that works a page at a time
+             */
+            byPage: (settings = {}) => {
+                return this.listSegments(settings.continuationToken, Object.assign({ maxPageSize: settings.maxPageSize }, listSegmentOptions));
+            },
+        };
+    }
+    /**
+     * ONLY AVAILABLE WHEN USING BEARER TOKEN AUTHENTICATION (TokenCredential).
+     *
+     * Retrieves a user delegation key for the Blob service. This is only a valid operation when using
+     * bearer token authentication.
+     *
+     * @see https://docs.microsoft.com/en-us/rest/api/storageservices/get-user-delegation-key
+     *
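+     * Example usage (a minimal sketch; assumes the client was constructed with a
+     * `TokenCredential` such as `DefaultAzureCredential`):
+     *
+     * ```js
+     * const startsOn = new Date();
+     * const expiresOn = new Date(startsOn.valueOf() + 3600 * 1000); // one hour
+     * const delegationKey = await blobServiceClient.getUserDelegationKey(startsOn, expiresOn);
+     * ```
+     *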
+     * @param startsOn -      The start time for the user delegation SAS. Must be within 7 days of the current time
+     * @param expiresOn -     The end time for the user delegation SAS. Must be within 7 days of the current time
+     */
+    async getUserDelegationKey(startsOn, expiresOn, options = {}) {
+        return tracingClient.withSpan("BlobServiceClient-getUserDelegationKey", options, async (updatedOptions) => {
+            const response = assertResponse(await this.serviceContext.getUserDelegationKey({
+                startsOn: truncatedISO8061Date(startsOn, false),
+                expiresOn: truncatedISO8061Date(expiresOn, false),
+            }, {
+                abortSignal: options.abortSignal,
+                tracingOptions: updatedOptions.tracingOptions,
+            }));
+            const userDelegationKey = {
+                signedObjectId: response.signedObjectId,
+                signedTenantId: response.signedTenantId,
+                signedStartsOn: new Date(response.signedStartsOn),
+                signedExpiresOn: new Date(response.signedExpiresOn),
+                signedService: response.signedService,
+                signedVersion: response.signedVersion,
+                value: response.value,
+            };
+            const res = Object.assign({ _response: response._response, requestId: response.requestId, clientRequestId: response.clientRequestId, version: response.version, date: response.date, errorCode: response.errorCode }, userDelegationKey);
+            return res;
+        });
+    }
+    /**
+     * Creates a BlobBatchClient object to conduct batch operations.
+     *
+     * @see https://docs.microsoft.com/en-us/rest/api/storageservices/blob-batch
+     *
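+     * Example usage (a minimal sketch; the batch operations submitted through the
+     * returned client are up to the caller):
+     *
+     * ```js
+     * const batchClient = blobServiceClient.getBlobBatchClient();
+     * // batchClient can now submit batched operations (for example, deleting
+     * // several blobs in a single request) against this service endpoint.
+     * ```
+     *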
+     * @returns A new BlobBatchClient object for this service.
+     */
+    getBlobBatchClient() {
+        return new BlobBatchClient(this.url, this.pipeline);
+    }
+    /**
+     * Only available for BlobServiceClient constructed with a shared key credential.
+     *
+     * Generates a Blob account Shared Access Signature (SAS) URI based on the client properties
+     * and parameters passed in. The SAS is signed by the shared key credential of the client.
+     *
+     * @see https://docs.microsoft.com/en-us/rest/api/storageservices/create-account-sas
+     *
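+     * Example usage (a minimal sketch; assumes the client was constructed with a
+     * `StorageSharedKeyCredential` and that `AccountSASPermissions` is imported from this package):
+     *
+     * ```js
+     * const sasUrl = blobServiceClient.generateAccountSasUrl(
+     *   new Date(Date.now() + 3600 * 1000), // expires in one hour
+     *   AccountSASPermissions.parse("rl"),  // read + list
+     *   "sco"                               // service, container and object resource types
+     * );
+     * ```
+     *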
+     * @param expiresOn - Optional. The time at which the shared access signature becomes invalid. Defaults to an hour later if not provided.
+     * @param permissions - Specifies the list of permissions to be associated with the SAS.
+     * @param resourceTypes - Specifies the resource types associated with the shared access signature.
+     * @param options - Optional parameters.
+     * @returns An account SAS URI consisting of the URI to the resource represented by this client, followed by the generated SAS token.
+     */
+    generateAccountSasUrl(expiresOn, permissions = AccountSASPermissions.parse("r"), resourceTypes = "sco", options = {}) {
+        if (!(this.credential instanceof StorageSharedKeyCredential)) {
+            throw RangeError("Can only generate the account SAS when the client is initialized with a shared key credential");
+        }
+        if (expiresOn === undefined) {
+            const now = new Date();
+            expiresOn = new Date(now.getTime() + 3600 * 1000);
+        }
+        const sas = generateAccountSASQueryParameters(Object.assign({ permissions,
+            expiresOn,
+            resourceTypes, services: AccountSASServices.parse("b").toString() }, options), this.credential).toString();
+        return appendToURLQuery(this.url, sas);
+    }
+    /**
+     * Only available for BlobServiceClient constructed with a shared key credential.
+     *
+     * Generates string to sign for a Blob account Shared Access Signature (SAS) URI based on
+     * the client properties and parameters passed in. The SAS is signed by the shared key credential of the client.
+     *
+     * @see https://docs.microsoft.com/en-us/rest/api/storageservices/create-account-sas
+     *
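+     * Example usage (a minimal sketch; mirrors {@link generateAccountSasUrl} but returns
+     * only the string that would be signed, which can be useful for diagnostics):
+     *
+     * ```js
+     * const stringToSign = blobServiceClient.generateSasStringToSign(
+     *   new Date(Date.now() + 3600 * 1000),
+     *   AccountSASPermissions.parse("r")
+     * );
+     * ```
+     *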
+     * @param expiresOn - Optional. The time at which the shared access signature becomes invalid. Defaults to an hour later if not provided.
+     * @param permissions - Specifies the list of permissions to be associated with the SAS.
+     * @param resourceTypes - Specifies the resource types associated with the shared access signature.
+     * @param options - Optional parameters.
+     * @returns The string to sign for the account SAS, computed from the client properties and the parameters passed in.
+     */
+    generateSasStringToSign(expiresOn, permissions = AccountSASPermissions.parse("r"), resourceTypes = "sco", options = {}) {
+        if (!(this.credential instanceof StorageSharedKeyCredential)) {
+            throw RangeError("Can only generate the account SAS when the client is initialized with a shared key credential");
+        }
+        if (expiresOn === undefined) {
+            const now = new Date();
+            expiresOn = new Date(now.getTime() + 3600 * 1000);
+        }
+        return generateAccountSASQueryParametersInternal(Object.assign({ permissions,
+            expiresOn,
+            resourceTypes, services: AccountSASServices.parse("b").toString() }, options), this.credential).stringToSign;
+    }
+}
+
+// Copyright (c) Microsoft Corporation.
+// Licensed under the MIT License.
+/** Known values of {@link EncryptionAlgorithmType} that the service accepts. */
+exports.KnownEncryptionAlgorithmType = void 0;
+(function (KnownEncryptionAlgorithmType) {
+    KnownEncryptionAlgorithmType["AES256"] = "AES256";
+})(exports.KnownEncryptionAlgorithmType || (exports.KnownEncryptionAlgorithmType = {}));
+
+Object.defineProperty(exports, "RestError", ({
+    enumerable: true,
+    get: function () { return coreRestPipeline.RestError; }
+}));
+exports.AccountSASPermissions = AccountSASPermissions;
+exports.AccountSASResourceTypes = AccountSASResourceTypes;
+exports.AccountSASServices = AccountSASServices;
+exports.AnonymousCredential = AnonymousCredential;
+exports.AnonymousCredentialPolicy = AnonymousCredentialPolicy;
+exports.AppendBlobClient = AppendBlobClient;
+exports.BaseRequestPolicy = BaseRequestPolicy;
+exports.BlobBatch = BlobBatch;
+exports.BlobBatchClient = BlobBatchClient;
+exports.BlobClient = BlobClient;
+exports.BlobLeaseClient = BlobLeaseClient;
+exports.BlobSASPermissions = BlobSASPermissions;
+exports.BlobServiceClient = BlobServiceClient;
+exports.BlockBlobClient = BlockBlobClient;
+exports.ContainerClient = ContainerClient;
+exports.ContainerSASPermissions = ContainerSASPermissions;
+exports.Credential = Credential;
+exports.CredentialPolicy = CredentialPolicy;
+exports.PageBlobClient = PageBlobClient;
+exports.Pipeline = Pipeline;
+exports.SASQueryParameters = SASQueryParameters;
+exports.StorageBrowserPolicy = StorageBrowserPolicy;
+exports.StorageBrowserPolicyFactory = StorageBrowserPolicyFactory;
+exports.StorageOAuthScopes = StorageOAuthScopes;
+exports.StorageRetryPolicy = StorageRetryPolicy;
+exports.StorageRetryPolicyFactory = StorageRetryPolicyFactory;
+exports.StorageSharedKeyCredential = StorageSharedKeyCredential;
+exports.StorageSharedKeyCredentialPolicy = StorageSharedKeyCredentialPolicy;
+exports.generateAccountSASQueryParameters = generateAccountSASQueryParameters;
+exports.generateBlobSASQueryParameters = generateBlobSASQueryParameters;
+exports.getBlobServiceAccountAudience = getBlobServiceAccountAudience;
+exports.isPipelineLike = isPipelineLike;
+exports.logger = logger;
+exports.newPipeline = newPipeline;
+//# sourceMappingURL=index.js.map
+
+
+/***/ }),
+
+/***/ 8473:
+/***/ (function(__unused_webpack_module, exports, __nccwpck_require__) {
+
+
+var __createBinding = (this && this.__createBinding) || (Object.create ? (function(o, m, k, k2) {
+    if (k2 === undefined) k2 = k;
+    var desc = Object.getOwnPropertyDescriptor(m, k);
+    if (!desc || ("get" in desc ? !m.__esModule : desc.writable || desc.configurable)) {
+      desc = { enumerable: true, get: function() { return m[k]; } };
+    }
+    Object.defineProperty(o, k2, desc);
+}) : (function(o, m, k, k2) {
+    if (k2 === undefined) k2 = k;
+    o[k2] = m[k];
+}));
+var __setModuleDefault = (this && this.__setModuleDefault) || (Object.create ? (function(o, v) {
+    Object.defineProperty(o, "default", { enumerable: true, value: v });
+}) : function(o, v) {
+    o["default"] = v;
+});
+var __importStar = (this && this.__importStar) || function (mod) {
+    if (mod && mod.__esModule) return mod;
+    var result = {};
+    if (mod != null) for (var k in mod) if (k !== "default" && Object.prototype.hasOwnProperty.call(mod, k)) __createBinding(result, mod, k);
+    __setModuleDefault(result, mod);
+    return result;
+};
+Object.defineProperty(exports, "__esModule", ({ value: true }));
+exports.req = exports.json = exports.toBuffer = void 0;
+const http = __importStar(__nccwpck_require__(8611));
+const https = __importStar(__nccwpck_require__(5692));
+async function toBuffer(stream) {
+    let length = 0;
+    const chunks = [];
+    for await (const chunk of stream) {
+        length += chunk.length;
+        chunks.push(chunk);
+    }
+    return Buffer.concat(chunks, length);
+}
+exports.toBuffer = toBuffer;
+// eslint-disable-next-line @typescript-eslint/no-explicit-any
+async function json(stream) {
+    const buf = await toBuffer(stream);
+    const str = buf.toString('utf8');
+    try {
+        return JSON.parse(str);
+    }
+    catch (_err) {
+        const err = _err;
+        err.message += ` (input: ${str})`;
+        throw err;
+    }
+}
+exports.json = json;
+function req(url, opts = {}) {
+    const href = typeof url === 'string' ? url : url.href;
+    const req = (href.startsWith('https:') ? https : http).request(url, opts);
+    const promise = new Promise((resolve, reject) => {
+        req
+            .once('response', resolve)
+            .once('error', reject)
+            .end();
+    });
+    req.then = promise.then.bind(promise);
+    return req;
+}
+exports.req = req;
+//# sourceMappingURL=helpers.js.map
+
+/***/ }),
+
+/***/ 3583:
+/***/ (function(__unused_webpack_module, exports, __nccwpck_require__) {
+
+
+var __createBinding = (this && this.__createBinding) || (Object.create ? (function(o, m, k, k2) {
+    if (k2 === undefined) k2 = k;
+    var desc = Object.getOwnPropertyDescriptor(m, k);
+    if (!desc || ("get" in desc ? !m.__esModule : desc.writable || desc.configurable)) {
+      desc = { enumerable: true, get: function() { return m[k]; } };
+    }
+    Object.defineProperty(o, k2, desc);
+}) : (function(o, m, k, k2) {
+    if (k2 === undefined) k2 = k;
+    o[k2] = m[k];
+}));
+var __setModuleDefault = (this && this.__setModuleDefault) || (Object.create ? (function(o, v) {
+    Object.defineProperty(o, "default", { enumerable: true, value: v });
+}) : function(o, v) {
+    o["default"] = v;
+});
+var __importStar = (this && this.__importStar) || function (mod) {
+    if (mod && mod.__esModule) return mod;
+    var result = {};
+    if (mod != null) for (var k in mod) if (k !== "default" && Object.prototype.hasOwnProperty.call(mod, k)) __createBinding(result, mod, k);
+    __setModuleDefault(result, mod);
+    return result;
+};
+var __exportStar = (this && this.__exportStar) || function(m, exports) {
+    for (var p in m) if (p !== "default" && !Object.prototype.hasOwnProperty.call(exports, p)) __createBinding(exports, m, p);
+};
+Object.defineProperty(exports, "__esModule", ({ value: true }));
+exports.Agent = void 0;
+const net = __importStar(__nccwpck_require__(9278));
+const http = __importStar(__nccwpck_require__(8611));
+const https_1 = __nccwpck_require__(5692);
+__exportStar(__nccwpck_require__(8473), exports);
+const INTERNAL = Symbol('AgentBaseInternalState');
+class Agent extends http.Agent {
+    constructor(opts) {
+        super(opts);
+        this[INTERNAL] = {};
+    }
+    /**
+     * Determine whether this is an `http` or `https` request.
+     */
+    isSecureEndpoint(options) {
+        if (options) {
+            // First check the `secureEndpoint` property explicitly, since this
+            // means that a parent `Agent` is "passing through" to this instance.
+            // eslint-disable-next-line @typescript-eslint/no-explicit-any
+            if (typeof options.secureEndpoint === 'boolean') {
+                return options.secureEndpoint;
+            }
+            // If no explicit `secure` endpoint, check if `protocol` property is
+            // set. This will usually be the case since using a full string URL
+            // or `URL` instance should be the most common usage.
+            if (typeof options.protocol === 'string') {
+                return options.protocol === 'https:';
+            }
+        }
+        // Finally, if no `protocol` property was set, then fall back to
+        // checking the stack trace of the current call stack, and try to
+        // detect the "https" module.
+        const { stack } = new Error();
+        if (typeof stack !== 'string')
+            return false;
+        return stack
+            .split('\n')
+            .some((l) => l.indexOf('(https.js:') !== -1 ||
+            l.indexOf('node:https:') !== -1);
+    }
+    // In order to support async signatures in `connect()` and Node's native
+    // connection pooling in `http.Agent`, the array of sockets for each origin
+    // has to be updated synchronously. This is so the length of the array is
+    // accurate when `addRequest()` is next called. We achieve this by creating a
+    // fake socket and adding it to `sockets[origin]` and incrementing
+    // `totalSocketCount`.
+    incrementSockets(name) {
+        // If `maxSockets` and `maxTotalSockets` are both Infinity then there is no
+        // need to create a fake socket because Node.js native connection pooling
+        // will never be invoked.
+        if (this.maxSockets === Infinity && this.maxTotalSockets === Infinity) {
+            return null;
+        }
+        // All instances of `sockets` are expected TypeScript errors. The
+        // alternative is to add it as a private property of this class but that
+        // will break TypeScript subclassing.
+        if (!this.sockets[name]) {
+            // @ts-expect-error `sockets` is readonly in `@types/node`
+            this.sockets[name] = [];
+        }
+        const fakeSocket = new net.Socket({ writable: false });
+        this.sockets[name].push(fakeSocket);
+        // @ts-expect-error `totalSocketCount` isn't defined in `@types/node`
+        this.totalSocketCount++;
+        return fakeSocket;
+    }
+    decrementSockets(name, socket) {
+        if (!this.sockets[name] || socket === null) {
+            return;
+        }
+        const sockets = this.sockets[name];
+        const index = sockets.indexOf(socket);
+        if (index !== -1) {
+            sockets.splice(index, 1);
+            // @ts-expect-error  `totalSocketCount` isn't defined in `@types/node`
+            this.totalSocketCount--;
+            if (sockets.length === 0) {
+                // @ts-expect-error `sockets` is readonly in `@types/node`
+                delete this.sockets[name];
+            }
+        }
+    }
+    // In order to properly update the socket pool, we need to call `getName()` on
+    // the core `https.Agent` if it is a secureEndpoint.
+    getName(options) {
+        const secureEndpoint = typeof options.secureEndpoint === 'boolean'
+            ? options.secureEndpoint
+            : this.isSecureEndpoint(options);
+        if (secureEndpoint) {
+            // @ts-expect-error `getName()` isn't defined in `@types/node`
+            return https_1.Agent.prototype.getName.call(this, options);
+        }
+        // @ts-expect-error `getName()` isn't defined in `@types/node`
+        return super.getName(options);
+    }
+    createSocket(req, options, cb) {
+        const connectOpts = {
+            ...options,
+            secureEndpoint: this.isSecureEndpoint(options),
+        };
+        const name = this.getName(connectOpts);
+        const fakeSocket = this.incrementSockets(name);
+        Promise.resolve()
+            .then(() => this.connect(req, connectOpts))
+            .then((socket) => {
+            this.decrementSockets(name, fakeSocket);
+            if (socket instanceof http.Agent) {
+                // @ts-expect-error `addRequest()` isn't defined in `@types/node`
+                return socket.addRequest(req, connectOpts);
+            }
+            this[INTERNAL].currentSocket = socket;
+            // @ts-expect-error `createSocket()` isn't defined in `@types/node`
+            super.createSocket(req, options, cb);
+        }, (err) => {
+            this.decrementSockets(name, fakeSocket);
+            cb(err);
+        });
+    }
+    createConnection() {
+        const socket = this[INTERNAL].currentSocket;
+        this[INTERNAL].currentSocket = undefined;
+        if (!socket) {
+            throw new Error('No socket was returned in the `connect()` function');
+        }
+        return socket;
+    }
+    get defaultPort() {
+        return (this[INTERNAL].defaultPort ??
+            (this.protocol === 'https:' ? 443 : 80));
+    }
+    set defaultPort(v) {
+        if (this[INTERNAL]) {
+            this[INTERNAL].defaultPort = v;
+        }
+    }
+    get protocol() {
+        return (this[INTERNAL].protocol ??
+            (this.isSecureEndpoint() ? 'https:' : 'http:'));
+    }
+    set protocol(v) {
+        if (this[INTERNAL]) {
+            this[INTERNAL].protocol = v;
+        }
+    }
+}
+exports.Agent = Agent;
+//# sourceMappingURL=index.js.map
+
+/***/ }),
+
+/***/ 127:
+/***/ ((module) => {
+
+
+module.exports = balanced;
+function balanced(a, b, str) {
+  if (a instanceof RegExp) a = maybeMatch(a, str);
+  if (b instanceof RegExp) b = maybeMatch(b, str);
+
+  var r = range(a, b, str);
+
+  return r && {
+    start: r[0],
+    end: r[1],
+    pre: str.slice(0, r[0]),
+    body: str.slice(r[0] + a.length, r[1]),
+    post: str.slice(r[1] + b.length)
+  };
+}
+
+function maybeMatch(reg, str) {
+  var m = str.match(reg);
+  return m ? m[0] : null;
+}
+
+balanced.range = range;
+function range(a, b, str) {
+  var begs, beg, left, right, result;
+  var ai = str.indexOf(a);
+  var bi = str.indexOf(b, ai + 1);
+  var i = ai;
+
+  if (ai >= 0 && bi > 0) {
+    if(a===b) {
+      return [ai, bi];
+    }
+    begs = [];
+    left = str.length;
+
+    while (i >= 0 && !result) {
+      if (i == ai) {
+        begs.push(i);
+        ai = str.indexOf(a, i + 1);
+      } else if (begs.length == 1) {
+        result = [ begs.pop(), bi ];
+      } else {
+        beg = begs.pop();
+        if (beg < left) {
+          left = beg;
+          right = bi;
+        }
+
+        bi = str.indexOf(b, i + 1);
+      }
+
+      i = ai < bi && ai >= 0 ? ai : bi;
+    }
+
+    if (begs.length) {
+      result = [ left, right ];
+    }
+  }
+
+  return result;
+}
+
+
+/***/ }),
+
+/***/ 5612:
+/***/ ((module, __unused_webpack_exports, __nccwpck_require__) => {
+
+var concatMap = __nccwpck_require__(8401);
+var balanced = __nccwpck_require__(127);
+
+module.exports = expandTop;
+
+var escSlash = '\0SLASH'+Math.random()+'\0';
+var escOpen = '\0OPEN'+Math.random()+'\0';
+var escClose = '\0CLOSE'+Math.random()+'\0';
+var escComma = '\0COMMA'+Math.random()+'\0';
+var escPeriod = '\0PERIOD'+Math.random()+'\0';
+
+function numeric(str) {
+  return parseInt(str, 10) == str
+    ? parseInt(str, 10)
+    : str.charCodeAt(0);
+}
+
+function escapeBraces(str) {
+  return str.split('\\\\').join(escSlash)
+            .split('\\{').join(escOpen)
+            .split('\\}').join(escClose)
+            .split('\\,').join(escComma)
+            .split('\\.').join(escPeriod);
+}
+
+function unescapeBraces(str) {
+  return str.split(escSlash).join('\\')
+            .split(escOpen).join('{')
+            .split(escClose).join('}')
+            .split(escComma).join(',')
+            .split(escPeriod).join('.');
+}
+
+
+// Basically just str.split(","), but handling cases
+// where we have nested braced sections, which should be
+// treated as individual members, like {a,{b,c},d}
+function parseCommaParts(str) {
+  if (!str)
+    return [''];
+
+  var parts = [];
+  var m = balanced('{', '}', str);
+
+  if (!m)
+    return str.split(',');
+
+  var pre = m.pre;
+  var body = m.body;
+  var post = m.post;
+  var p = pre.split(',');
+
+  p[p.length-1] += '{' + body + '}';
+  var postParts = parseCommaParts(post);
+  if (post.length) {
+    p[p.length-1] += postParts.shift();
+    p.push.apply(p, postParts);
+  }
+
+  parts.push.apply(parts, p);
+
+  return parts;
+}
+
+function expandTop(str) {
+  if (!str)
+    return [];
+
+  // I don't know why Bash 4.3 does this, but it does.
+  // Anything starting with {} will have the first two bytes preserved
+  // but *only* at the top level, so {},a}b will not expand to anything,
+  // but a{},b}c will be expanded to [a}c,abc].
+  // One could argue that this is a bug in Bash, but since the goal of
+  // this module is to match Bash's rules, we escape a leading {}
+  if (str.substr(0, 2) === '{}') {
+    str = '\\{\\}' + str.substr(2);
+  }
+
+  return expand(escapeBraces(str), true).map(unescapeBraces);
+}
+
+function identity(e) {
+  return e;
+}
+
+function embrace(str) {
+  return '{' + str + '}';
+}
+function isPadded(el) {
+  return /^-?0\d/.test(el);
+}
+
+function lte(i, y) {
+  return i <= y;
+}
+function gte(i, y) {
+  return i >= y;
+}
+
+function expand(str, isTop) {
+  var expansions = [];
+
+  var m = balanced('{', '}', str);
+  if (!m || /\$$/.test(m.pre)) return [str];
+
+  var isNumericSequence = /^-?\d+\.\.-?\d+(?:\.\.-?\d+)?$/.test(m.body);
+  var isAlphaSequence = /^[a-zA-Z]\.\.[a-zA-Z](?:\.\.-?\d+)?$/.test(m.body);
+  var isSequence = isNumericSequence || isAlphaSequence;
+  var isOptions = m.body.indexOf(',') >= 0;
+  if (!isSequence && !isOptions) {
+    // {a},b}
+    if (m.post.match(/,.*\}/)) {
+      str = m.pre + '{' + m.body + escClose + m.post;
+      return expand(str);
+    }
+    return [str];
+  }
+
+  var n;
+  if (isSequence) {
+    n = m.body.split(/\.\./);
+  } else {
+    n = parseCommaParts(m.body);
+    if (n.length === 1) {
+      // x{{a,b}}y ==> x{a}y x{b}y
+      n = expand(n[0], false).map(embrace);
+      if (n.length === 1) {
+        var post = m.post.length
+          ? expand(m.post, false)
+          : [''];
+        return post.map(function(p) {
+          return m.pre + n[0] + p;
+        });
+      }
+    }
+  }
+
+  // at this point, n is the parts, and we know it's not a comma set
+  // with a single entry.
+
+  // no need to expand pre, since it is guaranteed to be free of brace-sets
+  var pre = m.pre;
+  var post = m.post.length
+    ? expand(m.post, false)
+    : [''];
+
+  var N;
+
+  if (isSequence) {
+    var x = numeric(n[0]);
+    var y = numeric(n[1]);
+    var width = Math.max(n[0].length, n[1].length)
+    var incr = n.length == 3
+      ? Math.abs(numeric(n[2]))
+      : 1;
+    var test = lte;
+    var reverse = y < x;
+    if (reverse) {
+      incr *= -1;
+      test = gte;
+    }
+    var pad = n.some(isPadded);
+
+    N = [];
+
+    for (var i = x; test(i, y); i += incr) {
+      var c;
+      if (isAlphaSequence) {
+        c = String.fromCharCode(i);
+        if (c === '\\')
+          c = '';
+      } else {
+        c = String(i);
+        if (pad) {
+          var need = width - c.length;
+          if (need > 0) {
+            var z = new Array(need + 1).join('0');
+            if (i < 0)
+              c = '-' + z + c.slice(1);
+            else
+              c = z + c;
+          }
+        }
+      }
+      N.push(c);
+    }
+  } else {
+    N = concatMap(n, function(el) { return expand(el, false) });
+  }
+
+  for (var j = 0; j < N.length; j++) {
+    for (var k = 0; k < post.length; k++) {
+      var expansion = pre + N[j] + post[k];
+      if (!isTop || isSequence || expansion)
+        expansions.push(expansion);
+    }
+  }
+
+  return expansions;
+}
+
+
+
+/***/ }),
+
+/***/ 8401:
+/***/ ((module) => {
+
+module.exports = function (xs, fn) {
+    var res = [];
+    for (var i = 0; i < xs.length; i++) {
+        var x = fn(xs[i], i);
+        if (isArray(x)) res.push.apply(res, x);
+        else res.push(x);
+    }
+    return res;
+};
+
+var isArray = Array.isArray || function (xs) {
+    return Object.prototype.toString.call(xs) === '[object Array]';
+};
+
+
+/***/ }),
+
+/***/ 8121:
+/***/ ((module, exports, __nccwpck_require__) => {
+
+/* eslint-env browser */
+
+/**
+ * This is the web browser implementation of `debug()`.
+ */
+
+exports.formatArgs = formatArgs;
+exports.save = save;
+exports.load = load;
+exports.useColors = useColors;
+exports.storage = localstorage();
+exports.destroy = (() => {
+	let warned = false;
+
+	return () => {
+		if (!warned) {
+			warned = true;
+			console.warn('Instance method `debug.destroy()` is deprecated and no longer does anything. It will be removed in the next major version of `debug`.');
+		}
+	};
+})();
+
+/**
+ * Colors.
+ */
+
+exports.colors = [
+	'#0000CC',
+	'#0000FF',
+	'#0033CC',
+	'#0033FF',
+	'#0066CC',
+	'#0066FF',
+	'#0099CC',
+	'#0099FF',
+	'#00CC00',
+	'#00CC33',
+	'#00CC66',
+	'#00CC99',
+	'#00CCCC',
+	'#00CCFF',
+	'#3300CC',
+	'#3300FF',
+	'#3333CC',
+	'#3333FF',
+	'#3366CC',
+	'#3366FF',
+	'#3399CC',
+	'#3399FF',
+	'#33CC00',
+	'#33CC33',
+	'#33CC66',
+	'#33CC99',
+	'#33CCCC',
+	'#33CCFF',
+	'#6600CC',
+	'#6600FF',
+	'#6633CC',
+	'#6633FF',
+	'#66CC00',
+	'#66CC33',
+	'#9900CC',
+	'#9900FF',
+	'#9933CC',
+	'#9933FF',
+	'#99CC00',
+	'#99CC33',
+	'#CC0000',
+	'#CC0033',
+	'#CC0066',
+	'#CC0099',
+	'#CC00CC',
+	'#CC00FF',
+	'#CC3300',
+	'#CC3333',
+	'#CC3366',
+	'#CC3399',
+	'#CC33CC',
+	'#CC33FF',
+	'#CC6600',
+	'#CC6633',
+	'#CC9900',
+	'#CC9933',
+	'#CCCC00',
+	'#CCCC33',
+	'#FF0000',
+	'#FF0033',
+	'#FF0066',
+	'#FF0099',
+	'#FF00CC',
+	'#FF00FF',
+	'#FF3300',
+	'#FF3333',
+	'#FF3366',
+	'#FF3399',
+	'#FF33CC',
+	'#FF33FF',
+	'#FF6600',
+	'#FF6633',
+	'#FF9900',
+	'#FF9933',
+	'#FFCC00',
+	'#FFCC33'
+];
+
+/**
+ * Currently only WebKit-based Web Inspectors, Firefox >= v31,
+ * and the Firebug extension (any Firefox version) are known
+ * to support "%c" CSS customizations.
+ *
+ * TODO: add a `localStorage` variable to explicitly enable/disable colors
+ */
+
+// eslint-disable-next-line complexity
+function useColors() {
+	// NB: In an Electron preload script, document will be defined but not fully
+	// initialized. Since we know we're in Chrome, we'll just detect this case
+	// explicitly
+	if (typeof window !== 'undefined' && window.process && (window.process.type === 'renderer' || window.process.__nwjs)) {
+		return true;
+	}
+
+	// Internet Explorer and Edge do not support colors.
+	if (typeof navigator !== 'undefined' && navigator.userAgent && navigator.userAgent.toLowerCase().match(/(edge|trident)\/(\d+)/)) {
+		return false;
+	}
+
+	let m;
+
+	// Is webkit? http://stackoverflow.com/a/16459606/376773
+	// document is undefined in react-native: https://github.com/facebook/react-native/pull/1632
+	return (typeof document !== 'undefined' && document.documentElement && document.documentElement.style && document.documentElement.style.WebkitAppearance) ||
+		// Is firebug? http://stackoverflow.com/a/398120/376773
+		(typeof window !== 'undefined' && window.console && (window.console.firebug || (window.console.exception && window.console.table))) ||
+		// Is firefox >= v31?
+		// https://developer.mozilla.org/en-US/docs/Tools/Web_Console#Styling_messages
+		(typeof navigator !== 'undefined' && navigator.userAgent && (m = navigator.userAgent.toLowerCase().match(/firefox\/(\d+)/)) && parseInt(m[1], 10) >= 31) ||
+		// Double check webkit in userAgent just in case we are in a worker
+		(typeof navigator !== 'undefined' && navigator.userAgent && navigator.userAgent.toLowerCase().match(/applewebkit\/(\d+)/));
+}
+
+/**
+ * Colorize log arguments if enabled.
+ *
+ * @api public
+ */
+
+function formatArgs(args) {
+	args[0] = (this.useColors ? '%c' : '') +
+		this.namespace +
+		(this.useColors ? ' %c' : ' ') +
+		args[0] +
+		(this.useColors ? '%c ' : ' ') +
+		'+' + module.exports.humanize(this.diff);
+
+	if (!this.useColors) {
+		return;
+	}
+
+	const c = 'color: ' + this.color;
+	args.splice(1, 0, c, 'color: inherit');
+
+	// The final "%c" is somewhat tricky, because there could be other
+	// arguments passed either before or after the %c, so we need to
+	// figure out the correct index to insert the CSS into
+	let index = 0;
+	let lastC = 0;
+	args[0].replace(/%[a-zA-Z%]/g, match => {
+		if (match === '%%') {
+			return;
+		}
+		index++;
+		if (match === '%c') {
+			// We only are interested in the *last* %c
+			// (the user may have provided their own)
+			lastC = index;
+		}
+	});
+
+	args.splice(lastC, 0, c);
+}
+
+/**
+ * Invokes `console.debug()` when available.
+ * No-op when `console.debug` is not a "function".
+ * If `console.debug` is not available, falls back
+ * to `console.log`.
+ *
+ * @api public
+ */
+exports.log = console.debug || console.log || (() => {});
+
+/**
+ * Save `namespaces`.
+ *
+ * @param {String} namespaces
+ * @api private
+ */
+function save(namespaces) {
+	try {
+		if (namespaces) {
+			exports.storage.setItem('debug', namespaces);
+		} else {
+			exports.storage.removeItem('debug');
+		}
+	} catch (error) {
+		// Swallow
+		// XXX (@Qix-) should we be logging these?
+	}
+}
+
+/**
+ * Load `namespaces`.
+ *
+ * @return {String} returns the previously persisted debug modes
+ * @api private
+ */
+function load() {
+	let r;
+	try {
+		r = exports.storage.getItem('debug');
+	} catch (error) {
+		// Swallow
+		// XXX (@Qix-) should we be logging these?
+	}
+
+	// If debug isn't set in LS, and we're in Electron, try to load $DEBUG
+	if (!r && typeof process !== 'undefined' && 'env' in process) {
+		r = process.env.DEBUG;
+	}
+
+	return r;
+}
+
+/**
+ * Attempts to return the global `localStorage` object.
+ *
+ * This is necessary because safari throws
+ * when a user disables cookies/localstorage
+ * and you attempt to access it.
+ *
+ * @return {LocalStorage}
+ * @api private
+ */
+
+function localstorage() {
+	try {
+		// TVMLKit (Apple TV JS Runtime) does not have a window object, just localStorage in the global context
+		// The Browser also has localStorage in the global context.
+		return localStorage;
+	} catch (error) {
+		// Swallow
+		// XXX (@Qix-) should we be logging these?
+	}
+}
+
+module.exports = __nccwpck_require__(6192)(exports);
+
+const {formatters} = module.exports;
+
+/**
+ * Map %j to `JSON.stringify()`, since no Web Inspectors do that by default.
+ */
+
+formatters.j = function (v) {
+	try {
+		return JSON.stringify(v);
+	} catch (error) {
+		return '[UnexpectedJSONParseError]: ' + error.message;
+	}
+};
+
+
+/***/ }),
+
+/***/ 6192:
+/***/ ((module, __unused_webpack_exports, __nccwpck_require__) => {
+
+
+/**
+ * This is the common logic for both the Node.js and web browser
+ * implementations of `debug()`.
+ */
+
+function setup(env) {
+	createDebug.debug = createDebug;
+	createDebug.default = createDebug;
+	createDebug.coerce = coerce;
+	createDebug.disable = disable;
+	createDebug.enable = enable;
+	createDebug.enabled = enabled;
+	createDebug.humanize = __nccwpck_require__(2134);
+	createDebug.destroy = destroy;
+
+	Object.keys(env).forEach(key => {
+		createDebug[key] = env[key];
+	});
+
+	/**
+	* The currently active debug mode names, and names to skip.
+	*/
+
+	createDebug.names = [];
+	createDebug.skips = [];
+
+	/**
+	* Map of special "%n" handling functions, for the debug "format" argument.
+	*
+	* Valid key names are a single, lower or upper-case letter, i.e. "n" and "N".
+	*/
+	createDebug.formatters = {};
+
+	/**
+	* Selects a color for a debug namespace
+	* @param {String} namespace The namespace string for the debug instance to be colored
+	* @return {Number|String} An ANSI color code for the given namespace
+	* @api private
+	*/
+	function selectColor(namespace) {
+		let hash = 0;
+
+		for (let i = 0; i < namespace.length; i++) {
+			hash = ((hash << 5) - hash) + namespace.charCodeAt(i);
+			hash |= 0; // Convert to 32bit integer
+		}
+
+		return createDebug.colors[Math.abs(hash) % createDebug.colors.length];
+	}
+	createDebug.selectColor = selectColor;
+
+	/**
+	* Create a debugger with the given `namespace`.
+	*
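+	* Example (a minimal sketch; the namespace and message are illustrative):
+	*
+	* ```js
+	* const log = createDebug('app:server');
+	* log('listening on port %d', 3000);
+	* ```
+	*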
+	* @param {String} namespace
+	* @return {Function}
+	* @api public
+	*/
+	function createDebug(namespace) {
+		let prevTime;
+		let enableOverride = null;
+		let namespacesCache;
+		let enabledCache;
+
+		function debug(...args) {
+			// Disabled?
+			if (!debug.enabled) {
+				return;
+			}
+
+			const self = debug;
+
+			// Set `diff` timestamp
+			const curr = Number(new Date());
+			const ms = curr - (prevTime || curr);
+			self.diff = ms;
+			self.prev = prevTime;
+			self.curr = curr;
+			prevTime = curr;
+
+			args[0] = createDebug.coerce(args[0]);
+
+			if (typeof args[0] !== 'string') {
+				// Anything else let's inspect with %O
+				args.unshift('%O');
+			}
+
+			// Apply any `formatters` transformations
+			let index = 0;
+			args[0] = args[0].replace(/%([a-zA-Z%])/g, (match, format) => {
+				// If we encounter an escaped % then don't increase the array index
+				if (match === '%%') {
+					return '%';
+				}
+				index++;
+				const formatter = createDebug.formatters[format];
+				if (typeof formatter === 'function') {
+					const val = args[index];
+					match = formatter.call(self, val);
+
+					// Now we need to remove `args[index]` since it's inlined in the `format`
+					args.splice(index, 1);
+					index--;
+				}
+				return match;
+			});
+
+			// Apply env-specific formatting (colors, etc.)
+			createDebug.formatArgs.call(self, args);
+
+			const logFn = self.log || createDebug.log;
+			logFn.apply(self, args);
+		}
+
+		debug.namespace = namespace;
+		debug.useColors = createDebug.useColors();
+		debug.color = createDebug.selectColor(namespace);
+		debug.extend = extend;
+		debug.destroy = createDebug.destroy; // XXX Temporary. Will be removed in the next major release.
+
+		Object.defineProperty(debug, 'enabled', {
+			enumerable: true,
+			configurable: false,
+			get: () => {
+				if (enableOverride !== null) {
+					return enableOverride;
+				}
+				if (namespacesCache !== createDebug.namespaces) {
+					namespacesCache = createDebug.namespaces;
+					enabledCache = createDebug.enabled(namespace);
+				}
+
+				return enabledCache;
+			},
+			set: v => {
+				enableOverride = v;
+			}
+		});
+
+		// Env-specific initialization logic for debug instances
+		if (typeof createDebug.init === 'function') {
+			createDebug.init(debug);
+		}
+
+		return debug;
+	}
+
+	function extend(namespace, delimiter) {
+		const newDebug = createDebug(this.namespace + (typeof delimiter === 'undefined' ? ':' : delimiter) + namespace);
+		newDebug.log = this.log;
+		return newDebug;
+	}
+
+	/**
+	* Enables a debug mode by namespaces. This can include modes
+	* separated by a colon and wildcards.
+	*
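+	* Example (a minimal sketch; the namespace patterns are illustrative):
+	*
+	* ```js
+	* createDebug.enable('app:*,-app:noisy');
+	* ```
+	*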
+	* @param {String} namespaces
+	* @api public
+	*/
+	function enable(namespaces) {
+		createDebug.save(namespaces);
+		createDebug.namespaces = namespaces;
+
+		createDebug.names = [];
+		createDebug.skips = [];
+
+		let i;
+		const split = (typeof namespaces === 'string' ? namespaces : '').split(/[\s,]+/);
+		const len = split.length;
+
+		for (i = 0; i < len; i++) {
+			if (!split[i]) {
+				// ignore empty strings
+				continue;
+			}
+
+			namespaces = split[i].replace(/\*/g, '.*?');
+
+			if (namespaces[0] === '-') {
+				createDebug.skips.push(new RegExp('^' + namespaces.slice(1) + '$'));
+			} else {
+				createDebug.names.push(new RegExp('^' + namespaces + '$'));
+			}
+		}
+	}
+
+	/**
+	* Disable debug output.
+	*
+	* @return {String} namespaces
+	* @api public
+	*/
+	function disable() {
+		const namespaces = [
+			...createDebug.names.map(toNamespace),
+			...createDebug.skips.map(toNamespace).map(namespace => '-' + namespace)
+		].join(',');
+		createDebug.enable('');
+		return namespaces;
+	}
+
+	/**
+	* Returns true if the given mode name is enabled, false otherwise.
+	*
+	* @param {String} name
+	* @return {Boolean}
+	* @api public
+	*/
+	function enabled(name) {
+		if (name[name.length - 1] === '*') {
+			return true;
+		}
+
+		let i;
+		let len;
+
+		for (i = 0, len = createDebug.skips.length; i < len; i++) {
+			if (createDebug.skips[i].test(name)) {
+				return false;
+			}
+		}
+
+		for (i = 0, len = createDebug.names.length; i < len; i++) {
+			if (createDebug.names[i].test(name)) {
+				return true;
+			}
+		}
+
+		return false;
+	}
+
+	/**
+	* Convert regexp to namespace
+	*
+	* @param {RegExp} regexp
+	* @return {String} namespace
+	* @api private
+	*/
+	function toNamespace(regexp) {
+		return regexp.toString()
+			.substring(2, regexp.toString().length - 2)
+			.replace(/\.\*\?$/, '*');
+	}
+
+	/**
+	* Coerce `val`.
+	*
+	* @param {Mixed} val
+	* @return {Mixed}
+	* @api private
+	*/
+	function coerce(val) {
+		if (val instanceof Error) {
+			return val.stack || val.message;
+		}
+		return val;
+	}
+
+	/**
+	* XXX DO NOT USE. This is a temporary stub function.
+	* XXX It WILL be removed in the next major release.
+	*/
+	function destroy() {
+		console.warn('Instance method `debug.destroy()` is deprecated and no longer does anything. It will be removed in the next major version of `debug`.');
+	}
+
+	createDebug.enable(createDebug.load());
+
+	return createDebug;
+}
+
+module.exports = setup;
+
+
+/***/ }),
+
+/***/ 8441:
+/***/ ((module, __unused_webpack_exports, __nccwpck_require__) => {
+
+/**
+ * Detect Electron renderer / nwjs process, which is node, but we should
+ * treat as a browser.
+ */
+
+if (typeof process === 'undefined' || process.type === 'renderer' || process.browser === true || process.__nwjs) {
+	module.exports = __nccwpck_require__(8121);
+} else {
+	module.exports = __nccwpck_require__(3537);
+}
+
+
+/***/ }),
+
+/***/ 3537:
+/***/ ((module, exports, __nccwpck_require__) => {
+
+/**
+ * Module dependencies.
+ */
+
+const tty = __nccwpck_require__(2018);
+const util = __nccwpck_require__(9023);
+
+/**
+ * This is the Node.js implementation of `debug()`.
+ */
+
+exports.init = init;
+exports.log = log;
+exports.formatArgs = formatArgs;
+exports.save = save;
+exports.load = load;
+exports.useColors = useColors;
+exports.destroy = util.deprecate(
+	() => {},
+	'Instance method `debug.destroy()` is deprecated and no longer does anything. It will be removed in the next major version of `debug`.'
+);
+
+/**
+ * Colors.
+ */
+
+exports.colors = [6, 2, 3, 4, 5, 1];
+
+try {
+	// Optional dependency (as in, doesn't need to be installed, NOT like optionalDependencies in package.json)
+	// eslint-disable-next-line import/no-extraneous-dependencies
+	const supportsColor = __nccwpck_require__(1953);
+
+	if (supportsColor && (supportsColor.stderr || supportsColor).level >= 2) {
+		exports.colors = [
+			20,
+			21,
+			26,
+			27,
+			32,
+			33,
+			38,
+			39,
+			40,
+			41,
+			42,
+			43,
+			44,
+			45,
+			56,
+			57,
+			62,
+			63,
+			68,
+			69,
+			74,
+			75,
+			76,
+			77,
+			78,
+			79,
+			80,
+			81,
+			92,
+			93,
+			98,
+			99,
+			112,
+			113,
+			128,
+			129,
+			134,
+			135,
+			148,
+			149,
+			160,
+			161,
+			162,
+			163,
+			164,
+			165,
+			166,
+			167,
+			168,
+			169,
+			170,
+			171,
+			172,
+			173,
+			178,
+			179,
+			184,
+			185,
+			196,
+			197,
+			198,
+			199,
+			200,
+			201,
+			202,
+			203,
+			204,
+			205,
+			206,
+			207,
+			208,
+			209,
+			214,
+			215,
+			220,
+			221
+		];
+	}
+} catch (error) {
+	// Swallow - we only care if `supports-color` is available; it doesn't have to be.
+}
+
+/**
+ * Build up the default `inspectOpts` object from the environment variables.
+ *
+ *   $ DEBUG_COLORS=no DEBUG_DEPTH=10 DEBUG_SHOW_HIDDEN=enabled node script.js
+ */
+
+exports.inspectOpts = Object.keys(process.env).filter(key => {
+	return /^debug_/i.test(key);
+}).reduce((obj, key) => {
+	// Camel-case
+	const prop = key
+		.substring(6)
+		.toLowerCase()
+		.replace(/_([a-z])/g, (_, k) => {
+			return k.toUpperCase();
+		});
+
+	// Coerce string value into JS value
+	let val = process.env[key];
+	if (/^(yes|on|true|enabled)$/i.test(val)) {
+		val = true;
+	} else if (/^(no|off|false|disabled)$/i.test(val)) {
+		val = false;
+	} else if (val === 'null') {
+		val = null;
+	} else {
+		val = Number(val);
+	}
+
+	obj[prop] = val;
+	return obj;
+}, {});
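+
+// Illustrative sketch (not part of the upstream source): how the reducer above
+// maps DEBUG_* environment variables onto `inspectOpts`. The "DEBUG_" prefix is
+// stripped, the rest is camel-cased, and the value is coerced.
+//
+//   DEBUG_COLORS=no DEBUG_DEPTH=10 DEBUG_SHOW_HIDDEN=enabled node script.js
+//   // => exports.inspectOpts is { colors: false, depth: 10, showHidden: true }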
+
+/**
+ * Is stderr a TTY? Colored output is enabled when `true`.
+ */
+
+function useColors() {
+	return 'colors' in exports.inspectOpts ?
+		Boolean(exports.inspectOpts.colors) :
+		tty.isatty(process.stderr.fd);
+}
+
+/**
+ * Adds ANSI color escape codes if enabled.
+ *
+ * @api public
+ */
+
+function formatArgs(args) {
+	const {namespace: name, useColors} = this;
+
+	if (useColors) {
+		const c = this.color;
+		const colorCode = '\u001B[3' + (c < 8 ? c : '8;5;' + c);
+		const prefix = `  ${colorCode};1m${name} \u001B[0m`;
+
+		args[0] = prefix + args[0].split('\n').join('\n' + prefix);
+		args.push(colorCode + 'm+' + module.exports.humanize(this.diff) + '\u001B[0m');
+	} else {
+		args[0] = getDate() + name + ' ' + args[0];
+	}
+}
+
+function getDate() {
+	if (exports.inspectOpts.hideDate) {
+		return '';
+	}
+	return new Date().toISOString() + ' ';
+}
+
+/**
+ * Invokes `util.formatWithOptions()` with the specified arguments and writes to stderr.
+ */
+
+function log(...args) {
+	return process.stderr.write(util.formatWithOptions(exports.inspectOpts, ...args) + '\n');
+}
+
+/**
+ * Save `namespaces`.
+ *
+ * @param {String} namespaces
+ * @api private
+ */
+function save(namespaces) {
+	if (namespaces) {
+		process.env.DEBUG = namespaces;
+	} else {
+		// If you set a process.env field to null or undefined, it gets cast to the
+		// string 'null' or 'undefined'. Just delete instead.
+		delete process.env.DEBUG;
+	}
+}
+
+/**
+ * Load `namespaces`.
+ *
+ * @return {String} returns the previously persisted debug modes
+ * @api private
+ */
+
+function load() {
+	return process.env.DEBUG;
+}
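+
+// Illustrative sketch (not part of the upstream source): in this Node.js
+// implementation the enabled namespaces persist in process.env.DEBUG, so
+// save() and load() simply round-trip through the environment.
+//
+//   save('app:*');     // process.env.DEBUG === 'app:*'
+//   load();            // => 'app:*'
+//   save(undefined);   // deletes process.env.DEBUG instead of storing 'undefined'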
+
+/**
+ * Init logic for `debug` instances.
+ *
+ * Create a new `inspectOpts` object in case `useColors` is set
+ * differently for a particular `debug` instance.
+ */
+
+function init(debug) {
+	debug.inspectOpts = {};
+
+	const keys = Object.keys(exports.inspectOpts);
+	for (let i = 0; i < keys.length; i++) {
+		debug.inspectOpts[keys[i]] = exports.inspectOpts[keys[i]];
+	}
+}
+
+module.exports = __nccwpck_require__(6192)(exports);
+
+const {formatters} = module.exports;
+
+/**
+ * Map %o to `util.inspect()`, all on a single line.
+ */
+
+formatters.o = function (v) {
+	this.inspectOpts.colors = this.useColors;
+	return util.inspect(v, this.inspectOpts)
+		.split('\n')
+		.map(str => str.trim())
+		.join(' ');
+};
+
+/**
+ * Map %O to `util.inspect()`, allowing multiple lines if needed.
+ */
+
+formatters.O = function (v) {
+	this.inspectOpts.colors = this.useColors;
+	return util.inspect(v, this.inspectOpts);
+};
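+
+// Illustrative sketch (not part of the upstream source): the difference between
+// the %o and %O formatters defined above. Both defer to util.inspect(); %o
+// collapses the result onto a single line, %O keeps it multi-line.
+//
+//   const debugFactory = module.exports;   // the factory created above
+//   const log = debugFactory('app');       // namespace chosen for illustration
+//   log('%o', {a: 1, b: {c: 2}});          // one-line inspect output
+//   log('%O', {a: 1, b: {c: 2}});          // multi-line inspect output when needed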
+
+
+/***/ }),
+
+/***/ 4010:
+/***/ ((module, __unused_webpack_exports, __nccwpck_require__) => {
+
+
+const {Transform, PassThrough} = __nccwpck_require__(2203);
+const zlib = __nccwpck_require__(3106);
+const mimicResponse = __nccwpck_require__(9804);
+
+module.exports = response => {
+	const contentEncoding = (response.headers['content-encoding'] || '').toLowerCase();
+
+	if (!['gzip', 'deflate', 'br'].includes(contentEncoding)) {
+		return response;
+	}
+
+	// TODO: Remove this when targeting Node.js 12.
+	const isBrotli = contentEncoding === 'br';
+	if (isBrotli && typeof zlib.createBrotliDecompress !== 'function') {
+		response.destroy(new Error('Brotli is not supported on Node.js < 12'));
+		return response;
+	}
+
+	let isEmpty = true;
+
+	const checker = new Transform({
+		transform(data, _encoding, callback) {
+			isEmpty = false;
+
+			callback(null, data);
+		},
+
+		flush(callback) {
+			callback();
+		}
+	});
+
+	const finalStream = new PassThrough({
+		autoDestroy: false,
+		destroy(error, callback) {
+			response.destroy();
+
+			callback(error);
+		}
+	});
+
+	const decompressStream = isBrotli ? zlib.createBrotliDecompress() : zlib.createUnzip();
+
+	decompressStream.once('error', error => {
+		if (isEmpty && !response.readable) {
+			finalStream.end();
+			return;
+		}
+
+		finalStream.destroy(error);
+	});
+
+	mimicResponse(response, finalStream);
+	response.pipe(checker).pipe(decompressStream).pipe(finalStream);
+
+	return finalStream;
+};
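+
+// Illustrative sketch (not part of the upstream `decompress-response` source):
+// the export above wraps an http.IncomingMessage and returns a stream yielding
+// the decoded body when Content-Encoding is gzip, deflate or br; any other
+// response is returned untouched.
+//
+//   const https = require('https');
+//   const decompressResponse = module.exports;  // the function defined above
+//   https.get({host: 'example.com', headers: {'accept-encoding': 'gzip'}}, response => {
+//     decompressResponse(response).pipe(process.stdout);
+//   });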
+
+
+/***/ }),
+
+/***/ 7596:
+/***/ ((module, exports) => {
+
+
+Object.defineProperty(exports, "__esModule", ({ value: true }));
+function isTLSSocket(socket) {
+    return socket.encrypted;
+}
+const deferToConnect = (socket, fn) => {
+    let listeners;
+    if (typeof fn === 'function') {
+        const connect = fn;
+        listeners = { connect };
+    }
+    else {
+        listeners = fn;
+    }
+    const hasConnectListener = typeof listeners.connect === 'function';
+    const hasSecureConnectListener = typeof listeners.secureConnect === 'function';
+    const hasCloseListener = typeof listeners.close === 'function';
+    const onConnect = () => {
+        if (hasConnectListener) {
+            listeners.connect();
+        }
+        if (isTLSSocket(socket) && hasSecureConnectListener) {
+            if (socket.authorized) {
+                listeners.secureConnect();
+            }
+            else if (!socket.authorizationError) {
+                socket.once('secureConnect', listeners.secureConnect);
+            }
+        }
+        if (hasCloseListener) {
+            socket.once('close', listeners.close);
+        }
+    };
+    if (socket.writable && !socket.connecting) {
+        onConnect();
+    }
+    else if (socket.connecting) {
+        socket.once('connect', onConnect);
+    }
+    else if (socket.destroyed && hasCloseListener) {
+        listeners.close(socket._hadError);
+    }
+};
+exports["default"] = deferToConnect;
+// For CommonJS default export support
+module.exports = deferToConnect;
+module.exports["default"] = deferToConnect;
+
+
+/***/ }),
+
+/***/ 7208:
+/***/ ((module, __unused_webpack_exports, __nccwpck_require__) => {
+
+
+
+const validator = __nccwpck_require__(7520);
+const XMLParser = __nccwpck_require__(3981);
+const XMLBuilder = __nccwpck_require__(2462);
+
+module.exports = {
+  XMLParser: XMLParser,
+  XMLValidator: validator,
+  XMLBuilder: XMLBuilder
+}
+
+/***/ }),
+
+/***/ 8123:
+/***/ ((module) => {
+
+function getIgnoreAttributesFn(ignoreAttributes) {
+    if (typeof ignoreAttributes === 'function') {
+        return ignoreAttributes
+    }
+    if (Array.isArray(ignoreAttributes)) {
+        return (attrName) => {
+            for (const pattern of ignoreAttributes) {
+                if (typeof pattern === 'string' && attrName === pattern) {
+                    return true
+                }
+                if (pattern instanceof RegExp && pattern.test(attrName)) {
+                    return true
+                }
+            }
+        }
+    }
+    return () => false
+}
+
+module.exports = getIgnoreAttributesFn
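+
+// Illustrative sketch (not part of the upstream fast-xml-parser source): the
+// helper above normalises the `ignoreAttributes` option into a predicate.
+//
+//   getIgnoreAttributesFn(['xmlns', /^data-/])('data-id');     // => true (regex match)
+//   getIgnoreAttributesFn(['xmlns', /^data-/])('id');          // => undefined (falsy, attribute kept)
+//   getIgnoreAttributesFn(name => name.startsWith('_'))('_x'); // => true (custom function used as-is)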
+
+/***/ }),
+
+/***/ 8784:
+/***/ ((__unused_webpack_module, exports) => {
+
+
+
+const nameStartChar = ':A-Za-z_\\u00C0-\\u00D6\\u00D8-\\u00F6\\u00F8-\\u02FF\\u0370-\\u037D\\u037F-\\u1FFF\\u200C-\\u200D\\u2070-\\u218F\\u2C00-\\u2FEF\\u3001-\\uD7FF\\uF900-\\uFDCF\\uFDF0-\\uFFFD';
+const nameChar = nameStartChar + '\\-.\\d\\u00B7\\u0300-\\u036F\\u203F-\\u2040';
+const nameRegexp = '[' + nameStartChar + '][' + nameChar + ']*'
+const regexName = new RegExp('^' + nameRegexp + '$');
+
+const getAllMatches = function(string, regex) {
+  const matches = [];
+  let match = regex.exec(string);
+  while (match) {
+    const allmatches = [];
+    allmatches.startIndex = regex.lastIndex - match[0].length;
+    const len = match.length;
+    for (let index = 0; index < len; index++) {
+      allmatches.push(match[index]);
+    }
+    matches.push(allmatches);
+    match = regex.exec(string);
+  }
+  return matches;
+};
+
+const isName = function(string) {
+  const match = regexName.exec(string);
+  return !(match === null || typeof match === 'undefined');
+};
+
+exports.isExist = function(v) {
+  return typeof v !== 'undefined';
+};
+
+exports.isEmptyObject = function(obj) {
+  return Object.keys(obj).length === 0;
+};
+
+/**
+ * Copy all the properties of a into target.
+ * @param {*} target
+ * @param {*} a
+ * @param {string} [arrayMode] when 'strict', each copied value is wrapped in an array
+ */
+exports.merge = function(target, a, arrayMode) {
+  if (a) {
+    const keys = Object.keys(a); // will return an array of own properties
+    const len = keys.length; //don't make it inline
+    for (let i = 0; i < len; i++) {
+      if (arrayMode === 'strict') {
+        target[keys[i]] = [ a[keys[i]] ];
+      } else {
+        target[keys[i]] = a[keys[i]];
+      }
+    }
+  }
+};
+/* exports.merge =function (b,a){
+  return Object.assign(b,a);
+} */
+
+exports.getValue = function(v) {
+  if (exports.isExist(v)) {
+    return v;
+  } else {
+    return '';
+  }
+};
+
+// const fakeCall = function(a) {return a;};
+// const fakeCallNoReturn = function() {};
+
+exports.isName = isName;
+exports.getAllMatches = getAllMatches;
+exports.nameRegexp = nameRegexp;
+
+
+/***/ }),
+
+/***/ 7520:
+/***/ ((__unused_webpack_module, exports, __nccwpck_require__) => {
+
+
+
+const util = __nccwpck_require__(8784);
+
+const defaultOptions = {
+  allowBooleanAttributes: false, //A tag can have attributes without any value
+  unpairedTags: []
+};
+
+//const tagsPattern = new RegExp("<\\/?([\\w:\\-_\.]+)\\s*\/?>","g");
+exports.validate = function (xmlData, options) {
+  options = Object.assign({}, defaultOptions, options);
+
+  //xmlData = xmlData.replace(/(\r\n|\n|\r)/gm,"");//make it single line
+  //xmlData = xmlData.replace(/(^\s*<\?xml.*?\?>)/g,"");//Remove XML starting tag
+  //xmlData = xmlData.replace(/(<!DOCTYPE[\s\w\"\.\/\-\:]+(\[.*\])*\s*>)/g,"");//Remove DOCTYPE
+  const tags = [];
+  let tagFound = false;
+
+  //indicates that the root tag has been closed (aka. depth 0 has been reached)
+  let reachedRoot = false;
+
+  if (xmlData[0] === '\ufeff') {
+    // check for byte order mark (BOM)
+    xmlData = xmlData.substr(1);
+  }
+  
+  for (let i = 0; i < xmlData.length; i++) {
+
+    if (xmlData[i] === '<' && xmlData[i+1] === '?') {
+      i+=2;
+      i = readPI(xmlData,i);
+      if (i.err) return i;
+    }else if (xmlData[i] === '<') {
+      //starting of tag
+      //read until you reach to '>' avoiding any '>' in attribute value
+      let tagStartPos = i;
+      i++;
+      
+      if (xmlData[i] === '!') {
+        i = readCommentAndCDATA(xmlData, i);
+        continue;
+      } else {
+        let closingTag = false;
+        if (xmlData[i] === '/') {
+          //closing tag
+          closingTag = true;
+          i++;
+        }
+        //read tagname
+        let tagName = '';
+        for (; i < xmlData.length &&
+          xmlData[i] !== '>' &&
+          xmlData[i] !== ' ' &&
+          xmlData[i] !== '\t' &&
+          xmlData[i] !== '\n' &&
+          xmlData[i] !== '\r'; i++
+        ) {
+          tagName += xmlData[i];
+        }
+        tagName = tagName.trim();
+        //console.log(tagName);
+
+        if (tagName[tagName.length - 1] === '/') {
+          //self closing tag without attributes
+          tagName = tagName.substring(0, tagName.length - 1);
+          //continue;
+          i--;
+        }
+        if (!validateTagName(tagName)) {
+          let msg;
+          if (tagName.trim().length === 0) {
+            msg = "Invalid space after '<'.";
+          } else {
+            msg = "Tag '"+tagName+"' is an invalid name.";
+          }
+          return getErrorObject('InvalidTag', msg, getLineNumberForPosition(xmlData, i));
+        }
+
+        const result = readAttributeStr(xmlData, i);
+        if (result === false) {
+          return getErrorObject('InvalidAttr', "Attributes for '"+tagName+"' have open quote.", getLineNumberForPosition(xmlData, i));
+        }
+        let attrStr = result.value;
+        i = result.index;
+
+        if (attrStr[attrStr.length - 1] === '/') {
+          //self closing tag
+          const attrStrStart = i - attrStr.length;
+          attrStr = attrStr.substring(0, attrStr.length - 1);
+          const isValid = validateAttributeString(attrStr, options);
+          if (isValid === true) {
+            tagFound = true;
+            //continue; //text may be present after a self-closing tag
+          } else {
+            //the result from the nested function returns the position of the error within the attribute
+            //in order to get the 'true' error line, we need to calculate the position where the attribute begins (i - attrStr.length) and then add the position within the attribute
+            //this gives us the absolute index in the entire xml, which we can use to find the line at last
+            return getErrorObject(isValid.err.code, isValid.err.msg, getLineNumberForPosition(xmlData, attrStrStart + isValid.err.line));
+          }
+        } else if (closingTag) {
+          if (!result.tagClosed) {
+            return getErrorObject('InvalidTag', "Closing tag '"+tagName+"' doesn't have proper closing.", getLineNumberForPosition(xmlData, i));
+          } else if (attrStr.trim().length > 0) {
+            return getErrorObject('InvalidTag', "Closing tag '"+tagName+"' can't have attributes or invalid starting.", getLineNumberForPosition(xmlData, tagStartPos));
+          } else if (tags.length === 0) {
+            return getErrorObject('InvalidTag', "Closing tag '"+tagName+"' has not been opened.", getLineNumberForPosition(xmlData, tagStartPos));
+          } else {
+            const otg = tags.pop();
+            if (tagName !== otg.tagName) {
+              let openPos = getLineNumberForPosition(xmlData, otg.tagStartPos);
+              return getErrorObject('InvalidTag',
+                "Expected closing tag '"+otg.tagName+"' (opened in line "+openPos.line+", col "+openPos.col+") instead of closing tag '"+tagName+"'.",
+                getLineNumberForPosition(xmlData, tagStartPos));
+            }
+
+            //when there are no more tags, we reached the root level.
+            if (tags.length == 0) {
+              reachedRoot = true;
+            }
+          }
+        } else {
+          const isValid = validateAttributeString(attrStr, options);
+          if (isValid !== true) {
+            //the result from the nested function returns the position of the error within the attribute
+            //in order to get the 'true' error line, we need to calculate the position where the attribute begins (i - attrStr.length) and then add the position within the attribute
+            //this gives us the absolute index in the entire xml, which we can use to find the line at last
+            return getErrorObject(isValid.err.code, isValid.err.msg, getLineNumberForPosition(xmlData, i - attrStr.length + isValid.err.line));
+          }
+
+          //if the root level has been reached before ...
+          if (reachedRoot === true) {
+            return getErrorObject('InvalidXml', 'Multiple possible root nodes found.', getLineNumberForPosition(xmlData, i));
+          } else if(options.unpairedTags.indexOf(tagName) !== -1){
+            //don't push into stack
+          } else {
+            tags.push({tagName, tagStartPos});
+          }
+          tagFound = true;
+        }
+
+        //skip tag text value
+        //It may include comments and CDATA value
+        for (i++; i < xmlData.length; i++) {
+          if (xmlData[i] === '<') {
+            if (xmlData[i + 1] === '!') {
+              //comment or CDATA
+              i++;
+              i = readCommentAndCDATA(xmlData, i);
+              continue;
+            } else if (xmlData[i+1] === '?') {
+              i = readPI(xmlData, ++i);
+              if (i.err) return i;
+            } else{
+              break;
+            }
+          } else if (xmlData[i] === '&') {
+            const afterAmp = validateAmpersand(xmlData, i);
+            if (afterAmp == -1)
+              return getErrorObject('InvalidChar', "char '&' is not expected.", getLineNumberForPosition(xmlData, i));
+            i = afterAmp;
+          }else{
+            if (reachedRoot === true && !isWhiteSpace(xmlData[i])) {
+              return getErrorObject('InvalidXml', "Extra text at the end", getLineNumberForPosition(xmlData, i));
+            }
+          }
+        } //end of reading tag text value
+        if (xmlData[i] === '<') {
+          i--;
+        }
+      }
+    } else {
+      if ( isWhiteSpace(xmlData[i])) {
+        continue;
+      }
+      return getErrorObject('InvalidChar', "char '"+xmlData[i]+"' is not expected.", getLineNumberForPosition(xmlData, i));
+    }
+  }
+
+  if (!tagFound) {
+    return getErrorObject('InvalidXml', 'Start tag expected.', 1);
+  }else if (tags.length == 1) {
+      return getErrorObject('InvalidTag', "Unclosed tag '"+tags[0].tagName+"'.", getLineNumberForPosition(xmlData, tags[0].tagStartPos));
+  }else if (tags.length > 0) {
+      return getErrorObject('InvalidXml', "Invalid '"+
+          JSON.stringify(tags.map(t => t.tagName), null, 4).replace(/\r?\n/g, '')+
+          "' found.", {line: 1, col: 1});
+  }
+
+  return true;
+};
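+
+// Illustrative sketch (not part of the upstream source): exports.validate()
+// returns `true` for well-formed XML and an error object otherwise.
+//
+//   exports.validate('<root><a/></root>');
+//   // => true
+//   exports.validate('<root>');
+//   // => { err: { code: 'InvalidTag', msg: "Unclosed tag 'root'.", line: 1, col: 1 } }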
+
+function isWhiteSpace(char){
+  return char === ' ' || char === '\t' || char === '\n'  || char === '\r';
+}
+/**
+ * Read processing instructions and skip them
+ * @param {*} xmlData
+ * @param {*} i
+ */
+function readPI(xmlData, i) {
+  const start = i;
+  for (; i < xmlData.length; i++) {
+    if (xmlData[i] == '?' || xmlData[i] == ' ') {
+      //tagname
+      const tagname = xmlData.substr(start, i - start);
+      if (i > 5 && tagname === 'xml') {
+        return getErrorObject('InvalidXml', 'XML declaration allowed only at the start of the document.', getLineNumberForPosition(xmlData, i));
+      } else if (xmlData[i] == '?' && xmlData[i + 1] == '>') {
+        //check if valid attribute string
+        i++;
+        break;
+      } else {
+        continue;
+      }
+    }
+  }
+  return i;
+}
+
+function readCommentAndCDATA(xmlData, i) {
+  if (xmlData.length > i + 5 && xmlData[i + 1] === '-' && xmlData[i + 2] === '-') {
+    //comment
+    for (i += 3; i < xmlData.length; i++) {
+      if (xmlData[i] === '-' && xmlData[i + 1] === '-' && xmlData[i + 2] === '>') {
+        i += 2;
+        break;
+      }
+    }
+  } else if (
+    xmlData.length > i + 8 &&
+    xmlData[i + 1] === 'D' &&
+    xmlData[i + 2] === 'O' &&
+    xmlData[i + 3] === 'C' &&
+    xmlData[i + 4] === 'T' &&
+    xmlData[i + 5] === 'Y' &&
+    xmlData[i + 6] === 'P' &&
+    xmlData[i + 7] === 'E'
+  ) {
+    let angleBracketsCount = 1;
+    for (i += 8; i < xmlData.length; i++) {
+      if (xmlData[i] === '<') {
+        angleBracketsCount++;
+      } else if (xmlData[i] === '>') {
+        angleBracketsCount--;
+        if (angleBracketsCount === 0) {
+          break;
+        }
+      }
+    }
+  } else if (
+    xmlData.length > i + 9 &&
+    xmlData[i + 1] === '[' &&
+    xmlData[i + 2] === 'C' &&
+    xmlData[i + 3] === 'D' &&
+    xmlData[i + 4] === 'A' &&
+    xmlData[i + 5] === 'T' &&
+    xmlData[i + 6] === 'A' &&
+    xmlData[i + 7] === '['
+  ) {
+    for (i += 8; i < xmlData.length; i++) {
+      if (xmlData[i] === ']' && xmlData[i + 1] === ']' && xmlData[i + 2] === '>') {
+        i += 2;
+        break;
+      }
+    }
+  }
+
+  return i;
+}
+
+const doubleQuote = '"';
+const singleQuote = "'";
+
+/**
+ * Keep reading xmlData until '>' is found outside an attribute value.
+ * @param {string} xmlData
+ * @param {number} i
+ */
+function readAttributeStr(xmlData, i) {
+  let attrStr = '';
+  let startChar = '';
+  let tagClosed = false;
+  for (; i < xmlData.length; i++) {
+    if (xmlData[i] === doubleQuote || xmlData[i] === singleQuote) {
+      if (startChar === '') {
+        startChar = xmlData[i];
+      } else if (startChar !== xmlData[i]) {
+        //if value is enclosed in double quotes then single quotes are allowed inside the value, and vice versa
+      } else {
+        startChar = '';
+      }
+    } else if (xmlData[i] === '>') {
+      if (startChar === '') {
+        tagClosed = true;
+        break;
+      }
+    }
+    attrStr += xmlData[i];
+  }
+  if (startChar !== '') {
+    return false;
+  }
+
+  return {
+    value: attrStr,
+    index: i,
+    tagClosed: tagClosed
+  };
+}
+
+/**
+ * Select all the attributes whether valid or invalid.
+ */
+const validAttrStrRegxp = new RegExp('(\\s*)([^\\s=]+)(\\s*=)?(\\s*([\'"])(([\\s\\S])*?)\\5)?', 'g');
+
+//attr, ="sd", a="amit's", a="sd"b="saf", ab  cd=""
+
+function validateAttributeString(attrStr, options) {
+  //console.log("start:"+attrStr+":end");
+
+  //if(attrStr.trim().length === 0) return true; //empty string
+
+  const matches = util.getAllMatches(attrStr, validAttrStrRegxp);
+  const attrNames = {};
+
+  for (let i = 0; i < matches.length; i++) {
+    if (matches[i][1].length === 0) {
+      //no space before attribute name: a="sd"b="saf"
+      return getErrorObject('InvalidAttr', "Attribute '"+matches[i][2]+"' has no space in starting.", getPositionFromMatch(matches[i]))
+    } else if (matches[i][3] !== undefined && matches[i][4] === undefined) {
+      return getErrorObject('InvalidAttr', "Attribute '"+matches[i][2]+"' is without value.", getPositionFromMatch(matches[i]));
+    } else if (matches[i][3] === undefined && !options.allowBooleanAttributes) {
+      //independent attribute: ab
+      return getErrorObject('InvalidAttr', "boolean attribute '"+matches[i][2]+"' is not allowed.", getPositionFromMatch(matches[i]));
+    }
+    /* else if(matches[i][6] === undefined){//attribute without value: ab=
+                    return { err: { code:"InvalidAttr",msg:"attribute " + matches[i][2] + " has no value assigned."}};
+                } */
+    const attrName = matches[i][2];
+    if (!validateAttrName(attrName)) {
+      return getErrorObject('InvalidAttr', "Attribute '"+attrName+"' is an invalid name.", getPositionFromMatch(matches[i]));
+    }
+    if (!attrNames.hasOwnProperty(attrName)) {
+      //check for duplicate attribute.
+      attrNames[attrName] = 1;
+    } else {
+      return getErrorObject('InvalidAttr', "Attribute '"+attrName+"' is repeated.", getPositionFromMatch(matches[i]));
+    }
+  }
+
+  return true;
+}
+
+function validateNumberAmpersand(xmlData, i) {
+  let re = /\d/;
+  if (xmlData[i] === 'x') {
+    i++;
+    re = /[\da-fA-F]/;
+  }
+  for (; i < xmlData.length; i++) {
+    if (xmlData[i] === ';')
+      return i;
+    if (!xmlData[i].match(re))
+      break;
+  }
+  return -1;
+}
+
+function validateAmpersand(xmlData, i) {
+  // https://www.w3.org/TR/xml/#dt-charref
+  i++;
+  if (xmlData[i] === ';')
+    return -1;
+  if (xmlData[i] === '#') {
+    i++;
+    return validateNumberAmpersand(xmlData, i);
+  }
+  let count = 0;
+  for (; i < xmlData.length; i++, count++) {
+    if (xmlData[i].match(/\w/) && count < 20)
+      continue;
+    if (xmlData[i] === ';')
+      break;
+    return -1;
+  }
+  return i;
+}
+
+function getErrorObject(code, message, lineNumber) {
+  return {
+    err: {
+      code: code,
+      msg: message,
+      line: lineNumber.line || lineNumber,
+      col: lineNumber.col,
+    },
+  };
+}
+
+function validateAttrName(attrName) {
+  return util.isName(attrName);
+}
+
+// const startsWithXML = /^xml/i;
+
+function validateTagName(tagname) {
+  return util.isName(tagname) /* && !tagname.match(startsWithXML) */;
+}
+
+//this function returns the line number for the character at the given index
+function getLineNumberForPosition(xmlData, index) {
+  const lines = xmlData.substring(0, index).split(/\r?\n/);
+  return {
+    line: lines.length,
+
+    // column number is last line's length + 1, because column numbering starts at 1:
+    col: lines[lines.length - 1].length + 1
+  };
+}
+
+//this function returns the position of the first character of match within attrStr
+function getPositionFromMatch(match) {
+  return match.startIndex + match[1].length;
+}
+
+
+/***/ }),
+
+/***/ 2462:
+/***/ ((module, __unused_webpack_exports, __nccwpck_require__) => {
+
+
+//parse Empty Node as self closing node
+const buildFromOrderedJs = __nccwpck_require__(5778);
+const getIgnoreAttributesFn = __nccwpck_require__(8123)
+
+const defaultOptions = {
+  attributeNamePrefix: '@_',
+  attributesGroupName: false,
+  textNodeName: '#text',
+  ignoreAttributes: true,
+  cdataPropName: false,
+  format: false,
+  indentBy: '  ',
+  suppressEmptyNode: false,
+  suppressUnpairedNode: true,
+  suppressBooleanAttributes: true,
+  tagValueProcessor: function(key, a) {
+    return a;
+  },
+  attributeValueProcessor: function(attrName, a) {
+    return a;
+  },
+  preserveOrder: false,
+  commentPropName: false,
+  unpairedTags: [],
+  entities: [
+    { regex: new RegExp("&", "g"), val: "&amp;" },//it must be on top
+    { regex: new RegExp(">", "g"), val: "&gt;" },
+    { regex: new RegExp("<", "g"), val: "&lt;" },
+    { regex: new RegExp("\'", "g"), val: "&apos;" },
+    { regex: new RegExp("\"", "g"), val: "&quot;" }
+  ],
+  processEntities: true,
+  stopNodes: [],
+  // transformTagName: false,
+  // transformAttributeName: false,
+  oneListGroup: false
+};
+
+function Builder(options) {
+  this.options = Object.assign({}, defaultOptions, options);
+  if (this.options.ignoreAttributes === true || this.options.attributesGroupName) {
+    this.isAttribute = function(/*a*/) {
+      return false;
+    };
+  } else {
+    this.ignoreAttributesFn = getIgnoreAttributesFn(this.options.ignoreAttributes)
+    this.attrPrefixLen = this.options.attributeNamePrefix.length;
+    this.isAttribute = isAttribute;
+  }
+
+  this.processTextOrObjNode = processTextOrObjNode
+
+  if (this.options.format) {
+    this.indentate = indentate;
+    this.tagEndChar = '>\n';
+    this.newLine = '\n';
+  } else {
+    this.indentate = function() {
+      return '';
+    };
+    this.tagEndChar = '>';
+    this.newLine = '';
+  }
+}
+
+Builder.prototype.build = function(jObj) {
+  if(this.options.preserveOrder){
+    return buildFromOrderedJs(jObj, this.options);
+  }else {
+    if(Array.isArray(jObj) && this.options.arrayNodeName && this.options.arrayNodeName.length > 1){
+      jObj = {
+        [this.options.arrayNodeName] : jObj
+      }
+    }
+    return this.j2x(jObj, 0, []).val;
+  }
+};
+
+Builder.prototype.j2x = function(jObj, level, ajPath) {
+  let attrStr = '';
+  let val = '';
+  const jPath = ajPath.join('.')
+  for (let key in jObj) {
+    if(!Object.prototype.hasOwnProperty.call(jObj, key)) continue;
+    if (typeof jObj[key] === 'undefined') {
+      // suppress undefined node only if it is not an attribute
+      if (this.isAttribute(key)) {
+        val += '';
+      }
+    } else if (jObj[key] === null) {
+      // a null attribute is dropped from the attribute list, but a null element still emits a closed (empty) tag
+      if (this.isAttribute(key)) {
+        val += '';
+      } else if (key[0] === '?') {
+        val += this.indentate(level) + '<' + key + '?' + this.tagEndChar;
+      } else {
+        val += this.indentate(level) + '<' + key + '/' + this.tagEndChar;
+      }
+      // val += this.indentate(level) + '<' + key + '/' + this.tagEndChar;
+    } else if (jObj[key] instanceof Date) {
+      val += this.buildTextValNode(jObj[key], key, '', level);
+    } else if (typeof jObj[key] !== 'object') {
+      //primitive type
+      const attr = this.isAttribute(key);
+      if (attr && !this.ignoreAttributesFn(attr, jPath)) {
+        attrStr += this.buildAttrPairStr(attr, '' + jObj[key]);
+      } else if (!attr) {
+        //tag value
+        if (key === this.options.textNodeName) {
+          let newval = this.options.tagValueProcessor(key, '' + jObj[key]);
+          val += this.replaceEntitiesValue(newval);
+        } else {
+          val += this.buildTextValNode(jObj[key], key, '', level);
+        }
+      }
+    } else if (Array.isArray(jObj[key])) {
+      //repeated nodes
+      const arrLen = jObj[key].length;
+      let listTagVal = "";
+      let listTagAttr = "";
+      for (let j = 0; j < arrLen; j++) {
+        const item = jObj[key][j];
+        if (typeof item === 'undefined') {
+          // suppress undefined node
+        } else if (item === null) {
+          if(key[0] === "?") val += this.indentate(level) + '<' + key + '?' + this.tagEndChar;
+          else val += this.indentate(level) + '<' + key + '/' + this.tagEndChar;
+          // val += this.indentate(level) + '<' + key + '/' + this.tagEndChar;
+        } else if (typeof item === 'object') {
+          if(this.options.oneListGroup){
+            const result = this.j2x(item, level + 1, ajPath.concat(key));
+            listTagVal += result.val;
+            if (this.options.attributesGroupName && item.hasOwnProperty(this.options.attributesGroupName)) {
+              listTagAttr += result.attrStr
+            }
+          }else{
+            listTagVal += this.processTextOrObjNode(item, key, level, ajPath)
+          }
+        } else {
+          if (this.options.oneListGroup) {
+            let textValue = this.options.tagValueProcessor(key, item);
+            textValue = this.replaceEntitiesValue(textValue);
+            listTagVal += textValue;
+          } else {
+            listTagVal += this.buildTextValNode(item, key, '', level);
+          }
+        }
+      }
+      if(this.options.oneListGroup){
+        listTagVal = this.buildObjectNode(listTagVal, key, listTagAttr, level);
+      }
+      val += listTagVal;
+    } else {
+      //nested node
+      if (this.options.attributesGroupName && key === this.options.attributesGroupName) {
+        const Ks = Object.keys(jObj[key]);
+        const L = Ks.length;
+        for (let j = 0; j < L; j++) {
+          attrStr += this.buildAttrPairStr(Ks[j], '' + jObj[key][Ks[j]]);
+        }
+      } else {
+        val += this.processTextOrObjNode(jObj[key], key, level, ajPath)
+      }
+    }
+  }
+  return {attrStr: attrStr, val: val};
+};
+
+Builder.prototype.buildAttrPairStr = function(attrName, val){
+  val = this.options.attributeValueProcessor(attrName, '' + val);
+  val = this.replaceEntitiesValue(val);
+  if (this.options.suppressBooleanAttributes && val === "true") {
+    return ' ' + attrName;
+  } else return ' ' + attrName + '="' + val + '"';
+}
+
+function processTextOrObjNode (object, key, level, ajPath) {
+  const result = this.j2x(object, level + 1, ajPath.concat(key));
+  if (object[this.options.textNodeName] !== undefined && Object.keys(object).length === 1) {
+    return this.buildTextValNode(object[this.options.textNodeName], key, result.attrStr, level);
+  } else {
+    return this.buildObjectNode(result.val, key, result.attrStr, level);
+  }
+}
+
+Builder.prototype.buildObjectNode = function(val, key, attrStr, level) {
+  if(val === ""){
+    if(key[0] === "?") return  this.indentate(level) + '<' + key + attrStr+ '?' + this.tagEndChar;
+    else {
+      return this.indentate(level) + '<' + key + attrStr + this.closeTag(key) + this.tagEndChar;
+    }
+  }else{
+
+    let tagEndExp = '</' + key + this.tagEndChar;
+    let piClosingChar = "";
+    
+    if(key[0] === "?") {
+      piClosingChar = "?";
+      tagEndExp = "";
+    }
+  
+    // attrStr is an empty string in case the attribute came as undefined or null
+    if ((attrStr || attrStr === '') && val.indexOf('<') === -1) {
+      return ( this.indentate(level) + '<' +  key + attrStr + piClosingChar + '>' + val + tagEndExp );
+    } else if (this.options.commentPropName !== false && key === this.options.commentPropName && piClosingChar.length === 0) {
+      return this.indentate(level) + `<!--${val}-->` + this.newLine;
+    }else {
+      return (
+        this.indentate(level) + '<' + key + attrStr + piClosingChar + this.tagEndChar +
+        val +
+        this.indentate(level) + tagEndExp    );
+    }
+  }
+}
+
+Builder.prototype.closeTag = function(key){
+  let closeTag = "";
+  if(this.options.unpairedTags.indexOf(key) !== -1){ //unpaired
+    if(!this.options.suppressUnpairedNode) closeTag = "/"
+  }else if(this.options.suppressEmptyNode){ //empty
+    closeTag = "/";
+  }else{
+    closeTag = `></${key}`
+  }
+  return closeTag;
+}
+
+function buildEmptyObjNode(val, key, attrStr, level) {
+  if (val !== '') {
+    return this.buildObjectNode(val, key, attrStr, level);
+  } else {
+    if(key[0] === "?") return  this.indentate(level) + '<' + key + attrStr+ '?' + this.tagEndChar;
+    else {
+      return  this.indentate(level) + '<' + key + attrStr + '/' + this.tagEndChar;
+      // return this.buildTagStr(level,key, attrStr);
+    }
+  }
+}
+
+Builder.prototype.buildTextValNode = function(val, key, attrStr, level) {
+  if (this.options.cdataPropName !== false && key === this.options.cdataPropName) {
+    return this.indentate(level) + `<![CDATA[${val}]]>` +  this.newLine;
+  }else if (this.options.commentPropName !== false && key === this.options.commentPropName) {
+    return this.indentate(level) + `<!--${val}-->` +  this.newLine;
+  }else if(key[0] === "?") {//PI tag
+    return  this.indentate(level) + '<' + key + attrStr+ '?' + this.tagEndChar; 
+  }else{
+    let textValue = this.options.tagValueProcessor(key, val);
+    textValue = this.replaceEntitiesValue(textValue);
+  
+    if( textValue === ''){
+      return this.indentate(level) + '<' + key + attrStr + this.closeTag(key) + this.tagEndChar;
+    }else{
+      return this.indentate(level) + '<' + key + attrStr + '>' +
+         textValue +
+        '</' + key + this.tagEndChar;
+    }
+  }
+}
+
+Builder.prototype.replaceEntitiesValue = function(textValue){
+  if(textValue && textValue.length > 0 && this.options.processEntities){
+    for (let i=0; i<this.options.entities.length; i++) {
+      const entity = this.options.entities[i];
+      textValue = textValue.replace(entity.regex, entity.val);
+    }
+  }
+  return textValue;
+}
+
+function indentate(level) {
+  return this.options.indentBy.repeat(level);
+}
+
+function isAttribute(name /*, options*/) {
+  if (name.startsWith(this.options.attributeNamePrefix) && name !== this.options.textNodeName) {
+    return name.substr(this.attrPrefixLen);
+  } else {
+    return false;
+  }
+}
+
+module.exports = Builder;
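+
+// Illustrative sketch (not part of the upstream source): building XML from a
+// plain JS object with the Builder above (exposed publicly as XMLBuilder).
+//
+//   const builder = new Builder({ ignoreAttributes: false, format: true });
+//   builder.build({ root: { '@_id': '1', item: ['a', 'b'] } });
+//   // => '<root id="1">\n  <item>a</item>\n  <item>b</item>\n</root>\n'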
+
+
+/***/ }),
+
+/***/ 5778:
+/***/ ((module) => {
+
+const EOL = "\n";
+
+/**
+ * Build an XML string from the ordered ("preserveOrder") JS representation.
+ * @param {array} jArray
+ * @param {any} options
+ * @returns {string} the serialized XML
+ */
+function toXml(jArray, options) {
+    let indentation = "";
+    if (options.format && options.indentBy.length > 0) {
+        indentation = EOL;
+    }
+    return arrToStr(jArray, options, "", indentation);
+}
+
+function arrToStr(arr, options, jPath, indentation) {
+    let xmlStr = "";
+    let isPreviousElementTag = false;
+
+    for (let i = 0; i < arr.length; i++) {
+        const tagObj = arr[i];
+        const tagName = propName(tagObj);
+        if(tagName === undefined) continue;
+
+        let newJPath = "";
+        if (jPath.length === 0) newJPath = tagName
+        else newJPath = `${jPath}.${tagName}`;
+
+        if (tagName === options.textNodeName) {
+            let tagText = tagObj[tagName];
+            if (!isStopNode(newJPath, options)) {
+                tagText = options.tagValueProcessor(tagName, tagText);
+                tagText = replaceEntitiesValue(tagText, options);
+            }
+            if (isPreviousElementTag) {
+                xmlStr += indentation;
+            }
+            xmlStr += tagText;
+            isPreviousElementTag = false;
+            continue;
+        } else if (tagName === options.cdataPropName) {
+            if (isPreviousElementTag) {
+                xmlStr += indentation;
+            }
+            xmlStr += `<![CDATA[${tagObj[tagName][0][options.textNodeName]}]]>`;
+            isPreviousElementTag = false;
+            continue;
+        } else if (tagName === options.commentPropName) {
+            xmlStr += indentation + `<!--${tagObj[tagName][0][options.textNodeName]}-->`;
+            isPreviousElementTag = true;
+            continue;
+        } else if (tagName[0] === "?") {
+            const attStr = attr_to_str(tagObj[":@"], options);
+            const tempInd = tagName === "?xml" ? "" : indentation;
+            let piTextNodeName = tagObj[tagName][0][options.textNodeName];
+            piTextNodeName = piTextNodeName.length !== 0 ? " " + piTextNodeName : ""; //remove extra spacing
+            xmlStr += tempInd + `<${tagName}${piTextNodeName}${attStr}?>`;
+            isPreviousElementTag = true;
+            continue;
+        }
+        let newIdentation = indentation;
+        if (newIdentation !== "") {
+            newIdentation += options.indentBy;
+        }
+        const attStr = attr_to_str(tagObj[":@"], options);
+        const tagStart = indentation + `<${tagName}${attStr}`;
+        const tagValue = arrToStr(tagObj[tagName], options, newJPath, newIdentation);
+        if (options.unpairedTags.indexOf(tagName) !== -1) {
+            if (options.suppressUnpairedNode) xmlStr += tagStart + ">";
+            else xmlStr += tagStart + "/>";
+        } else if ((!tagValue || tagValue.length === 0) && options.suppressEmptyNode) {
+            xmlStr += tagStart + "/>";
+        } else if (tagValue && tagValue.endsWith(">")) {
+            xmlStr += tagStart + `>${tagValue}${indentation}</${tagName}>`;
+        } else {
+            xmlStr += tagStart + ">";
+            if (tagValue && indentation !== "" && (tagValue.includes("/>") || tagValue.includes("</"))) {
+                xmlStr += indentation + options.indentBy + tagValue + indentation;
+            } else {
+                xmlStr += tagValue;
+            }
+            xmlStr += `</${tagName}>`;
+        }
+        isPreviousElementTag = true;
+    }
+
+    return xmlStr;
+}
+
+function propName(obj) {
+    const keys = Object.keys(obj);
+    for (let i = 0; i < keys.length; i++) {
+        const key = keys[i];
+        if(!obj.hasOwnProperty(key)) continue;
+        if (key !== ":@") return key;
+    }
+}
+
+function attr_to_str(attrMap, options) {
+    let attrStr = "";
+    if (attrMap && !options.ignoreAttributes) {
+        for (let attr in attrMap) {
+            if(!attrMap.hasOwnProperty(attr)) continue;
+            let attrVal = options.attributeValueProcessor(attr, attrMap[attr]);
+            attrVal = replaceEntitiesValue(attrVal, options);
+            if (attrVal === true && options.suppressBooleanAttributes) {
+                attrStr += ` ${attr.substr(options.attributeNamePrefix.length)}`;
+            } else {
+                attrStr += ` ${attr.substr(options.attributeNamePrefix.length)}="${attrVal}"`;
+            }
+        }
+    }
+    return attrStr;
+}
+
+function isStopNode(jPath, options) {
+    jPath = jPath.substr(0, jPath.length - options.textNodeName.length - 1);
+    let tagName = jPath.substr(jPath.lastIndexOf(".") + 1);
+    for (let index in options.stopNodes) {
+        if (options.stopNodes[index] === jPath || options.stopNodes[index] === "*." + tagName) return true;
+    }
+    return false;
+}
+
+function replaceEntitiesValue(textValue, options) {
+    if (textValue && textValue.length > 0 && options.processEntities) {
+        for (let i = 0; i < options.entities.length; i++) {
+            const entity = options.entities[i];
+            textValue = textValue.replace(entity.regex, entity.val);
+        }
+    }
+    return textValue;
+}
+module.exports = toXml;
+
+
+/***/ }),
+
+/***/ 1222:
+/***/ ((module, __unused_webpack_exports, __nccwpck_require__) => {
+
+const util = __nccwpck_require__(8784);
+
+//TODO: handle comments
+function readDocType(xmlData, i){
+    
+    const entities = {};
+    if( xmlData[i + 3] === 'O' &&
+         xmlData[i + 4] === 'C' &&
+         xmlData[i + 5] === 'T' &&
+         xmlData[i + 6] === 'Y' &&
+         xmlData[i + 7] === 'P' &&
+         xmlData[i + 8] === 'E')
+    {    
+        i = i+9;
+        let angleBracketsCount = 1;
+        let hasBody = false, comment = false;
+        let exp = "";
+        for(;i<xmlData.length;i++){
+            if (xmlData[i] === '<' && !comment) { //Determine the tag type
+                if( hasBody && isEntity(xmlData, i)){
+                    i += 7;
+                    let entityName, val;
+                    [entityName, val,i] = readEntityExp(xmlData,i+1);
+                    if(val.indexOf("&") === -1) //Parameter entities are not supported
+                        entities[ validateEntityName(entityName) ] = {
+                            regx : RegExp( `&${entityName};`,"g"),
+                            val: val
+                        };
+                }
+                else if( hasBody && isElement(xmlData, i))  i += 8;//Not supported
+                else if( hasBody && isAttlist(xmlData, i))  i += 8;//Not supported
+                else if( hasBody && isNotation(xmlData, i)) i += 9;//Not supported
+                else if( isComment(xmlData, i))             comment = true;
+                else                                        throw new Error("Invalid DOCTYPE");
+
+                angleBracketsCount++;
+                exp = "";
+            } else if (xmlData[i] === '>') { //Read tag content
+                if(comment){
+                    if( xmlData[i - 1] === "-" && xmlData[i - 2] === "-"){
+                        comment = false;
+                        angleBracketsCount--;
+                    }
+                }else{
+                    angleBracketsCount--;
+                }
+                if (angleBracketsCount === 0) {
+                  break;
+                }
+            }else if( xmlData[i] === '['){
+                hasBody = true;
+            }else{
+                exp += xmlData[i];
+            }
+        }
+        if(angleBracketsCount !== 0){
+            throw new Error(`Unclosed DOCTYPE`);
+        }
+    }else{
+        throw new Error(`Invalid Tag instead of DOCTYPE`);
+    }
+    return {entities, i};
+}
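+
+// Illustrative sketch (not part of the upstream source): readDocType() is called
+// with `i` pointing at the '<' of '<!DOCTYPE' and collects internal entity
+// declarations so the parser can expand them later.
+//
+//   const { entities, i } = readDocType('<!DOCTYPE note [ <!ENTITY writer "J. Doe"> ]>', 0);
+//   // entities.writer => { regx: /&writer;/g, val: 'J. Doe' }; i points at the closing '>'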
+
+function readEntityExp(xmlData,i){
+    //External entities are not supported
+    //    <!ENTITY ext SYSTEM "http://normal-website.com" >
+
+    //Parameter entities are not supported
+    //    <!ENTITY entityname "&anotherElement;">
+
+    //Internal entities are supported
+    //    <!ENTITY entityname "replacement text">
+    
+    //read EntityName
+    let entityName = "";
+    for (; i < xmlData.length && (xmlData[i] !== "'" && xmlData[i] !== '"' ); i++) {
+        // if(xmlData[i] === " ") continue;
+        // else 
+        entityName += xmlData[i];
+    }
+    entityName = entityName.trim();
+    if(entityName.indexOf(" ") !== -1) throw new Error("External entities are not supported");
+
+    //read Entity Value
+    const startChar = xmlData[i++];
+    let val = ""
+    for (; i < xmlData.length && xmlData[i] !== startChar ; i++) {
+        val += xmlData[i];
+    }
+    return [entityName, val, i];
+}
+
+function isComment(xmlData, i){
+    if(xmlData[i+1] === '!' &&
+    xmlData[i+2] === '-' &&
+    xmlData[i+3] === '-') return true
+    return false
+}
+function isEntity(xmlData, i){
+    if(xmlData[i+1] === '!' &&
+    xmlData[i+2] === 'E' &&
+    xmlData[i+3] === 'N' &&
+    xmlData[i+4] === 'T' &&
+    xmlData[i+5] === 'I' &&
+    xmlData[i+6] === 'T' &&
+    xmlData[i+7] === 'Y') return true
+    return false
+}
+function isElement(xmlData, i){
+    if(xmlData[i+1] === '!' &&
+    xmlData[i+2] === 'E' &&
+    xmlData[i+3] === 'L' &&
+    xmlData[i+4] === 'E' &&
+    xmlData[i+5] === 'M' &&
+    xmlData[i+6] === 'E' &&
+    xmlData[i+7] === 'N' &&
+    xmlData[i+8] === 'T') return true
+    return false
+}
+
+function isAttlist(xmlData, i){
+    if(xmlData[i+1] === '!' &&
+    xmlData[i+2] === 'A' &&
+    xmlData[i+3] === 'T' &&
+    xmlData[i+4] === 'T' &&
+    xmlData[i+5] === 'L' &&
+    xmlData[i+6] === 'I' &&
+    xmlData[i+7] === 'S' &&
+    xmlData[i+8] === 'T') return true
+    return false
+}
+function isNotation(xmlData, i){
+    if(xmlData[i+1] === '!' &&
+    xmlData[i+2] === 'N' &&
+    xmlData[i+3] === 'O' &&
+    xmlData[i+4] === 'T' &&
+    xmlData[i+5] === 'A' &&
+    xmlData[i+6] === 'T' &&
+    xmlData[i+7] === 'I' &&
+    xmlData[i+8] === 'O' &&
+    xmlData[i+9] === 'N') return true
+    return false
+}
+
+function validateEntityName(name){
+    if (util.isName(name))
+	return name;
+    else
+        throw new Error(`Invalid entity name ${name}`);
+}
+
+module.exports = readDocType;
+
+
+/***/ }),
+
+/***/ 1110:
+/***/ ((__unused_webpack_module, exports) => {
+
+
+const defaultOptions = {
+    preserveOrder: false,
+    attributeNamePrefix: '@_',
+    attributesGroupName: false,
+    textNodeName: '#text',
+    ignoreAttributes: true,
+    removeNSPrefix: false, // remove NS from tag name or attribute name if true
+    allowBooleanAttributes: false, //a tag can have attributes without any value
+    //ignoreRootElement : false,
+    parseTagValue: true,
+    parseAttributeValue: false,
+    trimValues: true, //Trim string values of tag and attributes
+    cdataPropName: false,
+    numberParseOptions: {
+      hex: true,
+      leadingZeros: true,
+      eNotation: true
+    },
+    tagValueProcessor: function(tagName, val) {
+      return val;
+    },
+    attributeValueProcessor: function(attrName, val) {
+      return val;
+    },
+    stopNodes: [], //nested tags will not be parsed even for errors
+    alwaysCreateTextNode: false,
+    isArray: () => false,
+    commentPropName: false,
+    unpairedTags: [],
+    processEntities: true,
+    htmlEntities: false,
+    ignoreDeclaration: false,
+    ignorePiTags: false,
+    transformTagName: false,
+    transformAttributeName: false,
+    updateTag: function(tagName, jPath, attrs){
+      return tagName
+    },
+    // skipEmptyListItem: false
+};
+   
+const buildOptions = function(options) {
+    return Object.assign({}, defaultOptions, options);
+};
+
+exports.buildOptions = buildOptions;
+exports.defaultOptions = defaultOptions;
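+
+// Illustrative sketch (not part of the upstream source): buildOptions() shallow-merges
+// user options over the defaults above, so unspecified keys keep their default values.
+//
+//   const opts = buildOptions({ ignoreAttributes: false, trimValues: false });
+//   opts.ignoreAttributes;   // => false (overridden)
+//   opts.parseTagValue;      // => true  (default retained)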
+
+/***/ }),
+
+/***/ 8974:
+/***/ ((module, __unused_webpack_exports, __nccwpck_require__) => {
+
+
+///@ts-check
+
+const util = __nccwpck_require__(8784);
+const xmlNode = __nccwpck_require__(5958);
+const readDocType = __nccwpck_require__(1222);
+const toNumber = __nccwpck_require__(6349);
+const getIgnoreAttributesFn = __nccwpck_require__(8123)
+
+// const regx =
+//   '<((!\\[CDATA\\[([\\s\\S]*?)(]]>))|((NAME:)?(NAME))([^>]*)>|((\\/)(NAME)\\s*>))([^<]*)'
+//   .replace(/NAME/g, util.nameRegexp);
+
+//const tagsRegx = new RegExp("<(\\/?[\\w:\\-\._]+)([^>]*)>(\\s*"+cdataRegx+")*([^<]+)?","g");
+//const tagsRegx = new RegExp("<(\\/?)((\\w*:)?([\\w:\\-\._]+))([^>]*)>([^<]*)("+cdataRegx+"([^<]*))*([^<]+)?","g");
+
+class OrderedObjParser{
+  constructor(options){
+    this.options = options;
+    this.currentNode = null;
+    this.tagsNodeStack = [];
+    this.docTypeEntities = {};
+    this.lastEntities = {
+      "apos" : { regex: /&(apos|#39|#x27);/g, val : "'"},
+      "gt" : { regex: /&(gt|#62|#x3E);/g, val : ">"},
+      "lt" : { regex: /&(lt|#60|#x3C);/g, val : "<"},
+      "quot" : { regex: /&(quot|#34|#x22);/g, val : "\""},
+    };
+    this.ampEntity = { regex: /&(amp|#38|#x26);/g, val : "&"};
+    this.htmlEntities = {
+      "space": { regex: /&(nbsp|#160);/g, val: " " },
+      // "lt" : { regex: /&(lt|#60);/g, val: "<" },
+      // "gt" : { regex: /&(gt|#62);/g, val: ">" },
+      // "amp" : { regex: /&(amp|#38);/g, val: "&" },
+      // "quot" : { regex: /&(quot|#34);/g, val: "\"" },
+      // "apos" : { regex: /&(apos|#39);/g, val: "'" },
+      "cent" : { regex: /&(cent|#162);/g, val: "¢" },
+      "pound" : { regex: /&(pound|#163);/g, val: "£" },
+      "yen" : { regex: /&(yen|#165);/g, val: "¥" },
+      "euro" : { regex: /&(euro|#8364);/g, val: "€" },
+      "copyright" : { regex: /&(copy|#169);/g, val: "©" },
+      "reg" : { regex: /&(reg|#174);/g, val: "®" },
+      "inr" : { regex: /&(inr|#8377);/g, val: "₹" },
+      "num_dec": { regex: /&#([0-9]{1,7});/g, val : (_, str) => String.fromCharCode(Number.parseInt(str, 10)) },
+      "num_hex": { regex: /&#x([0-9a-fA-F]{1,6});/g, val : (_, str) => String.fromCharCode(Number.parseInt(str, 16)) },
+    };
+    this.addExternalEntities = addExternalEntities;
+    this.parseXml = parseXml;
+    this.parseTextData = parseTextData;
+    this.resolveNameSpace = resolveNameSpace;
+    this.buildAttributesMap = buildAttributesMap;
+    this.isItStopNode = isItStopNode;
+    this.replaceEntitiesValue = replaceEntitiesValue;
+    this.readStopNodeData = readStopNodeData;
+    this.saveTextToParentTag = saveTextToParentTag;
+    this.addChild = addChild;
+    this.ignoreAttributesFn = getIgnoreAttributesFn(this.options.ignoreAttributes)
+  }
+
+}
+
+function addExternalEntities(externalEntities){
+  const entKeys = Object.keys(externalEntities);
+  for (let i = 0; i < entKeys.length; i++) {
+    const ent = entKeys[i];
+    this.lastEntities[ent] = {
+       regex: new RegExp("&"+ent+";","g"),
+       val : externalEntities[ent]
+    }
+  }
+}
+
+/**
+ * @param {string} val
+ * @param {string} tagName
+ * @param {string} jPath
+ * @param {boolean} dontTrim
+ * @param {boolean} hasAttributes
+ * @param {boolean} isLeafNode
+ * @param {boolean} escapeEntities
+ */
+function parseTextData(val, tagName, jPath, dontTrim, hasAttributes, isLeafNode, escapeEntities) {
+  if (val !== undefined) {
+    if (this.options.trimValues && !dontTrim) {
+      val = val.trim();
+    }
+    if(val.length > 0){
+      if(!escapeEntities) val = this.replaceEntitiesValue(val);
+      
+      const newval = this.options.tagValueProcessor(tagName, val, jPath, hasAttributes, isLeafNode);
+      if(newval === null || newval === undefined){
+        //don't parse
+        return val;
+      }else if(typeof newval !== typeof val || newval !== val){
+        //overwrite
+        return newval;
+      }else if(this.options.trimValues){
+        return parseValue(val, this.options.parseTagValue, this.options.numberParseOptions);
+      }else{
+        const trimmedVal = val.trim();
+        if(trimmedVal === val){
+          return parseValue(val, this.options.parseTagValue, this.options.numberParseOptions);
+        }else{
+          return val;
+        }
+      }
+    }
+  }
+}
+
+function resolveNameSpace(tagname) {
+  if (this.options.removeNSPrefix) {
+    const tags = tagname.split(':');
+    const prefix = tagname.charAt(0) === '/' ? '/' : '';
+    if (tags[0] === 'xmlns') {
+      return '';
+    }
+    if (tags.length === 2) {
+      tagname = prefix + tags[1];
+    }
+  }
+  return tagname;
+}
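+
+// Illustrative sketch (not part of the upstream source): with removeNSPrefix
+// enabled, resolveNameSpace() strips a single namespace prefix and drops xmlns
+// declarations entirely.
+//
+//   resolveNameSpace.call({options: {removeNSPrefix: true}}, 'soap:Envelope');  // => 'Envelope'
+//   resolveNameSpace.call({options: {removeNSPrefix: true}}, '/soap:Envelope'); // => '/Envelope'
+//   resolveNameSpace.call({options: {removeNSPrefix: true}}, 'xmlns');          // => ''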
+
+//TODO: change regex to capture NS
+//const attrsRegx = new RegExp("([\\w\\-\\.\\:]+)\\s*=\\s*(['\"])((.|\n)*?)\\2","gm");
+const attrsRegx = new RegExp('([^\\s=]+)\\s*(=\\s*([\'"])([\\s\\S]*?)\\3)?', 'gm');
+
+function buildAttributesMap(attrStr, jPath, tagName) {
+  if (this.options.ignoreAttributes !== true && typeof attrStr === 'string') {
+    // attrStr = attrStr.replace(/\r?\n/g, ' ');
+    //attrStr = attrStr || attrStr.trim();
+
+    const matches = util.getAllMatches(attrStr, attrsRegx);
+    const len = matches.length; //don't make it inline
+    const attrs = {};
+    for (let i = 0; i < len; i++) {
+      const attrName = this.resolveNameSpace(matches[i][1]);
+      if (this.ignoreAttributesFn(attrName, jPath)) {
+        continue
+      }
+      let oldVal = matches[i][4];
+      let aName = this.options.attributeNamePrefix + attrName;
+      if (attrName.length) {
+        if (this.options.transformAttributeName) {
+          aName = this.options.transformAttributeName(aName);
+        }
+        if(aName === "__proto__") aName  = "#__proto__";
+        if (oldVal !== undefined) {
+          if (this.options.trimValues) {
+            oldVal = oldVal.trim();
+          }
+          oldVal = this.replaceEntitiesValue(oldVal);
+          const newVal = this.options.attributeValueProcessor(attrName, oldVal, jPath);
+          if(newVal === null || newVal === undefined){
+            //don't parse
+            attrs[aName] = oldVal;
+          }else if(typeof newVal !== typeof oldVal || newVal !== oldVal){
+            //overwrite
+            attrs[aName] = newVal;
+          }else{
+            //parse
+            attrs[aName] = parseValue(
+              oldVal,
+              this.options.parseAttributeValue,
+              this.options.numberParseOptions
+            );
+          }
+        } else if (this.options.allowBooleanAttributes) {
+          attrs[aName] = true;
+        }
+      }
+    }
+    if (!Object.keys(attrs).length) {
+      return;
+    }
+    if (this.options.attributesGroupName) {
+      const attrCollection = {};
+      attrCollection[this.options.attributesGroupName] = attrs;
+      return attrCollection;
+    }
+    return attrs
+  }
+}
+
+const parseXml = function(xmlData) {
+  xmlData = xmlData.replace(/\r\n?/g, "\n"); //TODO: remove this line
+  const xmlObj = new xmlNode('!xml');
+  let currentNode = xmlObj;
+  let textData = "";
+  let jPath = "";
+  for(let i=0; i< xmlData.length; i++){//for each char in XML data
+    const ch = xmlData[i];
+    if(ch === '<'){
+      // const nextIndex = i+1;
+      // const _2ndChar = xmlData[nextIndex];
+      if( xmlData[i+1] === '/') {//Closing Tag
+        const closeIndex = findClosingIndex(xmlData, ">", i, "Closing Tag is not closed.")
+        let tagName = xmlData.substring(i+2,closeIndex).trim();
+
+        if(this.options.removeNSPrefix){
+          const colonIndex = tagName.indexOf(":");
+          if(colonIndex !== -1){
+            tagName = tagName.substr(colonIndex+1);
+          }
+        }
+
+        if(this.options.transformTagName) {
+          tagName = this.options.transformTagName(tagName);
+        }
+
+        if(currentNode){
+          textData = this.saveTextToParentTag(textData, currentNode, jPath);
+        }
+
+        //check if the last opened tag was an unpaired tag
+        const lastTagName = jPath.substring(jPath.lastIndexOf(".")+1);
+        if(tagName && this.options.unpairedTags.indexOf(tagName) !== -1 ){
+          throw new Error(`Unpaired tag can not be used as closing tag: </${tagName}>`);
+        }
+        let propIndex = 0
+        if(lastTagName && this.options.unpairedTags.indexOf(lastTagName) !== -1 ){
+          propIndex = jPath.lastIndexOf('.', jPath.lastIndexOf('.')-1)
+          this.tagsNodeStack.pop();
+        }else{
+          propIndex = jPath.lastIndexOf(".");
+        }
+        jPath = jPath.substring(0, propIndex);
+
+        currentNode = this.tagsNodeStack.pop();//avoid recursion, set the parent tag scope
+        textData = "";
+        i = closeIndex;
+      } else if( xmlData[i+1] === '?') {
+
+        let tagData = readTagExp(xmlData,i, false, "?>");
+        if(!tagData) throw new Error("Pi Tag is not closed.");
+
+        textData = this.saveTextToParentTag(textData, currentNode, jPath);
+        if( !((this.options.ignoreDeclaration && tagData.tagName === "?xml") || this.options.ignorePiTags) ){
+          const childNode = new xmlNode(tagData.tagName);
+          childNode.add(this.options.textNodeName, "");
+
+          if(tagData.tagName !== tagData.tagExp && tagData.attrExpPresent){
+            childNode[":@"] = this.buildAttributesMap(tagData.tagExp, jPath, tagData.tagName);
+          }
+          this.addChild(currentNode, childNode, jPath);
+        }
+
+        i = tagData.closeIndex + 1;
+      } else if(xmlData.substr(i + 1, 3) === '!--') {
+        const endIndex = findClosingIndex(xmlData, "-->", i+4, "Comment is not closed.")
+        if(this.options.commentPropName){
+          const comment = xmlData.substring(i + 4, endIndex - 2);
+
+          textData = this.saveTextToParentTag(textData, currentNode, jPath);
+
+          currentNode.add(this.options.commentPropName, [ { [this.options.textNodeName] : comment } ]);
+        }
+        i = endIndex;
+      } else if( xmlData.substr(i + 1, 2) === '!D') {
+        const result = readDocType(xmlData, i);
+        this.docTypeEntities = result.entities;
+        i = result.i;
+      }else if(xmlData.substr(i + 1, 2) === '![') {
+        const closeIndex = findClosingIndex(xmlData, "]]>", i, "CDATA is not closed.") - 2;
+        const tagExp = xmlData.substring(i + 9,closeIndex);
+
+        textData = this.saveTextToParentTag(textData, currentNode, jPath);
+
+        let val = this.parseTextData(tagExp, currentNode.tagname, jPath, true, false, true, true);
+        if(val == undefined) val = "";
+
+        //cdata should be set even if it is 0 length string
+        if(this.options.cdataPropName){
+          currentNode.add(this.options.cdataPropName, [ { [this.options.textNodeName] : tagExp } ]);
+        }else{
+          currentNode.add(this.options.textNodeName, val);
+        }
+        
+        i = closeIndex + 2;
+      }else {//Opening tag
+        let result = readTagExp(xmlData,i, this.options.removeNSPrefix);
+        let tagName= result.tagName;
+        const rawTagName = result.rawTagName;
+        let tagExp = result.tagExp;
+        let attrExpPresent = result.attrExpPresent;
+        let closeIndex = result.closeIndex;
+
+        if (this.options.transformTagName) {
+          tagName = this.options.transformTagName(tagName);
+        }
+        
+        //save text as child node
+        if (currentNode && textData) {
+          if(currentNode.tagname !== '!xml'){
+            //when nested tag is found
+            textData = this.saveTextToParentTag(textData, currentNode, jPath, false);
+          }
+        }
+
+        //check if the last tag was an unpaired tag
+        const lastTag = currentNode;
+        if(lastTag && this.options.unpairedTags.indexOf(lastTag.tagname) !== -1 ){
+          currentNode = this.tagsNodeStack.pop();
+          jPath = jPath.substring(0, jPath.lastIndexOf("."));
+        }
+        if(tagName !== xmlObj.tagname){
+          jPath += jPath ? "." + tagName : tagName;
+        }
+        if (this.isItStopNode(this.options.stopNodes, jPath, tagName)) {
+          let tagContent = "";
+          //self-closing tag
+          if(tagExp.length > 0 && tagExp.lastIndexOf("/") === tagExp.length - 1){
+            if(tagName[tagName.length - 1] === "/"){ //remove trailing '/'
+              tagName = tagName.substr(0, tagName.length - 1);
+              jPath = jPath.substr(0, jPath.length - 1);
+              tagExp = tagName;
+            }else{
+              tagExp = tagExp.substr(0, tagExp.length - 1);
+            }
+            i = result.closeIndex;
+          }
+          //unpaired tag
+          else if(this.options.unpairedTags.indexOf(tagName) !== -1){
+            
+            i = result.closeIndex;
+          }
+          //normal tag
+          else{
+            //read until closing tag is found
+            const result = this.readStopNodeData(xmlData, rawTagName, closeIndex + 1);
+            if(!result) throw new Error(`Unexpected end of ${rawTagName}`);
+            i = result.i;
+            tagContent = result.tagContent;
+          }
+
+          const childNode = new xmlNode(tagName);
+          if(tagName !== tagExp && attrExpPresent){
+            childNode[":@"] = this.buildAttributesMap(tagExp, jPath, tagName);
+          }
+          if(tagContent) {
+            tagContent = this.parseTextData(tagContent, tagName, jPath, true, attrExpPresent, true, true);
+          }
+          
+          jPath = jPath.substr(0, jPath.lastIndexOf("."));
+          childNode.add(this.options.textNodeName, tagContent);
+          
+          this.addChild(currentNode, childNode, jPath)
+        }else{
+  //selfClosing tag
+          if(tagExp.length > 0 && tagExp.lastIndexOf("/") === tagExp.length - 1){
+            if(tagName[tagName.length - 1] === "/"){ //remove trailing '/'
+              tagName = tagName.substr(0, tagName.length - 1);
+              jPath = jPath.substr(0, jPath.length - 1);
+              tagExp = tagName;
+            }else{
+              tagExp = tagExp.substr(0, tagExp.length - 1);
+            }
+            
+            if(this.options.transformTagName) {
+              tagName = this.options.transformTagName(tagName);
+            }
+
+            const childNode = new xmlNode(tagName);
+            if(tagName !== tagExp && attrExpPresent){
+              childNode[":@"] = this.buildAttributesMap(tagExp, jPath, tagName);
+            }
+            this.addChild(currentNode, childNode, jPath)
+            jPath = jPath.substr(0, jPath.lastIndexOf("."));
+          }
+    //opening tag
+          else{
+            const childNode = new xmlNode( tagName);
+            this.tagsNodeStack.push(currentNode);
+            
+            if(tagName !== tagExp && attrExpPresent){
+              childNode[":@"] = this.buildAttributesMap(tagExp, jPath, tagName);
+            }
+            this.addChild(currentNode, childNode, jPath)
+            currentNode = childNode;
+          }
+          textData = "";
+          i = closeIndex;
+        }
+      }
+    }else{
+      textData += xmlData[i];
+    }
+  }
+  return xmlObj.child;
+}
+
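+// Attaches a child node after running the user-supplied updateTag hook:
+// a string return value renames the tag, `false` drops the node entirely.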
+function addChild(currentNode, childNode, jPath){
+  const result = this.options.updateTag(childNode.tagname, jPath, childNode[":@"])
+  if(result === false){
+  }else if(typeof result === "string"){
+    childNode.tagname = result
+    currentNode.addChild(childNode);
+  }else{
+    currentNode.addChild(childNode);
+  }
+}
+
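+// Replaces DOCTYPE-declared entities, the parser's built-in entities and,
+// optionally, HTML entities in a text value; '&amp;' is substituted last so
+// that earlier replacements are not decoded a second time.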
+const replaceEntitiesValue = function(val){
+
+  if(this.options.processEntities){
+    for(let entityName in this.docTypeEntities){
+      const entity = this.docTypeEntities[entityName];
+      val = val.replace( entity.regx, entity.val);
+    }
+    for(let entityName in this.lastEntities){
+      const entity = this.lastEntities[entityName];
+      val = val.replace( entity.regex, entity.val);
+    }
+    if(this.options.htmlEntities){
+      for(let entityName in this.htmlEntities){
+        const entity = this.htmlEntities[entityName];
+        val = val.replace( entity.regex, entity.val);
+      }
+    }
+    val = val.replace( this.ampEntity.regex, this.ampEntity.val);
+  }
+  return val;
+}
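+// Flushes accumulated character data into the current node as a text child.
+// When isLeafNode is not supplied it defaults to "the node has no children yet"
+// and is forwarded to parseTextData.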
+function saveTextToParentTag(textData, currentNode, jPath, isLeafNode) {
+  if (textData) { //store previously collected data as textNode
+    if(isLeafNode === undefined) isLeafNode = Object.keys(currentNode.child).length === 0
+    
+    textData = this.parseTextData(textData,
+      currentNode.tagname,
+      jPath,
+      false,
+      currentNode[":@"] ? Object.keys(currentNode[":@"]).length !== 0 : false,
+      isLeafNode);
+
+    if (textData !== undefined && textData !== "")
+      currentNode.add(this.options.textNodeName, textData);
+    textData = "";
+  }
+  return textData;
+}
+
+//TODO: use jPath to simplify the logic
+/**
+ * Checks whether the current tag is configured as a stop node, either by its
+ * full jPath or by the wildcard form "*.tagName".
+ * @param {string[]} stopNodes
+ * @param {string} jPath
+ * @param {string} currentTagName
+ */
+function isItStopNode(stopNodes, jPath, currentTagName){
+  const allNodesExp = "*." + currentTagName;
+  for (const stopNodePath in stopNodes) {
+    const stopNodeExp = stopNodes[stopNodePath];
+    if( allNodesExp === stopNodeExp || jPath === stopNodeExp  ) return true;
+  }
+  return false;
+}
+
+/**
+ * Returns the tag expression and the index where it ends, correctly skipping
+ * over attribute values quoted with single or double quotes.
+ * @param {string} xmlData
+ * @param {number} i starting index
+ * @param {string} closingChar character(s) that terminate the expression
+ * @returns {{data: string, index: number}|undefined}
+ */
+function tagExpWithClosingIndex(xmlData, i, closingChar = ">"){
+  let attrBoundary;
+  let tagExp = "";
+  for (let index = i; index < xmlData.length; index++) {
+    let ch = xmlData[index];
+    if (attrBoundary) {
+        if (ch === attrBoundary) attrBoundary = "";//reset
+    } else if (ch === '"' || ch === "'") {
+        attrBoundary = ch;
+    } else if (ch === closingChar[0]) {
+      if(closingChar[1]){
+        if(xmlData[index + 1] === closingChar[1]){
+          return {
+            data: tagExp,
+            index: index
+          }
+        }
+      }else{
+        return {
+          data: tagExp,
+          index: index
+        }
+      }
+    } else if (ch === '\t') {
+      ch = " "
+    }
+    tagExp += ch;
+  }
+}
+
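+// Returns the index of the last character of `str` at or after position `i`,
+// or throws `errMsg` when the substring is never found.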
+function findClosingIndex(xmlData, str, i, errMsg){
+  const closingIndex = xmlData.indexOf(str, i);
+  if(closingIndex === -1){
+    throw new Error(errMsg)
+  }else{
+    return closingIndex + str.length - 1;
+  }
+}
+
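+// Reads the tag expression that starts at the '<' at index `i`, splitting it
+// into the tag name and its attribute expression and optionally stripping a
+// namespace prefix from the name.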
+function readTagExp(xmlData,i, removeNSPrefix, closingChar = ">"){
+  const result = tagExpWithClosingIndex(xmlData, i+1, closingChar);
+  if(!result) return;
+  let tagExp = result.data;
+  const closeIndex = result.index;
+  const separatorIndex = tagExp.search(/\s/);
+  let tagName = tagExp;
+  let attrExpPresent = true;
+  if(separatorIndex !== -1){//separate tag name and attributes expression
+    tagName = tagExp.substring(0, separatorIndex);
+    tagExp = tagExp.substring(separatorIndex + 1).trimStart();
+  }
+
+  const rawTagName = tagName;
+  if(removeNSPrefix){
+    const colonIndex = tagName.indexOf(":");
+    if(colonIndex !== -1){
+      tagName = tagName.substr(colonIndex+1);
+      attrExpPresent = tagName !== result.data.substr(colonIndex + 1);
+    }
+  }
+
+  return {
+    tagName: tagName,
+    tagExp: tagExp,
+    closeIndex: closeIndex,
+    attrExpPresent: attrExpPresent,
+    rawTagName: rawTagName,
+  }
+}
+/**
+ * find paired tag for a stop node
+ * @param {string} xmlData 
+ * @param {string} tagName 
+ * @param {number} i 
+ */
+function readStopNodeData(xmlData, tagName, i){
+  const startIndex = i;
+  // Starting at 1 since we already have an open tag
+  let openTagCount = 1;
+
+  for (; i < xmlData.length; i++) {
+    if( xmlData[i] === "<"){ 
+      if (xmlData[i+1] === "/") {//close tag
+          const closeIndex = findClosingIndex(xmlData, ">", i, `${tagName} is not closed`);
+          let closeTagName = xmlData.substring(i+2,closeIndex).trim();
+          if(closeTagName === tagName){
+            openTagCount--;
+            if (openTagCount === 0) {
+              return {
+                tagContent: xmlData.substring(startIndex, i),
+                i : closeIndex
+              }
+            }
+          }
+          i=closeIndex;
+        } else if(xmlData[i+1] === '?') { 
+          const closeIndex = findClosingIndex(xmlData, "?>", i+1, "StopNode is not closed.")
+          i=closeIndex;
+        } else if(xmlData.substr(i + 1, 3) === '!--') { 
+          const closeIndex = findClosingIndex(xmlData, "-->", i+3, "StopNode is not closed.")
+          i=closeIndex;
+        } else if(xmlData.substr(i + 1, 2) === '![') { 
+          const closeIndex = findClosingIndex(xmlData, "]]>", i, "StopNode is not closed.") - 2;
+          i=closeIndex;
+        } else {
+          const tagData = readTagExp(xmlData, i, '>')
+
+          if (tagData) {
+            const openTagName = tagData && tagData.tagName;
+            if (openTagName === tagName && tagData.tagExp[tagData.tagExp.length-1] !== "/") {
+              openTagCount++;
+            }
+            i=tagData.closeIndex;
+          }
+        }
+      }
+  }//end for loop
+}
+
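+// Converts a string value to a boolean or number when value parsing is enabled;
+// otherwise returns the value unchanged (or '' when it does not exist).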
+function parseValue(val, shouldParse, options) {
+  if (shouldParse && typeof val === 'string') {
+    //console.log(options)
+    const newval = val.trim();
+    if(newval === 'true' ) return true;
+    else if(newval === 'false' ) return false;
+    else return toNumber(val, options);
+  } else {
+    if (util.isExist(val)) {
+      return val;
+    } else {
+      return '';
+    }
+  }
+}
+
+
+module.exports = OrderedObjParser;
+
+
+/***/ }),
+
+/***/ 3981:
+/***/ ((module, __unused_webpack_exports, __nccwpck_require__) => {
+
+const { buildOptions} = __nccwpck_require__(1110);
+const OrderedObjParser = __nccwpck_require__(8974);
+const { prettify} = __nccwpck_require__(7063);
+const validator = __nccwpck_require__(7520);
+
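+// Public parser facade: optionally validates the XML, parses it into the
+// ordered intermediate form, and flattens the result with prettify() unless
+// preserveOrder is set.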
+class XMLParser{
+    
+    constructor(options){
+        this.externalEntities = {};
+        this.options = buildOptions(options);
+        
+    }
+    /**
+     * Parse XML data to a JS object
+     * @param {string|Buffer} xmlData 
+     * @param {boolean|Object} validationOption 
+     */
+    parse(xmlData,validationOption){
+        if(typeof xmlData === "string"){
+        }else if( xmlData.toString){
+            xmlData = xmlData.toString();
+        }else{
+            throw new Error("XML data is accepted in String or Bytes[] form.")
+        }
+        if( validationOption){
+            if(validationOption === true) validationOption = {}; //validate with default options
+            
+            const result = validator.validate(xmlData, validationOption);
+            if (result !== true) {
+              throw Error( `${result.err.msg}:${result.err.line}:${result.err.col}` )
+            }
+          }
+        const orderedObjParser = new OrderedObjParser(this.options);
+        orderedObjParser.addExternalEntities(this.externalEntities);
+        const orderedResult = orderedObjParser.parseXml(xmlData);
+        if(this.options.preserveOrder || orderedResult === undefined) return orderedResult;
+        else return prettify(orderedResult, this.options);
+    }
+
+    /**
+     * Add an entity that is not supported by this library by default
+     * @param {string} key 
+     * @param {string} value 
+     */
+    addEntity(key, value){
+        if(value.indexOf("&") !== -1){
+            throw new Error("Entity value can't have '&'")
+        }else if(key.indexOf("&") !== -1 || key.indexOf(";") !== -1){
+            throw new Error("An entity must be set without '&' and ';'. Eg. use '#xD' for '&#xD;'")
+        }else if(value === "&"){
+            throw new Error("An entity with value '&' is not permitted");
+        }else{
+            this.externalEntities[key] = value;
+        }
+    }
+}
+
+module.exports = XMLParser;
+
+/***/ }),
+
+/***/ 7063:
+/***/ ((__unused_webpack_module, exports) => {
+
+
+
+/**
+ * Flattens the ordered parse result into a plain nested JS object.
+ * @param {array} node
+ * @param {any} options
+ * @returns object
+ */
+function prettify(node, options){
+  return compress( node, options);
+}
+
+/**
+ * Recursively converts the ordered (array-based) node list into a nested
+ * object, merging text nodes and attributes and collecting repeated tags
+ * into arrays.
+ * @param {array} arr
+ * @param {object} options
+ * @param {string} jPath
+ * @returns object
+ */
+function compress(arr, options, jPath){
+  let text;
+  const compressedObj = {};
+  for (let i = 0; i < arr.length; i++) {
+    const tagObj = arr[i];
+    const property = propName(tagObj);
+    let newJpath = "";
+    if(jPath === undefined) newJpath = property;
+    else newJpath = jPath + "." + property;
+
+    if(property === options.textNodeName){
+      if(text === undefined) text = tagObj[property];
+      else text += "" + tagObj[property];
+    }else if(property === undefined){
+      continue;
+    }else if(tagObj[property]){
+      
+      let val = compress(tagObj[property], options, newJpath);
+      const isLeaf = isLeafTag(val, options);
+
+      if(tagObj[":@"]){
+        assignAttributes( val, tagObj[":@"], newJpath, options);
+      }else if(Object.keys(val).length === 1 && val[options.textNodeName] !== undefined && !options.alwaysCreateTextNode){
+        val = val[options.textNodeName];
+      }else if(Object.keys(val).length === 0){
+        if(options.alwaysCreateTextNode) val[options.textNodeName] = "";
+        else val = "";
+      }
+
+      if(compressedObj[property] !== undefined && compressedObj.hasOwnProperty(property)) {
+        if(!Array.isArray(compressedObj[property])) {
+            compressedObj[property] = [ compressedObj[property] ];
+        }
+        compressedObj[property].push(val);
+      }else{
+        //TODO: if a node is not an array, then check if it should be an array
+        //also determine if it is a leaf node
+        if (options.isArray(property, newJpath, isLeaf )) {
+          compressedObj[property] = [val];
+        }else{
+          compressedObj[property] = val;
+        }
+      }
+    }
+    
+  }
+  // if(text && text.length > 0) compressedObj[options.textNodeName] = text;
+  if(typeof text === "string"){
+    if(text.length > 0) compressedObj[options.textNodeName] = text;
+  }else if(text !== undefined) compressedObj[options.textNodeName] = text;
+  return compressedObj;
+}
+
+function propName(obj){
+  const keys = Object.keys(obj);
+  for (let i = 0; i < keys.length; i++) {
+    const key = keys[i];
+    if(key !== ":@") return key;
+  }
+}
+
+function assignAttributes(obj, attrMap, jpath, options){
+  if (attrMap) {
+    const keys = Object.keys(attrMap);
+    const len = keys.length; //don't make it inline
+    for (let i = 0; i < len; i++) {
+      const atrrName = keys[i];
+      if (options.isArray(atrrName, jpath + "." + atrrName, true, true)) {
+        obj[atrrName] = [ attrMap[atrrName] ];
+      } else {
+        obj[atrrName] = attrMap[atrrName];
+      }
+    }
+  }
+}
+
+function isLeafTag(obj, options){
+  const { textNodeName } = options;
+  const propCount = Object.keys(obj).length;
+  
+  if (propCount === 0) {
+    return true;
+  }
+
+  if (
+    propCount === 1 &&
+    (obj[textNodeName] || typeof obj[textNodeName] === "boolean" || obj[textNodeName] === 0)
+  ) {
+    return true;
+  }
+
+  return false;
+}
+exports.prettify = prettify;
+
+
+/***/ }),
+
+/***/ 5958:
+/***/ ((module) => {
+
+
+
+class XmlNode{
+  constructor(tagname) {
+    this.tagname = tagname;
+    this.child = []; //nested tags, text, cdata, comments in order
+    this[":@"] = {}; //attributes map
+  }
+  add(key,val){
+    // this.child.push( {name : key, val: val, isCdata: isCdata });
+    if(key === "__proto__") key = "#__proto__";
+    this.child.push( {[key]: val });
+  }
+  addChild(node) {
+    if(node.tagname === "__proto__") node.tagname = "#__proto__";
+    if(node[":@"] && Object.keys(node[":@"]).length > 0){
+      this.child.push( { [node.tagname]: node.child, [":@"]: node[":@"] });
+    }else{
+      this.child.push( { [node.tagname]: node.child });
+    }
+  };
+};
+
+
+module.exports = XmlNode;
+
+/***/ }),
+
+/***/ 1330:
+/***/ ((module) => {
+
+
+
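+// Reports whether the given CLI flag appears in argv before the '--'
+// terminator; a '-' or '--' prefix is added automatically when missing.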
+module.exports = (flag, argv = process.argv) => {
+	const prefix = flag.startsWith('-') ? '' : (flag.length === 1 ? '-' : '--');
+	const position = argv.indexOf(prefix + flag);
+	const terminatorPosition = argv.indexOf('--');
+	return position !== -1 && (terminatorPosition === -1 || position < terminatorPosition);
+};
+
+
+/***/ }),
+
+/***/ 9038:
+/***/ ((module) => {
+
+
+// rfc7231 6.1
+const statusCodeCacheableByDefault = new Set([
+    200,
+    203,
+    204,
+    206,
+    300,
+    301,
+    308,
+    404,
+    405,
+    410,
+    414,
+    501,
+]);
+
+// This implementation does not understand partial responses (206)
+const understoodStatuses = new Set([
+    200,
+    203,
+    204,
+    300,
+    301,
+    302,
+    303,
+    307,
+    308,
+    404,
+    405,
+    410,
+    414,
+    501,
+]);
+
+const errorStatusCodes = new Set([
+    500,
+    502,
+    503, 
+    504,
+]);
+
+const hopByHopHeaders = {
+    date: true, // included, because we add Age and update Date
+    connection: true,
+    'keep-alive': true,
+    'proxy-authenticate': true,
+    'proxy-authorization': true,
+    te: true,
+    trailer: true,
+    'transfer-encoding': true,
+    upgrade: true,
+};
+
+const excludedFromRevalidationUpdate = {
+    // Since the old body is reused, it doesn't make sense to change properties of the body
+    'content-length': true,
+    'content-encoding': true,
+    'transfer-encoding': true,
+    'content-range': true,
+};
+
+function toNumberOrZero(s) {
+    const n = parseInt(s, 10);
+    return isFinite(n) ? n : 0;
+}
+
+// RFC 5861
+function isErrorResponse(response) {
+    // consider undefined response as faulty
+    if(!response) {
+        return true
+    }
+    return errorStatusCodes.has(response.status);
+}
+
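+// Parses a Cache-Control header into a map of directive -> value, using `true`
+// for valueless directives and stripping surrounding quotes from values.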
+function parseCacheControl(header) {
+    const cc = {};
+    if (!header) return cc;
+
+    // TODO: When there is more than one value present for a given directive (e.g., two Expires header fields, multiple Cache-Control: max-age directives),
+    // the directive's value is considered invalid. Caches are encouraged to consider responses that have invalid freshness information to be stale
+    const parts = header.trim().split(/,/);
+    for (const part of parts) {
+        const [k, v] = part.split(/=/, 2);
+        cc[k.trim()] = v === undefined ? true : v.trim().replace(/^"|"$/g, '');
+    }
+
+    return cc;
+}
+
+function formatCacheControl(cc) {
+    let parts = [];
+    for (const k in cc) {
+        const v = cc[k];
+        parts.push(v === true ? k : k + '=' + v);
+    }
+    if (!parts.length) {
+        return undefined;
+    }
+    return parts.join(', ');
+}
+
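+// RFC 7234-style cache policy: given a request/response pair it decides whether
+// the response may be stored, how long it stays fresh, and which headers to
+// send when revalidating it.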
+module.exports = class CachePolicy {
+    constructor(
+        req,
+        res,
+        {
+            shared,
+            cacheHeuristic,
+            immutableMinTimeToLive,
+            ignoreCargoCult,
+            _fromObject,
+        } = {}
+    ) {
+        if (_fromObject) {
+            this._fromObject(_fromObject);
+            return;
+        }
+
+        if (!res || !res.headers) {
+            throw Error('Response headers missing');
+        }
+        this._assertRequestHasHeaders(req);
+
+        this._responseTime = this.now();
+        this._isShared = shared !== false;
+        this._cacheHeuristic =
+            undefined !== cacheHeuristic ? cacheHeuristic : 0.1; // 10% matches IE
+        this._immutableMinTtl =
+            undefined !== immutableMinTimeToLive
+                ? immutableMinTimeToLive
+                : 24 * 3600 * 1000;
+
+        this._status = 'status' in res ? res.status : 200;
+        this._resHeaders = res.headers;
+        this._rescc = parseCacheControl(res.headers['cache-control']);
+        this._method = 'method' in req ? req.method : 'GET';
+        this._url = req.url;
+        this._host = req.headers.host;
+        this._noAuthorization = !req.headers.authorization;
+        this._reqHeaders = res.headers.vary ? req.headers : null; // Don't keep all request headers if they won't be used
+        this._reqcc = parseCacheControl(req.headers['cache-control']);
+
+        // Assume that if someone uses legacy, non-standard, unnecessary options they don't understand caching,
+        // so there's no point strictly adhering to the blindly copy-pasted directives.
+        if (
+            ignoreCargoCult &&
+            'pre-check' in this._rescc &&
+            'post-check' in this._rescc
+        ) {
+            delete this._rescc['pre-check'];
+            delete this._rescc['post-check'];
+            delete this._rescc['no-cache'];
+            delete this._rescc['no-store'];
+            delete this._rescc['must-revalidate'];
+            this._resHeaders = Object.assign({}, this._resHeaders, {
+                'cache-control': formatCacheControl(this._rescc),
+            });
+            delete this._resHeaders.expires;
+            delete this._resHeaders.pragma;
+        }
+
+        // When the Cache-Control header field is not present in a request, caches MUST consider the no-cache request pragma-directive
+        // as having the same effect as if "Cache-Control: no-cache" were present (see Section 5.2.1).
+        if (
+            res.headers['cache-control'] == null &&
+            /no-cache/.test(res.headers.pragma)
+        ) {
+            this._rescc['no-cache'] = true;
+        }
+    }
+
+    now() {
+        return Date.now();
+    }
+
+    storable() {
+        // The "no-store" request directive indicates that a cache MUST NOT store any part of either this request or any response to it.
+        return !!(
+            !this._reqcc['no-store'] &&
+            // A cache MUST NOT store a response to any request, unless:
+            // The request method is understood by the cache and defined as being cacheable, and
+            ('GET' === this._method ||
+                'HEAD' === this._method ||
+                ('POST' === this._method && this._hasExplicitExpiration())) &&
+            // the response status code is understood by the cache, and
+            understoodStatuses.has(this._status) &&
+            // the "no-store" cache directive does not appear in request or response header fields, and
+            !this._rescc['no-store'] &&
+            // the "private" response directive does not appear in the response, if the cache is shared, and
+            (!this._isShared || !this._rescc.private) &&
+            // the Authorization header field does not appear in the request, if the cache is shared,
+            (!this._isShared ||
+                this._noAuthorization ||
+                this._allowsStoringAuthenticated()) &&
+            // the response either:
+            // contains an Expires header field, or
+            (this._resHeaders.expires ||
+                // contains a max-age response directive, or
+                // contains a s-maxage response directive and the cache is shared, or
+                // contains a public response directive.
+                this._rescc['max-age'] ||
+                (this._isShared && this._rescc['s-maxage']) ||
+                this._rescc.public ||
+                // has a status code that is defined as cacheable by default
+                statusCodeCacheableByDefault.has(this._status))
+        );
+    }
+
+    _hasExplicitExpiration() {
+        // 4.2.1 Calculating Freshness Lifetime
+        return (
+            (this._isShared && this._rescc['s-maxage']) ||
+            this._rescc['max-age'] ||
+            this._resHeaders.expires
+        );
+    }
+
+    _assertRequestHasHeaders(req) {
+        if (!req || !req.headers) {
+            throw Error('Request headers missing');
+        }
+    }
+
+    satisfiesWithoutRevalidation(req) {
+        this._assertRequestHasHeaders(req);
+
+        // When presented with a request, a cache MUST NOT reuse a stored response, unless:
+        // the presented request does not contain the no-cache pragma (Section 5.4), nor the no-cache cache directive,
+        // unless the stored response is successfully validated (Section 4.3), and
+        const requestCC = parseCacheControl(req.headers['cache-control']);
+        if (requestCC['no-cache'] || /no-cache/.test(req.headers.pragma)) {
+            return false;
+        }
+
+        if (requestCC['max-age'] && this.age() > requestCC['max-age']) {
+            return false;
+        }
+
+        if (
+            requestCC['min-fresh'] &&
+            this.timeToLive() < 1000 * requestCC['min-fresh']
+        ) {
+            return false;
+        }
+
+        // the stored response is either:
+        // fresh, or allowed to be served stale
+        if (this.stale()) {
+            const allowsStale =
+                requestCC['max-stale'] &&
+                !this._rescc['must-revalidate'] &&
+                (true === requestCC['max-stale'] ||
+                    requestCC['max-stale'] > this.age() - this.maxAge());
+            if (!allowsStale) {
+                return false;
+            }
+        }
+
+        return this._requestMatches(req, false);
+    }
+
+    _requestMatches(req, allowHeadMethod) {
+        // The presented effective request URI and that of the stored response match, and
+        return (
+            (!this._url || this._url === req.url) &&
+            this._host === req.headers.host &&
+            // the request method associated with the stored response allows it to be used for the presented request, and
+            (!req.method ||
+                this._method === req.method ||
+                (allowHeadMethod && 'HEAD' === req.method)) &&
+            // selecting header fields nominated by the stored response (if any) match those presented, and
+            this._varyMatches(req)
+        );
+    }
+
+    _allowsStoringAuthenticated() {
+        //  following Cache-Control response directives (Section 5.2.2) have such an effect: must-revalidate, public, and s-maxage.
+        return (
+            this._rescc['must-revalidate'] ||
+            this._rescc.public ||
+            this._rescc['s-maxage']
+        );
+    }
+
+    _varyMatches(req) {
+        if (!this._resHeaders.vary) {
+            return true;
+        }
+
+        // A Vary header field-value of "*" always fails to match
+        if (this._resHeaders.vary === '*') {
+            return false;
+        }
+
+        const fields = this._resHeaders.vary
+            .trim()
+            .toLowerCase()
+            .split(/\s*,\s*/);
+        for (const name of fields) {
+            if (req.headers[name] !== this._reqHeaders[name]) return false;
+        }
+        return true;
+    }
+
+    _copyWithoutHopByHopHeaders(inHeaders) {
+        const headers = {};
+        for (const name in inHeaders) {
+            if (hopByHopHeaders[name]) continue;
+            headers[name] = inHeaders[name];
+        }
+        // 9.1.  Connection
+        if (inHeaders.connection) {
+            const tokens = inHeaders.connection.trim().split(/\s*,\s*/);
+            for (const name of tokens) {
+                delete headers[name];
+            }
+        }
+        if (headers.warning) {
+            const warnings = headers.warning.split(/,/).filter(warning => {
+                return !/^\s*1[0-9][0-9]/.test(warning);
+            });
+            if (!warnings.length) {
+                delete headers.warning;
+            } else {
+                headers.warning = warnings.join(',').trim();
+            }
+        }
+        return headers;
+    }
+
+    responseHeaders() {
+        const headers = this._copyWithoutHopByHopHeaders(this._resHeaders);
+        const age = this.age();
+
+        // A cache SHOULD generate a 113 warning if it heuristically chose a freshness
+        // lifetime greater than 24 hours and the response's age is greater than 24 hours.
+        if (
+            age > 3600 * 24 &&
+            !this._hasExplicitExpiration() &&
+            this.maxAge() > 3600 * 24
+        ) {
+            headers.warning =
+                (headers.warning ? `${headers.warning}, ` : '') +
+                '113 - "rfc7234 5.5.4"';
+        }
+        headers.age = `${Math.round(age)}`;
+        headers.date = new Date(this.now()).toUTCString();
+        return headers;
+    }
+
+    /**
+     * Value of the Date response header or current time if Date was invalid
+     * @return timestamp
+     */
+    date() {
+        const serverDate = Date.parse(this._resHeaders.date);
+        if (isFinite(serverDate)) {
+            return serverDate;
+        }
+        return this._responseTime;
+    }
+
+    /**
+     * Value of the Age header, in seconds, updated for the current time.
+     * May be fractional.
+     *
+     * @return Number
+     */
+    age() {
+        let age = this._ageValue();
+
+        const residentTime = (this.now() - this._responseTime) / 1000;
+        return age + residentTime;
+    }
+
+    _ageValue() {
+        return toNumberOrZero(this._resHeaders.age);
+    }
+
+    /**
+     * Value of applicable max-age (or heuristic equivalent) in seconds. This counts since response's `Date`.
+     *
+     * For an up-to-date value, see `timeToLive()`.
+     *
+     * @return Number
+     */
+    maxAge() {
+        if (!this.storable() || this._rescc['no-cache']) {
+            return 0;
+        }
+
+        // Shared responses with cookies are cacheable according to the RFC, but IMHO it'd be unwise to do so by default
+        // so this implementation requires explicit opt-in via public header
+        if (
+            this._isShared &&
+            (this._resHeaders['set-cookie'] &&
+                !this._rescc.public &&
+                !this._rescc.immutable)
+        ) {
+            return 0;
+        }
+
+        if (this._resHeaders.vary === '*') {
+            return 0;
+        }
+
+        if (this._isShared) {
+            if (this._rescc['proxy-revalidate']) {
+                return 0;
+            }
+            // if a response includes the s-maxage directive, a shared cache recipient MUST ignore the Expires field.
+            if (this._rescc['s-maxage']) {
+                return toNumberOrZero(this._rescc['s-maxage']);
+            }
+        }
+
+        // If a response includes a Cache-Control field with the max-age directive, a recipient MUST ignore the Expires field.
+        if (this._rescc['max-age']) {
+            return toNumberOrZero(this._rescc['max-age']);
+        }
+
+        const defaultMinTtl = this._rescc.immutable ? this._immutableMinTtl : 0;
+
+        const serverDate = this.date();
+        if (this._resHeaders.expires) {
+            const expires = Date.parse(this._resHeaders.expires);
+            // A cache recipient MUST interpret invalid date formats, especially the value "0", as representing a time in the past (i.e., "already expired").
+            if (Number.isNaN(expires) || expires < serverDate) {
+                return 0;
+            }
+            return Math.max(defaultMinTtl, (expires - serverDate) / 1000);
+        }
+
+        if (this._resHeaders['last-modified']) {
+            const lastModified = Date.parse(this._resHeaders['last-modified']);
+            if (isFinite(lastModified) && serverDate > lastModified) {
+                return Math.max(
+                    defaultMinTtl,
+                    ((serverDate - lastModified) / 1000) * this._cacheHeuristic
+                );
+            }
+        }
+
+        return defaultMinTtl;
+    }
+
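+    // Remaining lifetime in milliseconds, extended by any stale-if-error and
+    // stale-while-revalidate allowances present in the response.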
+    timeToLive() {
+        const age = this.maxAge() - this.age();
+        const staleIfErrorAge = age + toNumberOrZero(this._rescc['stale-if-error']);
+        const staleWhileRevalidateAge = age + toNumberOrZero(this._rescc['stale-while-revalidate']);
+        return Math.max(0, age, staleIfErrorAge, staleWhileRevalidateAge) * 1000;
+    }
+
+    stale() {
+        return this.maxAge() <= this.age();
+    }
+
+    _useStaleIfError() {
+        return this.maxAge() + toNumberOrZero(this._rescc['stale-if-error']) > this.age();
+    }
+
+    useStaleWhileRevalidate() {
+        return this.maxAge() + toNumberOrZero(this._rescc['stale-while-revalidate']) > this.age();
+    }
+
+    static fromObject(obj) {
+        return new this(undefined, undefined, { _fromObject: obj });
+    }
+
+    _fromObject(obj) {
+        if (this._responseTime) throw Error('Reinitialized');
+        if (!obj || obj.v !== 1) throw Error('Invalid serialization');
+
+        this._responseTime = obj.t;
+        this._isShared = obj.sh;
+        this._cacheHeuristic = obj.ch;
+        this._immutableMinTtl =
+            obj.imm !== undefined ? obj.imm : 24 * 3600 * 1000;
+        this._status = obj.st;
+        this._resHeaders = obj.resh;
+        this._rescc = obj.rescc;
+        this._method = obj.m;
+        this._url = obj.u;
+        this._host = obj.h;
+        this._noAuthorization = obj.a;
+        this._reqHeaders = obj.reqh;
+        this._reqcc = obj.reqcc;
+    }
+
+    toObject() {
+        return {
+            v: 1,
+            t: this._responseTime,
+            sh: this._isShared,
+            ch: this._cacheHeuristic,
+            imm: this._immutableMinTtl,
+            st: this._status,
+            resh: this._resHeaders,
+            rescc: this._rescc,
+            m: this._method,
+            u: this._url,
+            h: this._host,
+            a: this._noAuthorization,
+            reqh: this._reqHeaders,
+            reqcc: this._reqcc,
+        };
+    }
+
+    /**
+     * Headers for sending to the origin server to revalidate a stale response.
+     * Allows the server to return 304 so that the previous response body can be reused.
+     *
+     * Hop by hop headers are always stripped.
+     * Revalidation headers may be added or removed, depending on request.
+     */
+    revalidationHeaders(incomingReq) {
+        this._assertRequestHasHeaders(incomingReq);
+        const headers = this._copyWithoutHopByHopHeaders(incomingReq.headers);
+
+        // This implementation does not understand range requests
+        delete headers['if-range'];
+
+        if (!this._requestMatches(incomingReq, true) || !this.storable()) {
+            // revalidation allowed via HEAD
+            // not for the same resource, or wasn't allowed to be cached anyway
+            delete headers['if-none-match'];
+            delete headers['if-modified-since'];
+            return headers;
+        }
+
+        /* MUST send that entity-tag in any cache validation request (using If-Match or If-None-Match) if an entity-tag has been provided by the origin server. */
+        if (this._resHeaders.etag) {
+            headers['if-none-match'] = headers['if-none-match']
+                ? `${headers['if-none-match']}, ${this._resHeaders.etag}`
+                : this._resHeaders.etag;
+        }
+
+        // Clients MAY issue simple (non-subrange) GET requests with either weak validators or strong validators. Clients MUST NOT use weak validators in other forms of request.
+        const forbidsWeakValidators =
+            headers['accept-ranges'] ||
+            headers['if-match'] ||
+            headers['if-unmodified-since'] ||
+            (this._method && this._method != 'GET');
+
+        /* SHOULD send the Last-Modified value in non-subrange cache validation requests (using If-Modified-Since) if only a Last-Modified value has been provided by the origin server.
+        Note: This implementation does not understand partial responses (206) */
+        if (forbidsWeakValidators) {
+            delete headers['if-modified-since'];
+
+            if (headers['if-none-match']) {
+                const etags = headers['if-none-match']
+                    .split(/,/)
+                    .filter(etag => {
+                        return !/^\s*W\//.test(etag);
+                    });
+                if (!etags.length) {
+                    delete headers['if-none-match'];
+                } else {
+                    headers['if-none-match'] = etags.join(',').trim();
+                }
+            }
+        } else if (
+            this._resHeaders['last-modified'] &&
+            !headers['if-modified-since']
+        ) {
+            headers['if-modified-since'] = this._resHeaders['last-modified'];
+        }
+
+        return headers;
+    }
+
+    /**
+     * Creates a new CachePolicy with information combined from the previous response
+     * and the new revalidation response.
+     *
+     * Returns {policy, modified} where modified is a boolean indicating
+     * whether the response body has been modified, in which case the old cached body can't be used.
+     *
+     * @return {Object} {policy: CachePolicy, modified: Boolean}
+     */
+    revalidatedPolicy(request, response) {
+        this._assertRequestHasHeaders(request);
+        if(this._useStaleIfError() && isErrorResponse(response)) {  // I consider the revalidation request unsuccessful
+          return {
+            modified: false,
+            matches: false,
+            policy: this,
+          };
+        }
+        if (!response || !response.headers) {
+            throw Error('Response headers missing');
+        }
+
+        // These aren't going to be supported exactly, since one CachePolicy object
+        // doesn't know about all the other cached objects.
+        let matches = false;
+        if (response.status !== undefined && response.status != 304) {
+            matches = false;
+        } else if (
+            response.headers.etag &&
+            !/^\s*W\//.test(response.headers.etag)
+        ) {
+            // "All of the stored responses with the same strong validator are selected.
+            // If none of the stored responses contain the same strong validator,
+            // then the cache MUST NOT use the new response to update any stored responses."
+            matches =
+                this._resHeaders.etag &&
+                this._resHeaders.etag.replace(/^\s*W\//, '') ===
+                    response.headers.etag;
+        } else if (this._resHeaders.etag && response.headers.etag) {
+            // "If the new response contains a weak validator and that validator corresponds
+            // to one of the cache's stored responses,
+            // then the most recent of those matching stored responses is selected for update."
+            matches =
+                this._resHeaders.etag.replace(/^\s*W\//, '') ===
+                response.headers.etag.replace(/^\s*W\//, '');
+        } else if (this._resHeaders['last-modified']) {
+            matches =
+                this._resHeaders['last-modified'] ===
+                response.headers['last-modified'];
+        } else {
+            // If the new response does not include any form of validator (such as in the case where
+            // a client generates an If-Modified-Since request from a source other than the Last-Modified
+            // response header field), and there is only one stored response, and that stored response also
+            // lacks a validator, then that stored response is selected for update.
+            if (
+                !this._resHeaders.etag &&
+                !this._resHeaders['last-modified'] &&
+                !response.headers.etag &&
+                !response.headers['last-modified']
+            ) {
+                matches = true;
+            }
+        }
+
+        if (!matches) {
+            return {
+                policy: new this.constructor(request, response),
+                // A client receiving a 304 without a body, even if it's invalid/mismatched, has no option
+                // but to reuse a cached body. We don't have a good way to tell clients to do
+                // error recovery in such a case.
+                modified: response.status != 304,
+                matches: false,
+            };
+        }
+
+        // use other header fields provided in the 304 (Not Modified) response to replace all instances
+        // of the corresponding header fields in the stored response.
+        const headers = {};
+        for (const k in this._resHeaders) {
+            headers[k] =
+                k in response.headers && !excludedFromRevalidationUpdate[k]
+                    ? response.headers[k]
+                    : this._resHeaders[k];
+        }
+
+        const newResponse = Object.assign({}, response, {
+            status: this._status,
+            method: this._method,
+            headers,
+        });
+        return {
+            policy: new this.constructor(request, newResponse, {
+                shared: this._isShared,
+                cacheHeuristic: this._cacheHeuristic,
+                immutableMinTimeToLive: this._immutableMinTtl,
+            }),
+            modified: false,
+            matches: true,
+        };
+    }
+};
+
+
+/***/ }),
+
+/***/ 4702:
+/***/ (function(__unused_webpack_module, exports, __nccwpck_require__) {
+
+
+var __createBinding = (this && this.__createBinding) || (Object.create ? (function(o, m, k, k2) {
+    if (k2 === undefined) k2 = k;
+    var desc = Object.getOwnPropertyDescriptor(m, k);
+    if (!desc || ("get" in desc ? !m.__esModule : desc.writable || desc.configurable)) {
+      desc = { enumerable: true, get: function() { return m[k]; } };
+    }
+    Object.defineProperty(o, k2, desc);
+}) : (function(o, m, k, k2) {
+    if (k2 === undefined) k2 = k;
+    o[k2] = m[k];
+}));
+var __setModuleDefault = (this && this.__setModuleDefault) || (Object.create ? (function(o, v) {
+    Object.defineProperty(o, "default", { enumerable: true, value: v });
+}) : function(o, v) {
+    o["default"] = v;
+});
+var __importStar = (this && this.__importStar) || function (mod) {
+    if (mod && mod.__esModule) return mod;
+    var result = {};
+    if (mod != null) for (var k in mod) if (k !== "default" && Object.prototype.hasOwnProperty.call(mod, k)) __createBinding(result, mod, k);
+    __setModuleDefault(result, mod);
+    return result;
+};
+var __importDefault = (this && this.__importDefault) || function (mod) {
+    return (mod && mod.__esModule) ? mod : { "default": mod };
+};
+Object.defineProperty(exports, "__esModule", ({ value: true }));
+exports.HttpProxyAgent = void 0;
+const net = __importStar(__nccwpck_require__(9278));
+const tls = __importStar(__nccwpck_require__(4756));
+const debug_1 = __importDefault(__nccwpck_require__(8441));
+const events_1 = __nccwpck_require__(4434);
+const agent_base_1 = __nccwpck_require__(3583);
+const url_1 = __nccwpck_require__(7016);
+const debug = (0, debug_1.default)('http-proxy-agent');
+/**
+ * The `HttpProxyAgent` implements an HTTP Agent subclass that connects
+ * to the specified "HTTP proxy server" in order to proxy HTTP requests.
+ */
+class HttpProxyAgent extends agent_base_1.Agent {
+    constructor(proxy, opts) {
+        super(opts);
+        this.proxy = typeof proxy === 'string' ? new url_1.URL(proxy) : proxy;
+        this.proxyHeaders = opts?.headers ?? {};
+        debug('Creating new HttpProxyAgent instance: %o', this.proxy.href);
+        // Trim off the brackets from IPv6 addresses
+        const host = (this.proxy.hostname || this.proxy.host).replace(/^\[|\]$/g, '');
+        const port = this.proxy.port
+            ? parseInt(this.proxy.port, 10)
+            : this.proxy.protocol === 'https:'
+                ? 443
+                : 80;
+        this.connectOpts = {
+            ...(opts ? omit(opts, 'headers') : null),
+            host,
+            port,
+        };
+    }
+    addRequest(req, opts) {
+        req._header = null;
+        this.setRequestProps(req, opts);
+        // @ts-expect-error `addRequest()` isn't defined in `@types/node`
+        super.addRequest(req, opts);
+    }
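+    // Rewrites the request for use with a forward proxy: the path becomes an
+    // absolute URL and Proxy-Authorization / Proxy-Connection headers are
+    // injected when applicable.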
+    setRequestProps(req, opts) {
+        const { proxy } = this;
+        const protocol = opts.secureEndpoint ? 'https:' : 'http:';
+        const hostname = req.getHeader('host') || 'localhost';
+        const base = `${protocol}//${hostname}`;
+        const url = new url_1.URL(req.path, base);
+        if (opts.port !== 80) {
+            url.port = String(opts.port);
+        }
+        // Change the `http.ClientRequest` instance's "path" field
+        // to the absolute path of the URL that will be requested.
+        req.path = String(url);
+        // Inject the `Proxy-Authorization` header if necessary.
+        const headers = typeof this.proxyHeaders === 'function'
+            ? this.proxyHeaders()
+            : { ...this.proxyHeaders };
+        if (proxy.username || proxy.password) {
+            const auth = `${decodeURIComponent(proxy.username)}:${decodeURIComponent(proxy.password)}`;
+            headers['Proxy-Authorization'] = `Basic ${Buffer.from(auth).toString('base64')}`;
+        }
+        if (!headers['Proxy-Connection']) {
+            headers['Proxy-Connection'] = this.keepAlive
+                ? 'Keep-Alive'
+                : 'close';
+        }
+        for (const name of Object.keys(headers)) {
+            const value = headers[name];
+            if (value) {
+                req.setHeader(name, value);
+            }
+        }
+    }
+    async connect(req, opts) {
+        req._header = null;
+        if (!req.path.includes('://')) {
+            this.setRequestProps(req, opts);
+        }
+        // At this point, the http ClientRequest's internal `_header` field
+        // might have already been set. If this is the case then we'll need
+        // to re-generate the string since we just changed the `req.path`.
+        let first;
+        let endOfHeaders;
+        debug('Regenerating stored HTTP header string for request');
+        req._implicitHeader();
+        if (req.outputData && req.outputData.length > 0) {
+            debug('Patching connection write() output buffer with updated header');
+            first = req.outputData[0].data;
+            endOfHeaders = first.indexOf('\r\n\r\n') + 4;
+            req.outputData[0].data =
+                req._header + first.substring(endOfHeaders);
+            debug('Output buffer: %o', req.outputData[0].data);
+        }
+        // Create a socket connection to the proxy server.
+        let socket;
+        if (this.proxy.protocol === 'https:') {
+            debug('Creating `tls.Socket`: %o', this.connectOpts);
+            socket = tls.connect(this.connectOpts);
+        }
+        else {
+            debug('Creating `net.Socket`: %o', this.connectOpts);
+            socket = net.connect(this.connectOpts);
+        }
+        // Wait for the socket's `connect` event, so that this `callback()`
+        // function throws instead of the `http` request machinery. This is
+        // important for i.e. `PacProxyAgent` which determines a failed proxy
+        // connection via the `callback()` function throwing.
+        await (0, events_1.once)(socket, 'connect');
+        return socket;
+    }
+}
+HttpProxyAgent.protocols = ['http', 'https'];
+exports.HttpProxyAgent = HttpProxyAgent;
+function omit(obj, ...keys) {
+    const ret = {};
+    let key;
+    for (key in obj) {
+        if (!keys.includes(key)) {
+            ret[key] = obj[key];
+        }
+    }
+    return ret;
+}
+//# sourceMappingURL=index.js.map
+
+/***/ }),
+
+/***/ 3756:
+/***/ ((module, __unused_webpack_exports, __nccwpck_require__) => {
+
+
+// See https://github.com/facebook/jest/issues/2549
+// eslint-disable-next-line node/prefer-global/url
+const {URL} = __nccwpck_require__(7016);
+const EventEmitter = __nccwpck_require__(4434);
+const tls = __nccwpck_require__(4756);
+const http2 = __nccwpck_require__(5675);
+const QuickLRU = __nccwpck_require__(7326);
+const delayAsyncDestroy = __nccwpck_require__(5086);
+
+const kCurrentStreamCount = Symbol('currentStreamCount');
+const kRequest = Symbol('request');
+const kOriginSet = Symbol('cachedOriginSet');
+const kGracefullyClosing = Symbol('gracefullyClosing');
+const kLength = Symbol('length');
+
+const nameKeys = [
+	// Not an Agent option actually
+	'createConnection',
+
+	// `http2.connect()` options
+	'maxDeflateDynamicTableSize',
+	'maxSettings',
+	'maxSessionMemory',
+	'maxHeaderListPairs',
+	'maxOutstandingPings',
+	'maxReservedRemoteStreams',
+	'maxSendHeaderBlockLength',
+	'paddingStrategy',
+	'peerMaxConcurrentStreams',
+	'settings',
+
+	// `tls.connect()` source options
+	'family',
+	'localAddress',
+	'rejectUnauthorized',
+
+	// `tls.connect()` secure context options
+	'pskCallback',
+	'minDHSize',
+
+	// `tls.connect()` destination options
+	// - `servername` is automatically validated, skip it
+	// - `host` and `port` just describe the destination server,
+	'path',
+	'socket',
+
+	// `tls.createSecureContext()` options
+	'ca',
+	'cert',
+	'sigalgs',
+	'ciphers',
+	'clientCertEngine',
+	'crl',
+	'dhparam',
+	'ecdhCurve',
+	'honorCipherOrder',
+	'key',
+	'privateKeyEngine',
+	'privateKeyIdentifier',
+	'maxVersion',
+	'minVersion',
+	'pfx',
+	'secureOptions',
+	'secureProtocol',
+	'sessionIdContext',
+	'ticketKeys'
+];
+
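+// Binary search: returns the index at which `value` should be inserted into
+// the sorted `array` according to `compare`.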
+const getSortedIndex = (array, value, compare) => {
+	let low = 0;
+	let high = array.length;
+
+	while (low < high) {
+		const mid = (low + high) >>> 1;
+
+		if (compare(array[mid], value)) {
+			low = mid + 1;
+		} else {
+			high = mid;
+		}
+	}
+
+	return low;
+};
+
+const compareSessions = (a, b) => a.remoteSettings.maxConcurrentStreams > b.remoteSettings.maxConcurrentStreams;
+
+// See https://tools.ietf.org/html/rfc8336
+const closeCoveredSessions = (where, session) => {
+	// Clients SHOULD NOT emit new requests on any connection whose Origin
+	// Set is a proper subset of another connection's Origin Set, and they
+	// SHOULD close it once all outstanding requests are satisfied.
+	for (let index = 0; index < where.length; index++) {
+		const coveredSession = where[index];
+
+		if (
+			// Unfortunately `.every()` returns true for an empty array
+			coveredSession[kOriginSet].length > 0
+
+			// The set is a proper subset when its length is less than the other set.
+			&& coveredSession[kOriginSet].length < session[kOriginSet].length
+
+			// And the other set includes all elements of the subset.
+			&& coveredSession[kOriginSet].every(origin => session[kOriginSet].includes(origin))
+
+			// Makes sure that the session can handle all requests from the covered session.
+			&& (coveredSession[kCurrentStreamCount] + session[kCurrentStreamCount]) <= session.remoteSettings.maxConcurrentStreams
+		) {
+			// This allows pending requests to finish and prevents making new requests.
+			gracefullyClose(coveredSession);
+		}
+	}
+};
+
+// This is basically inverted `closeCoveredSessions(...)`.
+const closeSessionIfCovered = (where, coveredSession) => {
+	for (let index = 0; index < where.length; index++) {
+		const session = where[index];
+
+		if (
+			coveredSession[kOriginSet].length > 0
+			&& coveredSession[kOriginSet].length < session[kOriginSet].length
+			&& coveredSession[kOriginSet].every(origin => session[kOriginSet].includes(origin))
+			&& (coveredSession[kCurrentStreamCount] + session[kCurrentStreamCount]) <= session.remoteSettings.maxConcurrentStreams
+		) {
+			gracefullyClose(coveredSession);
+
+			return true;
+		}
+	}
+
+	return false;
+};
+
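+// Marks a session as gracefully closing; it is closed immediately only when it
+// currently has no active streams.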
+const gracefullyClose = session => {
+	session[kGracefullyClosing] = true;
+
+	if (session[kCurrentStreamCount] === 0) {
+		session.close();
+	}
+};
+
+class Agent extends EventEmitter {
+	constructor({timeout = 0, maxSessions = Number.POSITIVE_INFINITY, maxEmptySessions = 10, maxCachedTlsSessions = 100} = {}) {
+		super();
+
+		// SESSIONS[NORMALIZED_OPTIONS] = [];
+		this.sessions = {};
+
+		// The queue for creating new sessions. It looks like this:
+		// QUEUE[NORMALIZED_OPTIONS][NORMALIZED_ORIGIN] = ENTRY_FUNCTION
+		//
+		// It's faster when there are many origins. If there's only one, then QUEUE[`${options}:${origin}`] is faster.
+		// I guess object creation / deletion is causing the slowdown.
+		//
+		// The entry function has `listeners`, `completed` and `destroyed` properties.
+		// `listeners` is an array of objects containing `resolve` and `reject` functions.
+		// `completed` is a boolean. It's set to true after ENTRY_FUNCTION is executed.
+		// `destroyed` is a boolean. If it's set to true, the session will be destroyed if it hasn't connected yet.
+		this.queue = {};
+
+		// Each session will use this timeout value.
+		this.timeout = timeout;
+
+		// Max sessions in total
+		this.maxSessions = maxSessions;
+
+		// Max empty sessions in total
+		this.maxEmptySessions = maxEmptySessions;
+
+		this._emptySessionCount = 0;
+		this._sessionCount = 0;
+
+		// We don't support push streams by default.
+		this.settings = {
+			enablePush: false,
+			initialWindowSize: 1024 * 1024 * 32 // 32MB, see https://github.com/nodejs/node/issues/38426
+		};
+
+		// Reusing TLS sessions increases performance.
+		this.tlsSessionCache = new QuickLRU({maxSize: maxCachedTlsSessions});
+	}
+
+	get protocol() {
+		return 'https:';
+	}
+
+	normalizeOptions(options) {
+		let normalized = '';
+
+		for (let index = 0; index < nameKeys.length; index++) {
+			const key = nameKeys[index];
+
+			normalized += ':';
+
+			if (options && options[key] !== undefined) {
+				normalized += options[key];
+			}
+		}
+
+		return normalized;
+	}
+
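+	// Runs each queued session-creation entry at most once. When the session
+	// limit has been reached, empty sessions are closed instead to free capacity.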
+	_processQueue() {
+		if (this._sessionCount >= this.maxSessions) {
+			this.closeEmptySessions(this.maxSessions - this._sessionCount + 1);
+			return;
+		}
+
+		// eslint-disable-next-line guard-for-in
+		for (const normalizedOptions in this.queue) {
+			// eslint-disable-next-line guard-for-in
+			for (const normalizedOrigin in this.queue[normalizedOptions]) {
+				const item = this.queue[normalizedOptions][normalizedOrigin];
+
+				// The entry function can be run only once.
+				if (!item.completed) {
+					item.completed = true;
+
+					item();
+				}
+			}
+		}
+	}
+
+	_isBetterSession(thisStreamCount, thatStreamCount) {
+		return thisStreamCount > thatStreamCount;
+	}
+
+	_accept(session, listeners, normalizedOrigin, options) {
+		let index = 0;
+
+		while (index < listeners.length && session[kCurrentStreamCount] < session.remoteSettings.maxConcurrentStreams) {
+			// We assume `resolve(...)` calls `request(...)` *directly*,
+			// otherwise the session will get overloaded.
+			listeners[index].resolve(session);
+
+			index++;
+		}
+
+		listeners.splice(0, index);
+
+		if (listeners.length > 0) {
+			this.getSession(normalizedOrigin, options, listeners);
+			listeners.length = 0;
+		}
+	}
+
+	getSession(origin, options, listeners) {
+		return new Promise((resolve, reject) => {
+			if (Array.isArray(listeners) && listeners.length > 0) {
+				listeners = [...listeners];
+
+				// Resolve the current promise ASAP; we're just moving the listeners.
+				// They will be executed at a different time.
+				resolve();
+			} else {
+				listeners = [{resolve, reject}];
+			}
+
+			try {
+				// Parse origin
+				if (typeof origin === 'string') {
+					origin = new URL(origin);
+				} else if (!(origin instanceof URL)) {
+					throw new TypeError('The `origin` argument needs to be a string or an URL object');
+				}
+
+				if (options) {
+					// Validate servername
+					const {servername} = options;
+					const {hostname} = origin;
+					if (servername && hostname !== servername) {
+						throw new Error(`Origin ${hostname} differs from servername ${servername}`);
+					}
+				}
+			} catch (error) {
+				for (let index = 0; index < listeners.length; index++) {
+					listeners[index].reject(error);
+				}
+
+				return;
+			}
+
+			const normalizedOptions = this.normalizeOptions(options);
+			const normalizedOrigin = origin.origin;
+
+			if (normalizedOptions in this.sessions) {
+				const sessions = this.sessions[normalizedOptions];
+
+				let maxConcurrentStreams = -1;
+				let currentStreamsCount = -1;
+				let optimalSession;
+
+				// We could just do this.sessions[normalizedOptions].find(...) but that isn't optimal.
+				// Additionally, we are looking for the session which has the biggest current pending stream count.
+				//
+				// |------------| |------------| |------------| |------------|
+				// | Session: A | | Session: B | | Session: C | | Session: D |
+				// | Pending: 5 |-| Pending: 8 |-| Pending: 9 |-| Pending: 4 |
+				// | Max:    10 | | Max:    10 | | Max:     9 | | Max:     5 |
+				// |------------| |------------| |------------| |------------|
+				//                     ^
+				//                     |
+				//     pick this one  --
+				//
+				for (let index = 0; index < sessions.length; index++) {
+					const session = sessions[index];
+
+					const sessionMaxConcurrentStreams = session.remoteSettings.maxConcurrentStreams;
+
+					if (sessionMaxConcurrentStreams < maxConcurrentStreams) {
+						break;
+					}
+
+					if (!session[kOriginSet].includes(normalizedOrigin)) {
+						continue;
+					}
+
+					const sessionCurrentStreamsCount = session[kCurrentStreamCount];
+
+					if (
+						sessionCurrentStreamsCount >= sessionMaxConcurrentStreams
+						|| session[kGracefullyClosing]
+						// Unfortunately the `close` event isn't called immediately,
+						// so `session.destroyed` is `true`, but `session.closed` is `false`.
+						|| session.destroyed
+					) {
+						continue;
+					}
+
+					// We only need to set this once.
+					if (!optimalSession) {
+						maxConcurrentStreams = sessionMaxConcurrentStreams;
+					}
+
+					// Either get the session which has the biggest current stream count or the lowest one.
+					if (this._isBetterSession(sessionCurrentStreamsCount, currentStreamsCount)) {
+						optimalSession = session;
+						currentStreamsCount = sessionCurrentStreamsCount;
+					}
+				}
+
+				if (optimalSession) {
+					this._accept(optimalSession, listeners, normalizedOrigin, options);
+					return;
+				}
+			}
+
+			if (normalizedOptions in this.queue) {
+				if (normalizedOrigin in this.queue[normalizedOptions]) {
+					// There's already an item in the queue, just attach ourselves to it.
+					this.queue[normalizedOptions][normalizedOrigin].listeners.push(...listeners);
+					return;
+				}
+			} else {
+				this.queue[normalizedOptions] = {
+					[kLength]: 0
+				};
+			}
+
+			// The entry must be removed from the queue IMMEDIATELY when:
+			// 1. the session connects successfully,
+			// 2. an error occurs.
+			const removeFromQueue = () => {
+				// Our entry can be replaced. We cannot remove the new one.
+				if (normalizedOptions in this.queue && this.queue[normalizedOptions][normalizedOrigin] === entry) {
+					delete this.queue[normalizedOptions][normalizedOrigin];
+
+					if (--this.queue[normalizedOptions][kLength] === 0) {
+						delete this.queue[normalizedOptions];
+					}
+				}
+			};
+
+			// The main logic is here
+			const entry = async () => {
+				this._sessionCount++;
+
+				const name = `${normalizedOrigin}:${normalizedOptions}`;
+				let receivedSettings = false;
+				let socket;
+
+				try {
+					const computedOptions = {...options};
+
+					if (computedOptions.settings === undefined) {
+						computedOptions.settings = this.settings;
+					}
+
+					if (computedOptions.session === undefined) {
+						computedOptions.session = this.tlsSessionCache.get(name);
+					}
+
+					const createConnection = computedOptions.createConnection || this.createConnection;
+
+					// A hacky workaround to enable async `createConnection`
+					socket = await createConnection.call(this, origin, computedOptions);
+					computedOptions.createConnection = () => socket;
+
+					const session = http2.connect(origin, computedOptions);
+					session[kCurrentStreamCount] = 0;
+					session[kGracefullyClosing] = false;
+
+					// Node.js returns https://false:443 instead of https://1.1.1.1:443
+					const getOriginSet = () => {
+						const {socket} = session;
+
+						let originSet;
+						if (socket.servername === false) {
+							socket.servername = socket.remoteAddress;
+							originSet = session.originSet;
+							socket.servername = false;
+						} else {
+							originSet = session.originSet;
+						}
+
+						return originSet;
+					};
+
+					const isFree = () => session[kCurrentStreamCount] < session.remoteSettings.maxConcurrentStreams;
+
+					session.socket.once('session', tlsSession => {
+						this.tlsSessionCache.set(name, tlsSession);
+					});
+
+					session.once('error', error => {
+						// Listeners are empty once the session has successfully connected.
+						for (let index = 0; index < listeners.length; index++) {
+							listeners[index].reject(error);
+						}
+
+						// The connection got broken, purge the cache.
+						this.tlsSessionCache.delete(name);
+					});
+
+					session.setTimeout(this.timeout, () => {
+						// Terminates all streams owned by this session.
+						session.destroy();
+					});
+
+					session.once('close', () => {
+						this._sessionCount--;
+
+						if (receivedSettings) {
+							// Assumes session `close` is emitted after request `close`
+							this._emptySessionCount--;
+
+							// This cannot be moved to the stream logic,
+							// because there may be a session that hasn't made a single request.
+							const where = this.sessions[normalizedOptions];
+
+							if (where.length === 1) {
+								delete this.sessions[normalizedOptions];
+							} else {
+								where.splice(where.indexOf(session), 1);
+							}
+						} else {
+							// Broken connection
+							removeFromQueue();
+
+							const error = new Error('Session closed without receiving a SETTINGS frame');
+							error.code = 'HTTP2WRAPPER_NOSETTINGS';
+
+							for (let index = 0; index < listeners.length; index++) {
+								listeners[index].reject(error);
+							}
+						}
+
+						// There may be another session awaiting.
+						this._processQueue();
+					});
+
+					// Iterates over the queue and processes listeners.
+					const processListeners = () => {
+						const queue = this.queue[normalizedOptions];
+						if (!queue) {
+							return;
+						}
+
+						const originSet = session[kOriginSet];
+
+						for (let index = 0; index < originSet.length; index++) {
+							const origin = originSet[index];
+
+							if (origin in queue) {
+								const {listeners, completed} = queue[origin];
+
+								let index = 0;
+
+								// Prevents session overloading.
+								while (index < listeners.length && isFree()) {
+									// We assume `resolve(...)` calls `request(...)` *directly*,
+									// otherwise the session will get overloaded.
+									listeners[index].resolve(session);
+
+									index++;
+								}
+
+								queue[origin].listeners.splice(0, index);
+
+								if (queue[origin].listeners.length === 0 && !completed) {
+									delete queue[origin];
+
+									if (--queue[kLength] === 0) {
+										delete this.queue[normalizedOptions];
+										break;
+									}
+								}
+
+								// We're no longer free, no point in continuing.
+								if (!isFree()) {
+									break;
+								}
+							}
+						}
+					};
+
+					// The Origin Set cannot shrink. No need to check if it suddenly became covered by another one.
+					session.on('origin', () => {
+						session[kOriginSet] = getOriginSet() || [];
+						session[kGracefullyClosing] = false;
+						closeSessionIfCovered(this.sessions[normalizedOptions], session);
+
+						if (session[kGracefullyClosing] || !isFree()) {
+							return;
+						}
+
+						processListeners();
+
+						if (!isFree()) {
+							return;
+						}
+
+						// Close covered sessions (if possible).
+						closeCoveredSessions(this.sessions[normalizedOptions], session);
+					});
+
+					session.once('remoteSettings', () => {
+						// The Agent could have been destroyed already.
+						if (entry.destroyed) {
+							const error = new Error('Agent has been destroyed');
+
+							for (let index = 0; index < listeners.length; index++) {
+								listeners[index].reject(error);
+							}
+
+							session.destroy();
+							return;
+						}
+
+						// See https://github.com/nodejs/node/issues/38426
+						if (session.setLocalWindowSize) {
+							session.setLocalWindowSize(1024 * 1024 * 4); // 4 MB
+						}
+
+						session[kOriginSet] = getOriginSet() || [];
+
+						if (session.socket.encrypted) {
+							const mainOrigin = session[kOriginSet][0];
+							if (mainOrigin !== normalizedOrigin) {
+								const error = new Error(`Requested origin ${normalizedOrigin} does not match server ${mainOrigin}`);
+
+								for (let index = 0; index < listeners.length; index++) {
+									listeners[index].reject(error);
+								}
+
+								session.destroy();
+								return;
+							}
+						}
+
+						removeFromQueue();
+
+						{
+							const where = this.sessions;
+
+							if (normalizedOptions in where) {
+								const sessions = where[normalizedOptions];
+								sessions.splice(getSortedIndex(sessions, session, compareSessions), 0, session);
+							} else {
+								where[normalizedOptions] = [session];
+							}
+						}
+
+						receivedSettings = true;
+						this._emptySessionCount++;
+
+						this.emit('session', session);
+						this._accept(session, listeners, normalizedOrigin, options);
+
+						if (session[kCurrentStreamCount] === 0 && this._emptySessionCount > this.maxEmptySessions) {
+							this.closeEmptySessions(this._emptySessionCount - this.maxEmptySessions);
+						}
+
+						// `session.remoteSettings.maxConcurrentStreams` might get increased
+						session.on('remoteSettings', () => {
+							if (!isFree()) {
+								return;
+							}
+
+							processListeners();
+
+							if (!isFree()) {
+								return;
+							}
+
+							// In case the Origin Set changes
+							closeCoveredSessions(this.sessions[normalizedOptions], session);
+						});
+					});
+
+					// Shim `session.request()` in order to catch all streams
+					session[kRequest] = session.request;
+					session.request = (headers, streamOptions) => {
+						if (session[kGracefullyClosing]) {
+							throw new Error('The session is gracefully closing. No new streams are allowed.');
+						}
+
+						const stream = session[kRequest](headers, streamOptions);
+
+						// The process won't exit until the session is closed or all requests are gone.
+						session.ref();
+
+						if (session[kCurrentStreamCount]++ === 0) {
+							this._emptySessionCount--;
+						}
+
+						stream.once('close', () => {
+							if (--session[kCurrentStreamCount] === 0) {
+								this._emptySessionCount++;
+								session.unref();
+
+								if (this._emptySessionCount > this.maxEmptySessions || session[kGracefullyClosing]) {
+									session.close();
+									return;
+								}
+							}
+
+							if (session.destroyed || session.closed) {
+								return;
+							}
+
+							if (isFree() && !closeSessionIfCovered(this.sessions[normalizedOptions], session)) {
+								closeCoveredSessions(this.sessions[normalizedOptions], session);
+								processListeners();
+
+								if (session[kCurrentStreamCount] === 0) {
+									this._processQueue();
+								}
+							}
+						});
+
+						return stream;
+					};
+				} catch (error) {
+					removeFromQueue();
+					this._sessionCount--;
+
+					for (let index = 0; index < listeners.length; index++) {
+						listeners[index].reject(error);
+					}
+				}
+			};
+
+			entry.listeners = listeners;
+			entry.completed = false;
+			entry.destroyed = false;
+
+			this.queue[normalizedOptions][normalizedOrigin] = entry;
+			this.queue[normalizedOptions][kLength]++;
+			this._processQueue();
+		});
+	}
+
+	request(origin, options, headers, streamOptions) {
+		return new Promise((resolve, reject) => {
+			this.getSession(origin, options, [{
+				reject,
+				resolve: session => {
+					try {
+						const stream = session.request(headers, streamOptions);
+
+						// Do not throw before `request(...)` has been awaited
+						delayAsyncDestroy(stream);
+
+						resolve(stream);
+					} catch (error) {
+						reject(error);
+					}
+				}
+			}]);
+		});
+	}
+
+	async createConnection(origin, options) {
+		return Agent.connect(origin, options);
+	}
+
+	static connect(origin, options) {
+		options.ALPNProtocols = ['h2'];
+
+		const port = origin.port || 443;
+		const host = origin.hostname;
+
+		if (typeof options.servername === 'undefined') {
+			options.servername = host;
+		}
+
+		const socket = tls.connect(port, host, options);
+
+		if (options.socket) {
+			socket._peername = {
+				family: undefined,
+				address: undefined,
+				port
+			};
+		}
+
+		return socket;
+	}
+
+	closeEmptySessions(maxCount = Number.POSITIVE_INFINITY) {
+		let closedCount = 0;
+
+		const {sessions} = this;
+
+		// eslint-disable-next-line guard-for-in
+		for (const key in sessions) {
+			const thisSessions = sessions[key];
+
+			for (let index = 0; index < thisSessions.length; index++) {
+				const session = thisSessions[index];
+
+				if (session[kCurrentStreamCount] === 0) {
+					closedCount++;
+					session.close();
+
+					if (closedCount >= maxCount) {
+						return closedCount;
+					}
+				}
+			}
+		}
+
+		return closedCount;
+	}
+
+	destroy(reason) {
+		const {sessions, queue} = this;
+
+		// eslint-disable-next-line guard-for-in
+		for (const key in sessions) {
+			const thisSessions = sessions[key];
+
+			for (let index = 0; index < thisSessions.length; index++) {
+				thisSessions[index].destroy(reason);
+			}
+		}
+
+		// eslint-disable-next-line guard-for-in
+		for (const normalizedOptions in queue) {
+			const entries = queue[normalizedOptions];
+
+			// eslint-disable-next-line guard-for-in
+			for (const normalizedOrigin in entries) {
+				entries[normalizedOrigin].destroyed = true;
+			}
+		}
+
+		// New requests should NOT attach to destroyed sessions
+		this.queue = {};
+		this.tlsSessionCache.clear();
+	}
+
+	get emptySessionCount() {
+		return this._emptySessionCount;
+	}
+
+	get pendingSessionCount() {
+		return this._sessionCount - this._emptySessionCount;
+	}
+
+	get sessionCount() {
+		return this._sessionCount;
+	}
+}
+
+Agent.kCurrentStreamCount = kCurrentStreamCount;
+Agent.kGracefullyClosing = kGracefullyClosing;
+
+module.exports = {
+	Agent,
+	globalAgent: new Agent()
+};
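+
+// A minimal usage sketch (assuming this bundle mirrors the upstream `http2-wrapper`
+// package): the agent hands out HTTP/2 streams via `agent.request(...)`.
+//
+//   const {globalAgent} = require('http2-wrapper');
+//
+//   (async () => {
+//     const stream = await globalAgent.request('https://example.com', {}, {':path': '/'});
+//     stream.once('response', headers => console.log(headers[':status']));
+//     stream.end();
+//   })();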
+
+
+/***/ }),
+
+/***/ 2342:
+/***/ ((module, __unused_webpack_exports, __nccwpck_require__) => {
+
+
+// See https://github.com/facebook/jest/issues/2549
+// eslint-disable-next-line node/prefer-global/url
+const {URL, urlToHttpOptions} = __nccwpck_require__(7016);
+const http = __nccwpck_require__(8611);
+const https = __nccwpck_require__(5692);
+const resolveALPN = __nccwpck_require__(2072);
+const QuickLRU = __nccwpck_require__(7326);
+const {Agent, globalAgent} = __nccwpck_require__(3756);
+const Http2ClientRequest = __nccwpck_require__(5270);
+const calculateServerName = __nccwpck_require__(5059);
+const delayAsyncDestroy = __nccwpck_require__(5086);
+
+const cache = new QuickLRU({maxSize: 100});
+const queue = new Map();
+
+const installSocket = (agent, socket, options) => {
+	socket._httpMessage = {shouldKeepAlive: true};
+
+	const onFree = () => {
+		agent.emit('free', socket, options);
+	};
+
+	socket.on('free', onFree);
+
+	const onClose = () => {
+		agent.removeSocket(socket, options);
+	};
+
+	socket.on('close', onClose);
+
+	const onTimeout = () => {
+		const {freeSockets} = agent;
+
+		for (const sockets of Object.values(freeSockets)) {
+			if (sockets.includes(socket)) {
+				socket.destroy();
+				return;
+			}
+		}
+	};
+
+	socket.on('timeout', onTimeout);
+
+	const onRemove = () => {
+		agent.removeSocket(socket, options);
+		socket.off('close', onClose);
+		socket.off('free', onFree);
+		socket.off('timeout', onTimeout);
+		socket.off('agentRemove', onRemove);
+	};
+
+	socket.on('agentRemove', onRemove);
+
+	agent.emit('free', socket, options);
+};
+
+const createResolveProtocol = (cache, queue = new Map(), connect = undefined) => {
+	return async options => {
+		const name = `${options.host}:${options.port}:${options.ALPNProtocols.sort()}`;
+
+		if (!cache.has(name)) {
+			if (queue.has(name)) {
+				const result = await queue.get(name);
+				return {alpnProtocol: result.alpnProtocol};
+			}
+
+			const {path} = options;
+			options.path = options.socketPath;
+
+			const resultPromise = resolveALPN(options, connect);
+			queue.set(name, resultPromise);
+
+			try {
+				const result = await resultPromise;
+
+				cache.set(name, result.alpnProtocol);
+				queue.delete(name);
+
+				options.path = path;
+
+				return result;
+			} catch (error) {
+				queue.delete(name);
+
+				options.path = path;
+
+				throw error;
+			}
+		}
+
+		return {alpnProtocol: cache.get(name)};
+	};
+};
+
+const defaultResolveProtocol = createResolveProtocol(cache, queue);
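+
+// Hypothetical usage sketch: a caller can plug in its own ALPN cache (anything with
+// `has`/`get`/`set`/`delete`, e.g. a `Map`) so negotiation results are shared:
+//
+//   const myCache = new Map();
+//   const myResolveProtocol = createResolveProtocol(myCache);
+//   // const {alpnProtocol} = await myResolveProtocol({
+//   //   host: 'example.com', port: 443, ALPNProtocols: ['h2', 'http/1.1']
+//   // });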
+
+module.exports = async (input, options, callback) => {
+	if (typeof input === 'string') {
+		input = urlToHttpOptions(new URL(input));
+	} else if (input instanceof URL) {
+		input = urlToHttpOptions(input);
+	} else {
+		input = {...input};
+	}
+
+	if (typeof options === 'function' || options === undefined) {
+		// (options, callback)
+		callback = options;
+		options = input;
+	} else {
+		// (input, options, callback)
+		options = Object.assign(input, options);
+	}
+
+	options.ALPNProtocols = options.ALPNProtocols || ['h2', 'http/1.1'];
+
+	if (!Array.isArray(options.ALPNProtocols) || options.ALPNProtocols.length === 0) {
+		throw new Error('The `ALPNProtocols` option must be an Array with at least one entry');
+	}
+
+	options.protocol = options.protocol || 'https:';
+	const isHttps = options.protocol === 'https:';
+
+	options.host = options.hostname || options.host || 'localhost';
+	options.session = options.tlsSession;
+	options.servername = options.servername || calculateServerName((options.headers && options.headers.host) || options.host);
+	options.port = options.port || (isHttps ? 443 : 80);
+	options._defaultAgent = isHttps ? https.globalAgent : http.globalAgent;
+
+	const resolveProtocol = options.resolveProtocol || defaultResolveProtocol;
+
+	// Note: We don't support `h2session` here
+
+	let {agent} = options;
+	if (agent !== undefined && agent !== false && agent.constructor.name !== 'Object') {
+		throw new Error('The `options.agent` can only be an object with `http`, `https` or `http2` properties');
+	}
+
+	if (isHttps) {
+		options.resolveSocket = true;
+
+		let {socket, alpnProtocol, timeout} = await resolveProtocol(options);
+
+		if (timeout) {
+			if (socket) {
+				socket.destroy();
+			}
+
+			const error = new Error(`Timed out resolving ALPN: ${options.timeout} ms`);
+			error.code = 'ETIMEDOUT';
+			error.ms = options.timeout;
+
+			throw error;
+		}
+
+		// We can't accept a custom `createConnection` because the API is different for HTTP/2
+		if (socket && options.createConnection) {
+			socket.destroy();
+			socket = undefined;
+		}
+
+		delete options.resolveSocket;
+
+		const isHttp2 = alpnProtocol === 'h2';
+
+		if (agent) {
+			agent = isHttp2 ? agent.http2 : agent.https;
+			options.agent = agent;
+		}
+
+		if (agent === undefined) {
+			agent = isHttp2 ? globalAgent : https.globalAgent;
+		}
+
+		if (socket) {
+			if (agent === false) {
+				socket.destroy();
+			} else {
+				const defaultCreateConnection = (isHttp2 ? Agent : https.Agent).prototype.createConnection;
+
+				if (agent.createConnection === defaultCreateConnection) {
+					if (isHttp2) {
+						options._reuseSocket = socket;
+					} else {
+						installSocket(agent, socket, options);
+					}
+				} else {
+					socket.destroy();
+				}
+			}
+		}
+
+		if (isHttp2) {
+			return delayAsyncDestroy(new Http2ClientRequest(options, callback));
+		}
+	} else if (agent) {
+		options.agent = agent.http;
+	}
+
+	// If we're sending HTTP/1.1, handle any explicitly set H2 headers in the options:
+	if (options.headers) {
+		options.headers = {...options.headers};
+
+		// :authority is equivalent to the HTTP/1.1 host header
+		if (options.headers[':authority']) {
+			if (!options.headers.host) {
+				options.headers.host = options.headers[':authority'];
+			}
+
+			delete options.headers[':authority'];
+		}
+
+		// Remove other HTTP/2 headers as they have their counterparts in the options
+		delete options.headers[':method'];
+		delete options.headers[':scheme'];
+		delete options.headers[':path'];
+	}
+
+	return delayAsyncDestroy(http.request(options, callback));
+};
+
+module.exports.protocolCache = cache;
+module.exports.resolveProtocol = defaultResolveProtocol;
+module.exports.createResolveProtocol = createResolveProtocol;
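+
+// A short usage sketch (assuming this bundle mirrors the upstream `http2-wrapper`
+// package): `auto` negotiates ALPN and returns either an HTTP/1.1 or an HTTP/2 request.
+//
+//   const http2wrapper = require('http2-wrapper');
+//
+//   (async () => {
+//     const request = await http2wrapper.auto('https://example.com', response => {
+//       console.log(response.statusCode, response.httpVersion);
+//     });
+//     request.end();
+//   })();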
+
+
+/***/ }),
+
+/***/ 5270:
+/***/ ((module, __unused_webpack_exports, __nccwpck_require__) => {
+
+
+// See https://github.com/facebook/jest/issues/2549
+// eslint-disable-next-line node/prefer-global/url
+const {URL, urlToHttpOptions} = __nccwpck_require__(7016);
+const http2 = __nccwpck_require__(5675);
+const {Writable} = __nccwpck_require__(2203);
+const {Agent, globalAgent} = __nccwpck_require__(3756);
+const IncomingMessage = __nccwpck_require__(1899);
+const proxyEvents = __nccwpck_require__(6877);
+const {
+	ERR_INVALID_ARG_TYPE,
+	ERR_INVALID_PROTOCOL,
+	ERR_HTTP_HEADERS_SENT
+} = __nccwpck_require__(8108);
+const validateHeaderName = __nccwpck_require__(311);
+const validateHeaderValue = __nccwpck_require__(2495);
+const proxySocketHandler = __nccwpck_require__(28);
+
+const {
+	HTTP2_HEADER_STATUS,
+	HTTP2_HEADER_METHOD,
+	HTTP2_HEADER_PATH,
+	HTTP2_HEADER_AUTHORITY,
+	HTTP2_METHOD_CONNECT
+} = http2.constants;
+
+const kHeaders = Symbol('headers');
+const kOrigin = Symbol('origin');
+const kSession = Symbol('session');
+const kOptions = Symbol('options');
+const kFlushedHeaders = Symbol('flushedHeaders');
+const kJobs = Symbol('jobs');
+const kPendingAgentPromise = Symbol('pendingAgentPromise');
+
+class ClientRequest extends Writable {
+	constructor(input, options, callback) {
+		super({
+			autoDestroy: false,
+			emitClose: false
+		});
+
+		if (typeof input === 'string') {
+			input = urlToHttpOptions(new URL(input));
+		} else if (input instanceof URL) {
+			input = urlToHttpOptions(input);
+		} else {
+			input = {...input};
+		}
+
+		if (typeof options === 'function' || options === undefined) {
+			// (options, callback)
+			callback = options;
+			options = input;
+		} else {
+			// (input, options, callback)
+			options = Object.assign(input, options);
+		}
+
+		if (options.h2session) {
+			this[kSession] = options.h2session;
+
+			if (this[kSession].destroyed) {
+				throw new Error('The session has been closed already');
+			}
+
+			this.protocol = this[kSession].socket.encrypted ? 'https:' : 'http:';
+		} else if (options.agent === false) {
+			this.agent = new Agent({maxEmptySessions: 0});
+		} else if (typeof options.agent === 'undefined' || options.agent === null) {
+			this.agent = globalAgent;
+		} else if (typeof options.agent.request === 'function') {
+			this.agent = options.agent;
+		} else {
+			throw new ERR_INVALID_ARG_TYPE('options.agent', ['http2wrapper.Agent-like Object', 'undefined', 'false'], options.agent);
+		}
+
+		if (this.agent) {
+			this.protocol = this.agent.protocol;
+		}
+
+		if (options.protocol && options.protocol !== this.protocol) {
+			throw new ERR_INVALID_PROTOCOL(options.protocol, this.protocol);
+		}
+
+		if (!options.port) {
+			options.port = options.defaultPort || (this.agent && this.agent.defaultPort) || 443;
+		}
+
+		options.host = options.hostname || options.host || 'localhost';
+
+		// Unused
+		delete options.hostname;
+
+		const {timeout} = options;
+		options.timeout = undefined;
+
+		this[kHeaders] = Object.create(null);
+		this[kJobs] = [];
+
+		this[kPendingAgentPromise] = undefined;
+
+		this.socket = null;
+		this.connection = null;
+
+		this.method = options.method || 'GET';
+
+		if (!(this.method === 'CONNECT' && (options.path === '/' || options.path === undefined))) {
+			this.path = options.path;
+		}
+
+		this.res = null;
+		this.aborted = false;
+		this.reusedSocket = false;
+
+		const {headers} = options;
+		if (headers) {
+			// eslint-disable-next-line guard-for-in
+			for (const header in headers) {
+				this.setHeader(header, headers[header]);
+			}
+		}
+
+		if (options.auth && !('authorization' in this[kHeaders])) {
+			this[kHeaders].authorization = 'Basic ' + Buffer.from(options.auth).toString('base64');
+		}
+
+		options.session = options.tlsSession;
+		options.path = options.socketPath;
+
+		this[kOptions] = options;
+
+		// Clients that generate HTTP/2 requests directly SHOULD use the :authority pseudo-header field instead of the Host header field.
+		this[kOrigin] = new URL(`${this.protocol}//${options.servername || options.host}:${options.port}`);
+
+		// A socket is being reused
+		const reuseSocket = options._reuseSocket;
+		if (reuseSocket) {
+			options.createConnection = (...args) => {
+				if (reuseSocket.destroyed) {
+					return this.agent.createConnection(...args);
+				}
+
+				return reuseSocket;
+			};
+
+			// eslint-disable-next-line promise/prefer-await-to-then
+			this.agent.getSession(this[kOrigin], this[kOptions]).catch(() => {});
+		}
+
+		if (timeout) {
+			this.setTimeout(timeout);
+		}
+
+		if (callback) {
+			this.once('response', callback);
+		}
+
+		this[kFlushedHeaders] = false;
+	}
+
+	get method() {
+		return this[kHeaders][HTTP2_HEADER_METHOD];
+	}
+
+	set method(value) {
+		if (value) {
+			this[kHeaders][HTTP2_HEADER_METHOD] = value.toUpperCase();
+		}
+	}
+
+	get path() {
+		const header = this.method === 'CONNECT' ? HTTP2_HEADER_AUTHORITY : HTTP2_HEADER_PATH;
+
+		return this[kHeaders][header];
+	}
+
+	set path(value) {
+		if (value) {
+			const header = this.method === 'CONNECT' ? HTTP2_HEADER_AUTHORITY : HTTP2_HEADER_PATH;
+
+			this[kHeaders][header] = value;
+		}
+	}
+
+	get host() {
+		return this[kOrigin].hostname;
+	}
+
+	set host(_value) {
+		// Do nothing as this is read only.
+	}
+
+	get _mustNotHaveABody() {
+		return this.method === 'GET' || this.method === 'HEAD' || this.method === 'DELETE';
+	}
+
+	_write(chunk, encoding, callback) {
+		// https://github.com/nodejs/node/blob/654df09ae0c5e17d1b52a900a545f0664d8c7627/lib/internal/http2/util.js#L148-L156
+		if (this._mustNotHaveABody) {
+			callback(new Error('The GET, HEAD and DELETE methods must NOT have a body'));
+			/* istanbul ignore next: Node.js 12 throws directly */
+			return;
+		}
+
+		this.flushHeaders();
+
+		const callWrite = () => this._request.write(chunk, encoding, callback);
+		if (this._request) {
+			callWrite();
+		} else {
+			this[kJobs].push(callWrite);
+		}
+	}
+
+	_final(callback) {
+		this.flushHeaders();
+
+		const callEnd = () => {
+			// For GET, HEAD, DELETE and CONNECT
+			if (this._mustNotHaveABody || this.method === 'CONNECT') {
+				callback();
+				return;
+			}
+
+			this._request.end(callback);
+		};
+
+		if (this._request) {
+			callEnd();
+		} else {
+			this[kJobs].push(callEnd);
+		}
+	}
+
+	abort() {
+		if (this.res && this.res.complete) {
+			return;
+		}
+
+		if (!this.aborted) {
+			process.nextTick(() => this.emit('abort'));
+		}
+
+		this.aborted = true;
+
+		this.destroy();
+	}
+
+	async _destroy(error, callback) {
+		if (this.res) {
+			this.res._dump();
+		}
+
+		if (this._request) {
+			this._request.destroy();
+		} else {
+			process.nextTick(() => {
+				this.emit('close');
+			});
+		}
+
+		try {
+			await this[kPendingAgentPromise];
+		} catch (internalError) {
+			if (this.aborted) {
+				error = internalError;
+			}
+		}
+
+		callback(error);
+	}
+
+	async flushHeaders() {
+		if (this[kFlushedHeaders] || this.destroyed) {
+			return;
+		}
+
+		this[kFlushedHeaders] = true;
+
+		const isConnectMethod = this.method === HTTP2_METHOD_CONNECT;
+
+		// The real magic is here
+		const onStream = stream => {
+			this._request = stream;
+
+			if (this.destroyed) {
+				stream.destroy();
+				return;
+			}
+
+			// Forwards `timeout` and `continue` events to this instance (`error` and `close` are handled separately below).
+			if (!isConnectMethod) {
+				// TODO: Should we proxy `close` here?
+				proxyEvents(stream, this, ['timeout', 'continue']);
+			}
+
+			stream.once('error', error => {
+				this.destroy(error);
+			});
+
+			stream.once('aborted', () => {
+				const {res} = this;
+				if (res) {
+					res.aborted = true;
+					res.emit('aborted');
+					res.destroy();
+				} else {
+					this.destroy(new Error('The server aborted the HTTP/2 stream'));
+				}
+			});
+
+			const onResponse = (headers, flags, rawHeaders) => {
+				// If we were to emit the raw request stream, it would be as fast as the native approach.
+				// Note that wrapping the raw stream in a Proxy instance won't improve the performance (already tested).
+				const response = new IncomingMessage(this.socket, stream.readableHighWaterMark);
+				this.res = response;
+
+				// Undocumented, but it is used by `cacheable-request`
+				response.url = `${this[kOrigin].origin}${this.path}`;
+
+				response.req = this;
+				response.statusCode = headers[HTTP2_HEADER_STATUS];
+				response.headers = headers;
+				response.rawHeaders = rawHeaders;
+
+				response.once('end', () => {
+					response.complete = true;
+
+					// Has no effect, just to be consistent with the Node.js behavior
+					response.socket = null;
+					response.connection = null;
+				});
+
+				if (isConnectMethod) {
+					response.upgrade = true;
+
+					// The HTTP/1 API says the socket is detached here,
+					// but we can't do that, so we pass the original HTTP/2 stream instead.
+					if (this.emit('connect', response, stream, Buffer.alloc(0))) {
+						this.emit('close');
+					} else {
+						// No listeners attached, destroy the original request.
+						stream.destroy();
+					}
+				} else {
+					// Forwards data
+					stream.on('data', chunk => {
+						if (!response._dumped && !response.push(chunk)) {
+							stream.pause();
+						}
+					});
+
+					stream.once('end', () => {
+						if (!this.aborted) {
+							response.push(null);
+						}
+					});
+
+					if (!this.emit('response', response)) {
+						// No listeners attached, dump the response.
+						response._dump();
+					}
+				}
+			};
+
+			// This event tells us we are ready to listen for the data.
+			stream.once('response', onResponse);
+
+			// Emits `information` event
+			stream.once('headers', headers => this.emit('information', {statusCode: headers[HTTP2_HEADER_STATUS]}));
+
+			stream.once('trailers', (trailers, flags, rawTrailers) => {
+				const {res} = this;
+
+				// https://github.com/nodejs/node/issues/41251
+				if (res === null) {
+					onResponse(trailers, flags, rawTrailers);
+					return;
+				}
+
+				// Assigns trailers to the response object.
+				res.trailers = trailers;
+				res.rawTrailers = rawTrailers;
+			});
+
+			stream.once('close', () => {
+				const {aborted, res} = this;
+				if (res) {
+					if (aborted) {
+						res.aborted = true;
+						res.emit('aborted');
+						res.destroy();
+					}
+
+					const finish = () => {
+						res.emit('close');
+
+						this.destroy();
+						this.emit('close');
+					};
+
+					if (res.readable) {
+						res.once('end', finish);
+					} else {
+						finish();
+					}
+
+					return;
+				}
+
+				if (!this.destroyed) {
+					this.destroy(new Error('The HTTP/2 stream has been early terminated'));
+					this.emit('close');
+					return;
+				}
+
+				this.destroy();
+				this.emit('close');
+			});
+
+			this.socket = new Proxy(stream, proxySocketHandler);
+
+			for (const job of this[kJobs]) {
+				job();
+			}
+
+			this[kJobs].length = 0;
+
+			this.emit('socket', this.socket);
+		};
+
+		if (!(HTTP2_HEADER_AUTHORITY in this[kHeaders]) && !isConnectMethod) {
+			this[kHeaders][HTTP2_HEADER_AUTHORITY] = this[kOrigin].host;
+		}
+
+		// Makes an HTTP/2 request
+		if (this[kSession]) {
+			try {
+				onStream(this[kSession].request(this[kHeaders]));
+			} catch (error) {
+				this.destroy(error);
+			}
+		} else {
+			this.reusedSocket = true;
+
+			try {
+				const promise = this.agent.request(this[kOrigin], this[kOptions], this[kHeaders]);
+				this[kPendingAgentPromise] = promise;
+
+				onStream(await promise);
+
+				this[kPendingAgentPromise] = false;
+			} catch (error) {
+				this[kPendingAgentPromise] = false;
+
+				this.destroy(error);
+			}
+		}
+	}
+
+	get connection() {
+		return this.socket;
+	}
+
+	set connection(value) {
+		this.socket = value;
+	}
+
+	getHeaderNames() {
+		return Object.keys(this[kHeaders]);
+	}
+
+	hasHeader(name) {
+		if (typeof name !== 'string') {
+			throw new ERR_INVALID_ARG_TYPE('name', 'string', name);
+		}
+
+		return Boolean(this[kHeaders][name.toLowerCase()]);
+	}
+
+	getHeader(name) {
+		if (typeof name !== 'string') {
+			throw new ERR_INVALID_ARG_TYPE('name', 'string', name);
+		}
+
+		return this[kHeaders][name.toLowerCase()];
+	}
+
+	get headersSent() {
+		return this[kFlushedHeaders];
+	}
+
+	removeHeader(name) {
+		if (typeof name !== 'string') {
+			throw new ERR_INVALID_ARG_TYPE('name', 'string', name);
+		}
+
+		if (this.headersSent) {
+			throw new ERR_HTTP_HEADERS_SENT('remove');
+		}
+
+		delete this[kHeaders][name.toLowerCase()];
+	}
+
+	setHeader(name, value) {
+		if (this.headersSent) {
+			throw new ERR_HTTP_HEADERS_SENT('set');
+		}
+
+		validateHeaderName(name);
+		validateHeaderValue(name, value);
+
+		const lowercased = name.toLowerCase();
+
+		if (lowercased === 'connection') {
+			if (value.toLowerCase() === 'keep-alive') {
+				return;
+			}
+
+			throw new Error(`Invalid 'connection' header: ${value}`);
+		}
+
+		if (lowercased === 'host' && this.method === 'CONNECT') {
+			this[kHeaders][HTTP2_HEADER_AUTHORITY] = value;
+		} else {
+			this[kHeaders][lowercased] = value;
+		}
+	}
+
+	setNoDelay() {
+		// HTTP/2 sockets cannot be manipulated this way, so do nothing.
+	}
+
+	setSocketKeepAlive() {
+		// HTTP/2 sockets cannot be manipulated this way, so do nothing.
+	}
+
+	setTimeout(ms, callback) {
+		const applyTimeout = () => this._request.setTimeout(ms, callback);
+
+		if (this._request) {
+			applyTimeout();
+		} else {
+			this[kJobs].push(applyTimeout);
+		}
+
+		return this;
+	}
+
+	get maxHeadersCount() {
+		if (!this.destroyed && this._request) {
+			return this._request.session.localSettings.maxHeaderListSize;
+		}
+
+		return undefined;
+	}
+
+	set maxHeadersCount(_value) {
+		// Updating HTTP2 settings would affect all requests, do nothing.
+	}
+}
+
+module.exports = ClientRequest;
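+
+// Usage sketch (derived from the constructor above, not authoritative): the class
+// mirrors `http.ClientRequest`, so it can be driven the same way:
+//
+//   const request = new ClientRequest('https://example.com', response => {
+//     response.pipe(process.stdout);
+//   });
+//   request.end();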
+
+
+/***/ }),
+
+/***/ 1899:
+/***/ ((module, __unused_webpack_exports, __nccwpck_require__) => {
+
+
+const {Readable} = __nccwpck_require__(2203);
+
+class IncomingMessage extends Readable {
+	constructor(socket, highWaterMark) {
+		super({
+			emitClose: false,
+			autoDestroy: true,
+			highWaterMark
+		});
+
+		this.statusCode = null;
+		this.statusMessage = '';
+		this.httpVersion = '2.0';
+		this.httpVersionMajor = 2;
+		this.httpVersionMinor = 0;
+		this.headers = {};
+		this.trailers = {};
+		this.req = null;
+
+		this.aborted = false;
+		this.complete = false;
+		this.upgrade = null;
+
+		this.rawHeaders = [];
+		this.rawTrailers = [];
+
+		this.socket = socket;
+
+		this._dumped = false;
+	}
+
+	get connection() {
+		return this.socket;
+	}
+
+	set connection(value) {
+		this.socket = value;
+	}
+
+	_destroy(error, callback) {
+		if (!this.readableEnded) {
+			this.aborted = true;
+		}
+
+		// See https://github.com/nodejs/node/issues/35303
+		callback();
+
+		this.req._request.destroy(error);
+	}
+
+	setTimeout(ms, callback) {
+		this.req.setTimeout(ms, callback);
+		return this;
+	}
+
+	_dump() {
+		if (!this._dumped) {
+			this._dumped = true;
+
+			this.removeAllListeners('data');
+			this.resume();
+		}
+	}
+
+	_read() {
+		if (this.req) {
+			this.req._request.resume();
+		}
+	}
+}
+
+module.exports = IncomingMessage;
+
+
+/***/ }),
+
+/***/ 5409:
+/***/ ((module, __unused_webpack_exports, __nccwpck_require__) => {
+
+
+const http2 = __nccwpck_require__(5675);
+const {
+	Agent,
+	globalAgent
+} = __nccwpck_require__(3756);
+const ClientRequest = __nccwpck_require__(5270);
+const IncomingMessage = __nccwpck_require__(1899);
+const auto = __nccwpck_require__(2342);
+const {
+	HttpOverHttp2,
+	HttpsOverHttp2
+} = __nccwpck_require__(449);
+const Http2OverHttp2 = __nccwpck_require__(2016);
+const {
+	Http2OverHttp,
+	Http2OverHttps
+} = __nccwpck_require__(3017);
+const validateHeaderName = __nccwpck_require__(311);
+const validateHeaderValue = __nccwpck_require__(2495);
+
+const request = (url, options, callback) => new ClientRequest(url, options, callback);
+
+const get = (url, options, callback) => {
+	// eslint-disable-next-line unicorn/prevent-abbreviations
+	const req = new ClientRequest(url, options, callback);
+	req.end();
+
+	return req;
+};
+
+module.exports = {
+	...http2,
+	ClientRequest,
+	IncomingMessage,
+	Agent,
+	globalAgent,
+	request,
+	get,
+	auto,
+	proxies: {
+		HttpOverHttp2,
+		HttpsOverHttp2,
+		Http2OverHttp2,
+		Http2OverHttp,
+		Http2OverHttps
+	},
+	validateHeaderName,
+	validateHeaderValue
+};
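+
+// A minimal usage sketch (assuming this bundle corresponds to the public
+// `http2-wrapper` API):
+//
+//   const http2 = require('http2-wrapper');
+//
+//   const request = http2.get('https://example.com', response => {
+//     response.pipe(process.stdout);
+//   });
+//   request.once('error', console.error);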
+
+
+/***/ }),
+
+/***/ 7598:
+/***/ ((module) => {
+
+
+
+module.exports = self => {
+	const {username, password} = self.proxyOptions.url;
+
+	if (username || password) {
+		const data = `${username}:${password}`;
+		const authorization = `Basic ${Buffer.from(data).toString('base64')}`;
+
+		return {
+			'proxy-authorization': authorization,
+			authorization
+		};
+	}
+
+	return {};
+};
+
+
+/***/ }),
+
+/***/ 449:
+/***/ ((module, __unused_webpack_exports, __nccwpck_require__) => {
+
+
+const tls = __nccwpck_require__(4756);
+const http = __nccwpck_require__(8611);
+const https = __nccwpck_require__(5692);
+const JSStreamSocket = __nccwpck_require__(823);
+const {globalAgent} = __nccwpck_require__(3756);
+const UnexpectedStatusCodeError = __nccwpck_require__(2609);
+const initialize = __nccwpck_require__(4661);
+const getAuthorizationHeaders = __nccwpck_require__(7598);
+
+const createConnection = (self, options, callback) => {
+	(async () => {
+		try {
+			const {proxyOptions} = self;
+			const {url, headers, raw} = proxyOptions;
+
+			const stream = await globalAgent.request(url, proxyOptions, {
+				...getAuthorizationHeaders(self),
+				...headers,
+				':method': 'CONNECT',
+				':authority': `${options.host}:${options.port}`
+			});
+
+			stream.once('error', callback);
+			stream.once('response', headers => {
+				const statusCode = headers[':status'];
+
+				if (statusCode !== 200) {
+					callback(new UnexpectedStatusCodeError(statusCode, ''));
+					return;
+				}
+
+				const encrypted = self instanceof https.Agent;
+
+				if (raw && encrypted) {
+					options.socket = stream;
+					const secureStream = tls.connect(options);
+
+					secureStream.once('close', () => {
+						stream.destroy();
+					});
+
+					callback(null, secureStream);
+					return;
+				}
+
+				const socket = new JSStreamSocket(stream);
+				socket.encrypted = false;
+				socket._handle.getpeername = out => {
+					out.family = undefined;
+					out.address = undefined;
+					out.port = undefined;
+				};
+
+				callback(null, socket);
+			});
+		} catch (error) {
+			callback(error);
+		}
+	})();
+};
+
+class HttpOverHttp2 extends http.Agent {
+	constructor(options) {
+		super(options);
+
+		initialize(this, options.proxyOptions);
+	}
+
+	createConnection(options, callback) {
+		createConnection(this, options, callback);
+	}
+}
+
+class HttpsOverHttp2 extends https.Agent {
+	constructor(options) {
+		super(options);
+
+		initialize(this, options.proxyOptions);
+	}
+
+	createConnection(options, callback) {
+		createConnection(this, options, callback);
+	}
+}
+
+module.exports = {
+	HttpOverHttp2,
+	HttpsOverHttp2
+};
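+
+// Hypothetical usage sketch: these agents tunnel HTTP/1.1 (or TLS) traffic over an
+// HTTP/2 CONNECT stream to the proxy:
+//
+//   const agent = new HttpsOverHttp2({
+//     proxyOptions: {url: 'https://proxy.example.com:8000'}
+//   });
+//   // https.get('https://example.com', {agent}, response => { /* ... */ });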
+
+
+/***/ }),
+
+/***/ 3017:
+/***/ ((module, __unused_webpack_exports, __nccwpck_require__) => {
+
+
+const http = __nccwpck_require__(8611);
+const https = __nccwpck_require__(5692);
+const Http2OverHttpX = __nccwpck_require__(4418);
+const getAuthorizationHeaders = __nccwpck_require__(7598);
+
+const getStream = request => new Promise((resolve, reject) => {
+	const onConnect = (response, socket, head) => {
+		socket.unshift(head);
+
+		request.off('error', reject);
+		resolve([socket, response.statusCode, response.statusMessage]);
+	};
+
+	request.once('error', reject);
+	request.once('connect', onConnect);
+});
+
+class Http2OverHttp extends Http2OverHttpX {
+	async _getProxyStream(authority) {
+		const {proxyOptions} = this;
+		const {url, headers} = this.proxyOptions;
+
+		const network = url.protocol === 'https:' ? https : http;
+
+		// `new URL('https://localhost/httpbin.org:443')` results in
+		// a `/httpbin.org:443` path, which has an invalid leading slash.
+		const request = network.request({
+			...proxyOptions,
+			hostname: url.hostname,
+			port: url.port,
+			path: authority,
+			headers: {
+				...getAuthorizationHeaders(this),
+				...headers,
+				host: authority
+			},
+			method: 'CONNECT'
+		}).end();
+
+		return getStream(request);
+	}
+}
+
+module.exports = {
+	Http2OverHttp,
+	Http2OverHttps: Http2OverHttp
+};
+
+
+/***/ }),
+
+/***/ 2016:
+/***/ ((module, __unused_webpack_exports, __nccwpck_require__) => {
+
+
+const {globalAgent} = __nccwpck_require__(3756);
+const Http2OverHttpX = __nccwpck_require__(4418);
+const getAuthorizationHeaders = __nccwpck_require__(7598);
+
+const getStatusCode = stream => new Promise((resolve, reject) => {
+	stream.once('error', reject);
+	stream.once('response', headers => {
+		stream.off('error', reject);
+		resolve(headers[':status']);
+	});
+});
+
+class Http2OverHttp2 extends Http2OverHttpX {
+	async _getProxyStream(authority) {
+		const {proxyOptions} = this;
+
+		const headers = {
+			...getAuthorizationHeaders(this),
+			...proxyOptions.headers,
+			':method': 'CONNECT',
+			':authority': authority
+		};
+
+		const stream = await globalAgent.request(proxyOptions.url, proxyOptions, headers);
+		const statusCode = await getStatusCode(stream);
+
+		return [stream, statusCode, ''];
+	}
+}
+
+module.exports = Http2OverHttp2;
+
+
+/***/ }),
+
+/***/ 4418:
+/***/ ((module, __unused_webpack_exports, __nccwpck_require__) => {
+
+
+const {Agent} = __nccwpck_require__(3756);
+const JSStreamSocket = __nccwpck_require__(823);
+const UnexpectedStatusCodeError = __nccwpck_require__(2609);
+const initialize = __nccwpck_require__(4661);
+
+class Http2OverHttpX extends Agent {
+	constructor(options) {
+		super(options);
+
+		initialize(this, options.proxyOptions);
+	}
+
+	async createConnection(origin, options) {
+		const authority = `${origin.hostname}:${origin.port || 443}`;
+
+		const [stream, statusCode, statusMessage] = await this._getProxyStream(authority);
+		if (statusCode !== 200) {
+			throw new UnexpectedStatusCodeError(statusCode, statusMessage);
+		}
+
+		if (this.proxyOptions.raw) {
+			options.socket = stream;
+		} else {
+			const socket = new JSStreamSocket(stream);
+			socket.encrypted = false;
+			socket._handle.getpeername = out => {
+				out.family = undefined;
+				out.address = undefined;
+				out.port = undefined;
+			};
+
+			return socket;
+		}
+
+		return super.createConnection(origin, options);
+	}
+}
+
+module.exports = Http2OverHttpX;
+
+
+/***/ }),
+
+/***/ 4661:
+/***/ ((module, __unused_webpack_exports, __nccwpck_require__) => {
+
+
+// See https://github.com/facebook/jest/issues/2549
+// eslint-disable-next-line node/prefer-global/url
+const {URL} = __nccwpck_require__(7016);
+const checkType = __nccwpck_require__(7940);
+
+module.exports = (self, proxyOptions) => {
+	checkType('proxyOptions', proxyOptions, ['object']);
+	checkType('proxyOptions.headers', proxyOptions.headers, ['object', 'undefined']);
+	checkType('proxyOptions.raw', proxyOptions.raw, ['boolean', 'undefined']);
+	checkType('proxyOptions.url', proxyOptions.url, [URL, 'string']);
+
+	const url = new URL(proxyOptions.url);
+
+	self.proxyOptions = {
+		raw: true,
+		...proxyOptions,
+		headers: {...proxyOptions.headers},
+		url
+	};
+};
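+
+// For illustration (derived from the checks above): after normalization,
+// `self.proxyOptions` has this shape:
+//
+//   {
+//     raw: true,                               // defaults to true, can be overridden
+//     headers: {},                             // shallow copy of the provided headers
+//     url: new URL('https://proxy.example.com:8000')
+//   }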
+
+
+/***/ }),
+
+/***/ 2609:
+/***/ ((module) => {
+
+
+
+class UnexpectedStatusCodeError extends Error {
+	constructor(statusCode, statusMessage = '') {
+		super(`The proxy server rejected the request with status code ${statusCode} (${statusMessage || 'empty status message'})`);
+		this.statusCode = statusCode;
+		this.statusMessage = statusMessage;
+	}
+}
+
+module.exports = UnexpectedStatusCodeError;
+
+
+/***/ }),
+
+/***/ 5059:
+/***/ ((module, __unused_webpack_exports, __nccwpck_require__) => {
+
+
+const {isIP} = __nccwpck_require__(9278);
+const assert = __nccwpck_require__(2613);
+
+const getHost = host => {
+	if (host[0] === '[') {
+		const idx = host.indexOf(']');
+
+		assert(idx !== -1);
+		return host.slice(1, idx);
+	}
+
+	const idx = host.indexOf(':');
+	if (idx === -1) {
+		return host;
+	}
+
+	return host.slice(0, idx);
+};
+
+module.exports = host => {
+	const servername = getHost(host);
+
+	if (isIP(servername)) {
+		return '';
+	}
+
+	return servername;
+};
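+
+// Worked examples (derived from the logic above):
+//
+//   calculateServerName('example.com:443')  // => 'example.com'
+//   calculateServerName('[::1]:443')        // => '' (IP addresses get an empty servername)
+//   calculateServerName('example.com')      // => 'example.com'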
+
+
+/***/ }),
+
+/***/ 7940:
+/***/ ((module) => {
+
+
+
+const checkType = (name, value, types) => {
+	const valid = types.some(type => {
+		const typeofType = typeof type;
+		if (typeofType === 'string') {
+			return typeof value === type;
+		}
+
+		return value instanceof type;
+	});
+
+	if (!valid) {
+		const names = types.map(type => typeof type === 'string' ? type : type.name);
+
+		throw new TypeError(`Expected '${name}' to be a type of ${names.join(' or ')}, got ${typeof value}`);
+	}
+};
+
+module.exports = checkType;
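+
+// Example (derived from the code above):
+//
+//   checkType('proxyOptions.raw', true, ['boolean', 'undefined']); // passes
+//   checkType('proxyOptions.url', 42, [URL, 'string']);            // throws TypeError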
+
+
+/***/ }),
+
+/***/ 5086:
+/***/ ((module) => {
+
+
+
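+// Defers `_destroy` completion and early `error` emission by one microtask, so that a
+// synchronously failing stream doesn't throw before the caller has awaited the request.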
+module.exports = stream => {
+	if (stream.listenerCount('error') !== 0) {
+		return stream;
+	}
+
+	stream.__destroy = stream._destroy;
+	stream._destroy = (...args) => {
+		const callback = args.pop();
+
+		stream.__destroy(...args, async error => {
+			await Promise.resolve();
+			callback(error);
+		});
+	};
+
+	const onError = error => {
+		// eslint-disable-next-line promise/prefer-await-to-then
+		Promise.resolve().then(() => {
+			stream.emit('error', error);
+		});
+	};
+
+	stream.once('error', onError);
+
+	// eslint-disable-next-line promise/prefer-await-to-then
+	Promise.resolve().then(() => {
+		stream.off('error', onError);
+	});
+
+	return stream;
+};
+
+
+/***/ }),
+
+/***/ 8108:
+/***/ ((module) => {
+
+
+/* istanbul ignore file: https://github.com/nodejs/node/blob/master/lib/internal/errors.js */
+
+const makeError = (Base, key, getMessage) => {
+	module.exports[key] = class NodeError extends Base {
+		constructor(...args) {
+			super(typeof getMessage === 'string' ? getMessage : getMessage(args));
+			this.name = `${super.name} [${key}]`;
+			this.code = key;
+		}
+	};
+};
+
+makeError(TypeError, 'ERR_INVALID_ARG_TYPE', args => {
+	const type = args[0].includes('.') ? 'property' : 'argument';
+
+	let valid = args[1];
+	const isManyTypes = Array.isArray(valid);
+
+	if (isManyTypes) {
+		valid = `${valid.slice(0, -1).join(', ')} or ${valid.slice(-1)}`;
+	}
+
+	return `The "${args[0]}" ${type} must be ${isManyTypes ? 'one of' : 'of'} type ${valid}. Received ${typeof args[2]}`;
+});
+
+makeError(TypeError, 'ERR_INVALID_PROTOCOL', args =>
+	`Protocol "${args[0]}" not supported. Expected "${args[1]}"`
+);
+
+makeError(Error, 'ERR_HTTP_HEADERS_SENT', args =>
+	`Cannot ${args[0]} headers after they are sent to the client`
+);
+
+makeError(TypeError, 'ERR_INVALID_HTTP_TOKEN', args =>
+	`${args[0]} must be a valid HTTP token [${args[1]}]`
+);
+
+makeError(TypeError, 'ERR_HTTP_INVALID_HEADER_VALUE', args =>
+	`Invalid value "${args[0]}" for header "${args[1]}"`
+);
+
+makeError(TypeError, 'ERR_INVALID_CHAR', args =>
+	`Invalid character in ${args[0]} [${args[1]}]`
+);
+
+makeError(
+	Error,
+	'ERR_HTTP2_NO_SOCKET_MANIPULATION',
+	'HTTP/2 sockets should not be directly manipulated (e.g. read and written)'
+);
+
+
+/***/ }),
+
+/***/ 3986:
+/***/ ((module) => {
+
+
+
+module.exports = header => {
+	switch (header) {
+		case ':method':
+		case ':scheme':
+		case ':authority':
+		case ':path':
+			return true;
+		default:
+			return false;
+	}
+};
+
+
+/***/ }),
+
+/***/ 823:
+/***/ ((module, __unused_webpack_exports, __nccwpck_require__) => {
+
+
+const stream = __nccwpck_require__(2203);
+const tls = __nccwpck_require__(4756);
+
+// Really awesome hack: wrapping a PassThrough in a TLSSocket makes Node create its internal
+// JSStreamSocket, whose constructor we grab so arbitrary duplex streams can act as sockets.
+const JSStreamSocket = (new tls.TLSSocket(new stream.PassThrough()))._handle._parentWrap.constructor;
+
+module.exports = JSStreamSocket;
+
+
+/***/ }),
+
+/***/ 6877:
+/***/ ((module) => {
+
+
+
+module.exports = (from, to, events) => {
+	for (const event of events) {
+		from.on(event, (...args) => to.emit(event, ...args));
+	}
+};
+
+
+/***/ }),
+
+/***/ 28:
+/***/ ((module, __unused_webpack_exports, __nccwpck_require__) => {
+
+
+const {ERR_HTTP2_NO_SOCKET_MANIPULATION} = __nccwpck_require__(8108);
+
+/* istanbul ignore file */
+/* https://github.com/nodejs/node/blob/6eec858f34a40ffa489c1ec54bb24da72a28c781/lib/internal/http2/compat.js#L195-L272 */
+
+const proxySocketHandler = {
+	has(stream, property) {
+		// Replaced [kSocket] with .socket
+		const reference = stream.session === undefined ? stream : stream.session.socket;
+		return (property in stream) || (property in reference);
+	},
+
+	get(stream, property) {
+		switch (property) {
+			case 'on':
+			case 'once':
+			case 'end':
+			case 'emit':
+			case 'destroy':
+				return stream[property].bind(stream);
+			case 'writable':
+			case 'destroyed':
+				return stream[property];
+			case 'readable':
+				if (stream.destroyed) {
+					return false;
+				}
+
+				return stream.readable;
+			case 'setTimeout': {
+				const {session} = stream;
+				if (session !== undefined) {
+					return session.setTimeout.bind(session);
+				}
+
+				return stream.setTimeout.bind(stream);
+			}
+
+			case 'write':
+			case 'read':
+			case 'pause':
+			case 'resume':
+				throw new ERR_HTTP2_NO_SOCKET_MANIPULATION();
+			default: {
+				// Replaced [kSocket] with .socket
+				const reference = stream.session === undefined ? stream : stream.session.socket;
+				const value = reference[property];
+
+				return typeof value === 'function' ? value.bind(reference) : value;
+			}
+		}
+	},
+
+	getPrototypeOf(stream) {
+		if (stream.session !== undefined) {
+			// Replaced [kSocket] with .socket
+			return Reflect.getPrototypeOf(stream.session.socket);
+		}
+
+		return Reflect.getPrototypeOf(stream);
+	},
+
+	set(stream, property, value) {
+		switch (property) {
+			case 'writable':
+			case 'readable':
+			case 'destroyed':
+			case 'on':
+			case 'once':
+			case 'end':
+			case 'emit':
+			case 'destroy':
+				stream[property] = value;
+				return true;
+			case 'setTimeout': {
+				const {session} = stream;
+				if (session === undefined) {
+					stream.setTimeout = value;
+				} else {
+					session.setTimeout = value;
+				}
+
+				return true;
+			}
+
+			case 'write':
+			case 'read':
+			case 'pause':
+			case 'resume':
+				throw new ERR_HTTP2_NO_SOCKET_MANIPULATION();
+			default: {
+				// Replaced [kSocket] with .socket
+				const reference = stream.session === undefined ? stream : stream.session.socket;
+				reference[property] = value;
+				return true;
+			}
+		}
+	}
+};
+
+module.exports = proxySocketHandler;
+
+
+/***/ }),
+
+/***/ 311:
+/***/ ((module, __unused_webpack_exports, __nccwpck_require__) => {
+
+
+const {ERR_INVALID_HTTP_TOKEN} = __nccwpck_require__(8108);
+const isRequestPseudoHeader = __nccwpck_require__(3986);
+
+const isValidHttpToken = /^[\^`\-\w!#$%&*+.|~]+$/;
+
+module.exports = name => {
+	if (typeof name !== 'string' || (!isValidHttpToken.test(name) && !isRequestPseudoHeader(name))) {
+		throw new ERR_INVALID_HTTP_TOKEN('Header name', name);
+	}
+};
+
+
+/***/ }),
+
+/***/ 2495:
+/***/ ((module, __unused_webpack_exports, __nccwpck_require__) => {
+
+
+const {
+	ERR_HTTP_INVALID_HEADER_VALUE,
+	ERR_INVALID_CHAR
+} = __nccwpck_require__(8108);
+
+const isInvalidHeaderValue = /[^\t\u0020-\u007E\u0080-\u00FF]/;
+
+module.exports = (name, value) => {
+	if (typeof value === 'undefined') {
+		throw new ERR_HTTP_INVALID_HEADER_VALUE(value, name);
+	}
+
+	if (isInvalidHeaderValue.test(value)) {
+		throw new ERR_INVALID_CHAR('header content', name);
+	}
+};
+
+
+/***/ }),
+
+/***/ 4469:
+/***/ (function(__unused_webpack_module, exports, __nccwpck_require__) {
+
+
+var __createBinding = (this && this.__createBinding) || (Object.create ? (function(o, m, k, k2) {
+    if (k2 === undefined) k2 = k;
+    var desc = Object.getOwnPropertyDescriptor(m, k);
+    if (!desc || ("get" in desc ? !m.__esModule : desc.writable || desc.configurable)) {
+      desc = { enumerable: true, get: function() { return m[k]; } };
+    }
+    Object.defineProperty(o, k2, desc);
+}) : (function(o, m, k, k2) {
+    if (k2 === undefined) k2 = k;
+    o[k2] = m[k];
+}));
+var __setModuleDefault = (this && this.__setModuleDefault) || (Object.create ? (function(o, v) {
+    Object.defineProperty(o, "default", { enumerable: true, value: v });
+}) : function(o, v) {
+    o["default"] = v;
+});
+var __importStar = (this && this.__importStar) || function (mod) {
+    if (mod && mod.__esModule) return mod;
+    var result = {};
+    if (mod != null) for (var k in mod) if (k !== "default" && Object.prototype.hasOwnProperty.call(mod, k)) __createBinding(result, mod, k);
+    __setModuleDefault(result, mod);
+    return result;
+};
+var __importDefault = (this && this.__importDefault) || function (mod) {
+    return (mod && mod.__esModule) ? mod : { "default": mod };
+};
+Object.defineProperty(exports, "__esModule", ({ value: true }));
+exports.HttpsProxyAgent = void 0;
+const net = __importStar(__nccwpck_require__(9278));
+const tls = __importStar(__nccwpck_require__(4756));
+const assert_1 = __importDefault(__nccwpck_require__(2613));
+const debug_1 = __importDefault(__nccwpck_require__(8441));
+const agent_base_1 = __nccwpck_require__(3583);
+const url_1 = __nccwpck_require__(7016);
+const parse_proxy_response_1 = __nccwpck_require__(1031);
+const debug = (0, debug_1.default)('https-proxy-agent');
+/**
+ * The `HttpsProxyAgent` implements an HTTP Agent subclass that connects to
+ * the specified "HTTP(s) proxy server" in order to proxy HTTPS requests.
+ *
+ * Outgoing HTTP requests are first tunneled through the proxy server using the
+ * `CONNECT` HTTP request method to establish a connection to the proxy server,
+ * and then the proxy server connects to the destination target and issues the
+ * HTTP request from the proxy server.
+ *
+ * `https:` requests have their socket connection upgraded to TLS once
+ * the connection to the proxy server has been established.
+ */
+class HttpsProxyAgent extends agent_base_1.Agent {
+    constructor(proxy, opts) {
+        super(opts);
+        this.options = { path: undefined };
+        this.proxy = typeof proxy === 'string' ? new url_1.URL(proxy) : proxy;
+        this.proxyHeaders = opts?.headers ?? {};
+        debug('Creating new HttpsProxyAgent instance: %o', this.proxy.href);
+        // Trim off the brackets from IPv6 addresses
+        const host = (this.proxy.hostname || this.proxy.host).replace(/^\[|\]$/g, '');
+        const port = this.proxy.port
+            ? parseInt(this.proxy.port, 10)
+            : this.proxy.protocol === 'https:'
+                ? 443
+                : 80;
+        this.connectOpts = {
+            // Attempt to negotiate http/1.1 for proxy servers that support http/2
+            ALPNProtocols: ['http/1.1'],
+            ...(opts ? omit(opts, 'headers') : null),
+            host,
+            port,
+        };
+    }
+    /**
+     * Called when the node-core HTTP client library is creating a
+     * new HTTP request.
+     */
+    async connect(req, opts) {
+        const { proxy } = this;
+        if (!opts.host) {
+            throw new TypeError('No "host" provided');
+        }
+        // Create a socket connection to the proxy server.
+        let socket;
+        if (proxy.protocol === 'https:') {
+            debug('Creating `tls.Socket`: %o', this.connectOpts);
+            const servername = this.connectOpts.servername || this.connectOpts.host;
+            socket = tls.connect({
+                ...this.connectOpts,
+                servername,
+            });
+        }
+        else {
+            debug('Creating `net.Socket`: %o', this.connectOpts);
+            socket = net.connect(this.connectOpts);
+        }
+        const headers = typeof this.proxyHeaders === 'function'
+            ? this.proxyHeaders()
+            : { ...this.proxyHeaders };
+        const host = net.isIPv6(opts.host) ? `[${opts.host}]` : opts.host;
+        let payload = `CONNECT ${host}:${opts.port} HTTP/1.1\r\n`;
+        // Inject the `Proxy-Authorization` header if necessary.
+        if (proxy.username || proxy.password) {
+            const auth = `${decodeURIComponent(proxy.username)}:${decodeURIComponent(proxy.password)}`;
+            headers['Proxy-Authorization'] = `Basic ${Buffer.from(auth).toString('base64')}`;
+        }
+        headers.Host = `${host}:${opts.port}`;
+        if (!headers['Proxy-Connection']) {
+            headers['Proxy-Connection'] = this.keepAlive
+                ? 'Keep-Alive'
+                : 'close';
+        }
+        for (const name of Object.keys(headers)) {
+            payload += `${name}: ${headers[name]}\r\n`;
+        }
+        const proxyResponsePromise = (0, parse_proxy_response_1.parseProxyResponse)(socket);
+        socket.write(`${payload}\r\n`);
+        const { connect, buffered } = await proxyResponsePromise;
+        req.emit('proxyConnect', connect);
+        this.emit('proxyConnect', connect, req);
+        if (connect.statusCode === 200) {
+            req.once('socket', resume);
+            if (opts.secureEndpoint) {
+                // The proxy is connecting to a TLS server, so upgrade
+                // this socket connection to a TLS connection.
+                debug('Upgrading socket connection to TLS');
+                const servername = opts.servername || opts.host;
+                return tls.connect({
+                    ...omit(opts, 'host', 'path', 'port'),
+                    socket,
+                    servername,
+                });
+            }
+            return socket;
+        }
+        // Some other status code that's not 200... need to re-play the HTTP
+        // header "data" events onto the socket once the HTTP machinery is
+        // attached so that the node core `http` can parse and handle the
+        // error status code.
+        // The original socket is closed, and a new "fake" socket is returned
+        // instead, so that the proxy doesn't get the HTTP request
+        // written to it (which may contain `Authorization` headers or other
+        // sensitive data).
+        //
+        // See: https://hackerone.com/reports/541502
+        socket.destroy();
+        const fakeSocket = new net.Socket({ writable: false });
+        fakeSocket.readable = true;
+        // Need to wait for the "socket" event to re-play the "data" events.
+        req.once('socket', (s) => {
+            debug('Replaying proxy buffer for failed request');
+            (0, assert_1.default)(s.listenerCount('data') > 0);
+            // Replay the "buffered" Buffer onto the fake `socket`, since at
+            // this point the HTTP module machinery has been hooked up for
+            // the user.
+            s.push(buffered);
+            s.push(null);
+        });
+        return fakeSocket;
+    }
+}
+HttpsProxyAgent.protocols = ['http', 'https'];
+exports.HttpsProxyAgent = HttpsProxyAgent;
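+// A minimal usage sketch of the agent above (illustrative only; assumes a proxy
+// reachable at proxy.example.com:3128 and the package's public entry point):
+/*
+const https = require('https');
+const { HttpsProxyAgent } = require('https-proxy-agent');
+
+const agent = new HttpsProxyAgent('http://proxy.example.com:3128');
+// The agent issues `CONNECT example.com:443` to the proxy, then upgrades the
+// tunneled socket to TLS before the GET request itself is written.
+https.get('https://example.com', { agent }, (res) => {
+  console.log(res.statusCode);
+});
+*/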
+function resume(socket) {
+    socket.resume();
+}
+function omit(obj, ...keys) {
+    const ret = {};
+    let key;
+    for (key in obj) {
+        if (!keys.includes(key)) {
+            ret[key] = obj[key];
+        }
+    }
+    return ret;
+}
+//# sourceMappingURL=index.js.map
+
+/***/ }),
+
+/***/ 1031:
+/***/ (function(__unused_webpack_module, exports, __nccwpck_require__) {
+
+
+var __importDefault = (this && this.__importDefault) || function (mod) {
+    return (mod && mod.__esModule) ? mod : { "default": mod };
+};
+Object.defineProperty(exports, "__esModule", ({ value: true }));
+exports.parseProxyResponse = void 0;
+const debug_1 = __importDefault(__nccwpck_require__(8441));
+const debug = (0, debug_1.default)('https-proxy-agent:parse-proxy-response');
+function parseProxyResponse(socket) {
+    return new Promise((resolve, reject) => {
+        // We need to buffer any HTTP traffic exchanged with the proxy before the
+        // CONNECT response arrives, so that if the response is anything other than a
+        // "200" status code we can re-play the "data" events on the socket once the
+        // HTTP parser is hooked up...
+        let buffersLength = 0;
+        const buffers = [];
+        function read() {
+            const b = socket.read();
+            if (b)
+                ondata(b);
+            else
+                socket.once('readable', read);
+        }
+        function cleanup() {
+            socket.removeListener('end', onend);
+            socket.removeListener('error', onerror);
+            socket.removeListener('readable', read);
+        }
+        function onend() {
+            cleanup();
+            debug('onend');
+            reject(new Error('Proxy connection ended before receiving CONNECT response'));
+        }
+        function onerror(err) {
+            cleanup();
+            debug('onerror %o', err);
+            reject(err);
+        }
+        function ondata(b) {
+            buffers.push(b);
+            buffersLength += b.length;
+            const buffered = Buffer.concat(buffers, buffersLength);
+            const endOfHeaders = buffered.indexOf('\r\n\r\n');
+            if (endOfHeaders === -1) {
+                // keep buffering
+                debug('have not received end of HTTP headers yet...');
+                read();
+                return;
+            }
+            const headerParts = buffered
+                .slice(0, endOfHeaders)
+                .toString('ascii')
+                .split('\r\n');
+            const firstLine = headerParts.shift();
+            if (!firstLine) {
+                socket.destroy();
+                return reject(new Error('No header received from proxy CONNECT response'));
+            }
+            const firstLineParts = firstLine.split(' ');
+            const statusCode = +firstLineParts[1];
+            const statusText = firstLineParts.slice(2).join(' ');
+            const headers = {};
+            for (const header of headerParts) {
+                if (!header)
+                    continue;
+                const firstColon = header.indexOf(':');
+                if (firstColon === -1) {
+                    socket.destroy();
+                    return reject(new Error(`Invalid header from proxy CONNECT response: "${header}"`));
+                }
+                const key = header.slice(0, firstColon).toLowerCase();
+                const value = header.slice(firstColon + 1).trimStart();
+                const current = headers[key];
+                if (typeof current === 'string') {
+                    headers[key] = [current, value];
+                }
+                else if (Array.isArray(current)) {
+                    current.push(value);
+                }
+                else {
+                    headers[key] = value;
+                }
+            }
+            debug('got proxy server response: %o %o', firstLine, headers);
+            cleanup();
+            resolve({
+                connect: {
+                    statusCode,
+                    statusText,
+                    headers,
+                },
+                buffered,
+            });
+        }
+        socket.on('error', onerror);
+        socket.on('end', onend);
+        read();
+    });
+}
+exports.parseProxyResponse = parseProxyResponse;
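+// Illustrative sketch of what parseProxyResponse() resolves for a successful
+// tunnel; the response text below is an assumed example, not fixed output:
+//   raw bytes from the proxy:
+//     "HTTP/1.1 200 Connection established\r\nVia: 1.1 proxy\r\n\r\n"
+//   resolved value:
+//     { connect: { statusCode: 200, statusText: 'Connection established',
+//                  headers: { via: '1.1 proxy' } },
+//       buffered: <Buffer ...> }  // everything read so far; replayed on failure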
+//# sourceMappingURL=parse-proxy-response.js.map
+
+/***/ }),
+
+/***/ 9234:
+/***/ ((__unused_webpack_module, exports) => {
+
+// TODO: handle reviver/dehydrate functions like normal,
+// and handle indentation, like normal.
+// If anyone needs this, please send a pull request.
+
+exports.stringify = function stringify (o) {
+  if('undefined' == typeof o) return o
+
+  if(o && Buffer.isBuffer(o))
+    return JSON.stringify(':base64:' + o.toString('base64'))
+
+  if(o && o.toJSON)
+    o =  o.toJSON()
+
+  if(o && 'object' === typeof o) {
+    var s = ''
+    var array = Array.isArray(o)
+    s = array ? '[' : '{'
+    var first = true
+
+    for(var k in o) {
+      var ignore = 'function' == typeof o[k] || (!array && 'undefined' === typeof o[k])
+      if(Object.hasOwnProperty.call(o, k) && !ignore) {
+        if(!first)
+          s += ','
+        first = false
+        if (array) {
+          if(o[k] == undefined)
+            s += 'null'
+          else
+            s += stringify(o[k])
+        } else if (o[k] !== void(0)) {
+          s += stringify(k) + ':' + stringify(o[k])
+        }
+      }
+    }
+
+    s += array ? ']' : '}'
+
+    return s
+  } else if ('string' === typeof o) {
+    return JSON.stringify(/^:/.test(o) ? ':' + o : o)
+  } else if ('undefined' === typeof o) {
+    return 'null';
+  } else
+    return JSON.stringify(o)
+}
+
+exports.parse = function (s) {
+  return JSON.parse(s, function (key, value) {
+    if('string' === typeof value) {
+      if(/^:base64:/.test(value))
+        return Buffer.from(value.substring(8), 'base64')
+      else
+        return /^:/.test(value) ? value.substring(1) : value 
+    }
+    return value
+  })
+}
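+// Illustrative round-trip sketch for the encoding above (assumed values):
+/*
+const s = exports.stringify({ raw: Buffer.from('hi'), note: ':starts-with-colon' });
+// => '{"raw":":base64:aGk=","note":"::starts-with-colon"}'
+//    Buffers become ":base64:<data>"; ordinary strings that already start with
+//    ":" gain an extra ":" so parse() can tell the two cases apart.
+exports.parse(s).raw.toString(); // => 'hi'
+*/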
+
+
+/***/ }),
+
+/***/ 8832:
+/***/ ((module, __unused_webpack_exports, __nccwpck_require__) => {
+
+
+
+const EventEmitter = __nccwpck_require__(4434);
+const JSONB = __nccwpck_require__(9234);
+
+const loadStore = options => {
+	const adapters = {
+		redis: '@keyv/redis',
+		rediss: '@keyv/redis',
+		mongodb: '@keyv/mongo',
+		mongo: '@keyv/mongo',
+		sqlite: '@keyv/sqlite',
+		postgresql: '@keyv/postgres',
+		postgres: '@keyv/postgres',
+		mysql: '@keyv/mysql',
+		etcd: '@keyv/etcd',
+		offline: '@keyv/offline',
+		tiered: '@keyv/tiered',
+	};
+	if (options.adapter || options.uri) {
+		const adapter = options.adapter || /^[^:+]*/.exec(options.uri)[0];
+		return new (__WEBPACK_EXTERNAL_createRequire(import.meta.url)(adapters[adapter]))(options);
+	}
+
+	return new Map();
+};
+
+const iterableAdapters = [
+	'sqlite',
+	'postgres',
+	'mysql',
+	'mongo',
+	'redis',
+	'tiered',
+];
+
+class Keyv extends EventEmitter {
+	constructor(uri, {emitErrors = true, ...options} = {}) {
+		super();
+		this.opts = {
+			namespace: 'keyv',
+			serialize: JSONB.stringify,
+			deserialize: JSONB.parse,
+			...((typeof uri === 'string') ? {uri} : uri),
+			...options,
+		};
+
+		if (!this.opts.store) {
+			const adapterOptions = {...this.opts};
+			this.opts.store = loadStore(adapterOptions);
+		}
+
+		if (this.opts.compression) {
+			const compression = this.opts.compression;
+			this.opts.serialize = compression.serialize.bind(compression);
+			this.opts.deserialize = compression.deserialize.bind(compression);
+		}
+
+		if (typeof this.opts.store.on === 'function' && emitErrors) {
+			this.opts.store.on('error', error => this.emit('error', error));
+		}
+
+		this.opts.store.namespace = this.opts.namespace;
+
+		const generateIterator = iterator => async function * () {
+			for await (const [key, raw] of typeof iterator === 'function'
+				? iterator(this.opts.store.namespace)
+				: iterator) {
+				const data = await this.opts.deserialize(raw);
+				if (this.opts.store.namespace && !key.includes(this.opts.store.namespace)) {
+					continue;
+				}
+
+				if (typeof data.expires === 'number' && Date.now() > data.expires) {
+					this.delete(key);
+					continue;
+				}
+
+				yield [this._getKeyUnprefix(key), data.value];
+			}
+		};
+
+		// Attach iterators
+		if (typeof this.opts.store[Symbol.iterator] === 'function' && this.opts.store instanceof Map) {
+			this.iterator = generateIterator(this.opts.store);
+		} else if (typeof this.opts.store.iterator === 'function' && this.opts.store.opts
+			&& this._checkIterableAdaptar()) {
+			this.iterator = generateIterator(this.opts.store.iterator.bind(this.opts.store));
+		}
+	}
+
+	_checkIterableAdaptar() {
+		return iterableAdapters.includes(this.opts.store.opts.dialect)
+			|| iterableAdapters.findIndex(element => this.opts.store.opts.url.includes(element)) >= 0;
+	}
+
+	_getKeyPrefix(key) {
+		return `${this.opts.namespace}:${key}`;
+	}
+
+	_getKeyPrefixArray(keys) {
+		return keys.map(key => `${this.opts.namespace}:${key}`);
+	}
+
+	_getKeyUnprefix(key) {
+		return key
+			.split(':')
+			.splice(1)
+			.join(':');
+	}
+
+	get(key, options) {
+		const {store} = this.opts;
+		const isArray = Array.isArray(key);
+		const keyPrefixed = isArray ? this._getKeyPrefixArray(key) : this._getKeyPrefix(key);
+		if (isArray && store.getMany === undefined) {
+			const promises = [];
+			for (const key of keyPrefixed) {
+				promises.push(Promise.resolve()
+					.then(() => store.get(key))
+					.then(data => (typeof data === 'string') ? this.opts.deserialize(data) : (this.opts.compression ? this.opts.deserialize(data) : data))
+					.then(data => {
+						if (data === undefined || data === null) {
+							return undefined;
+						}
+
+						if (typeof data.expires === 'number' && Date.now() > data.expires) {
+							return this.delete(key).then(() => undefined);
+						}
+
+						return (options && options.raw) ? data : data.value;
+					}),
+				);
+			}
+
+			return Promise.allSettled(promises)
+				.then(values => {
+					const data = [];
+					for (const value of values) {
+						data.push(value.value);
+					}
+
+					return data;
+				});
+		}
+
+		return Promise.resolve()
+			.then(() => isArray ? store.getMany(keyPrefixed) : store.get(keyPrefixed))
+			.then(data => (typeof data === 'string') ? this.opts.deserialize(data) : (this.opts.compression ? this.opts.deserialize(data) : data))
+			.then(data => {
+				if (data === undefined || data === null) {
+					return undefined;
+				}
+
+				if (isArray) {
+					return data.map((row, index) => {
+						if ((typeof row === 'string')) {
+							row = this.opts.deserialize(row);
+						}
+
+						if (row === undefined || row === null) {
+							return undefined;
+						}
+
+						if (typeof row.expires === 'number' && Date.now() > row.expires) {
+							this.delete(key[index]).then(() => undefined);
+							return undefined;
+						}
+
+						return (options && options.raw) ? row : row.value;
+					});
+				}
+
+				if (typeof data.expires === 'number' && Date.now() > data.expires) {
+					return this.delete(key).then(() => undefined);
+				}
+
+				return (options && options.raw) ? data : data.value;
+			});
+	}
+
+	set(key, value, ttl) {
+		const keyPrefixed = this._getKeyPrefix(key);
+		if (typeof ttl === 'undefined') {
+			ttl = this.opts.ttl;
+		}
+
+		if (ttl === 0) {
+			ttl = undefined;
+		}
+
+		const {store} = this.opts;
+
+		return Promise.resolve()
+			.then(() => {
+				const expires = (typeof ttl === 'number') ? (Date.now() + ttl) : null;
+				if (typeof value === 'symbol') {
+					this.emit('error', 'symbol cannot be serialized');
+				}
+
+				value = {value, expires};
+				return this.opts.serialize(value);
+			})
+			.then(value => store.set(keyPrefixed, value, ttl))
+			.then(() => true);
+	}
+
+	delete(key) {
+		const {store} = this.opts;
+		if (Array.isArray(key)) {
+			const keyPrefixed = this._getKeyPrefixArray(key);
+			if (store.deleteMany === undefined) {
+				const promises = [];
+				for (const key of keyPrefixed) {
+					promises.push(store.delete(key));
+				}
+
+				return Promise.allSettled(promises)
+					.then(values => values.every(x => x.value === true));
+			}
+
+			return Promise.resolve()
+				.then(() => store.deleteMany(keyPrefixed));
+		}
+
+		const keyPrefixed = this._getKeyPrefix(key);
+		return Promise.resolve()
+			.then(() => store.delete(keyPrefixed));
+	}
+
+	clear() {
+		const {store} = this.opts;
+		return Promise.resolve()
+			.then(() => store.clear());
+	}
+
+	has(key) {
+		const keyPrefixed = this._getKeyPrefix(key);
+		const {store} = this.opts;
+		return Promise.resolve()
+			.then(async () => {
+				if (typeof store.has === 'function') {
+					return store.has(keyPrefixed);
+				}
+
+				const value = await store.get(keyPrefixed);
+				return value !== undefined;
+			});
+	}
+
+	disconnect() {
+		const {store} = this.opts;
+		if (typeof store.disconnect === 'function') {
+			return store.disconnect();
+		}
+	}
+}
+
+module.exports = Keyv;
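+// Minimal usage sketch (assumes the in-memory Map store that is used when no
+// adapter or uri is given; TTLs are in milliseconds):
+/*
+const keyv = new Keyv({ namespace: 'example' });
+await keyv.set('token', 'abc', 5000); // stored as 'example:token', expires in 5s
+await keyv.get('token');              // => 'abc' (undefined once expired)
+await keyv.delete('token');           // => true
+*/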
+
+
+/***/ }),
+
+/***/ 9804:
+/***/ ((module) => {
+
+
+
+// We define these manually to ensure they're always copied
+// even if they would move up the prototype chain
+// https://nodejs.org/api/http.html#http_class_http_incomingmessage
+const knownProperties = [
+	'aborted',
+	'complete',
+	'headers',
+	'httpVersion',
+	'httpVersionMinor',
+	'httpVersionMajor',
+	'method',
+	'rawHeaders',
+	'rawTrailers',
+	'setTimeout',
+	'socket',
+	'statusCode',
+	'statusMessage',
+	'trailers',
+	'url'
+];
+
+module.exports = (fromStream, toStream) => {
+	if (toStream._readableState.autoDestroy) {
+		throw new Error('The second stream must have the `autoDestroy` option set to `false`');
+	}
+
+	const fromProperties = new Set(Object.keys(fromStream).concat(knownProperties));
+
+	const properties = {};
+
+	for (const property of fromProperties) {
+		// Don't overwrite existing properties.
+		if (property in toStream) {
+			continue;
+		}
+
+		properties[property] = {
+			get() {
+				const value = fromStream[property];
+				const isFunction = typeof value === 'function';
+
+				return isFunction ? value.bind(fromStream) : value;
+			},
+			set(value) {
+				fromStream[property] = value;
+			},
+			enumerable: true,
+			configurable: false
+		};
+	}
+
+	Object.defineProperties(toStream, properties);
+
+	fromStream.once('aborted', () => {
+		toStream.destroy();
+
+		toStream.emit('aborted');
+	});
+
+	fromStream.once('close', () => {
+		if (fromStream.complete) {
+			if (toStream.readable) {
+				toStream.once('end', () => {
+					toStream.emit('close');
+				});
+			} else {
+				toStream.emit('close');
+			}
+		} else {
+			toStream.emit('close');
+		}
+	});
+
+	return toStream;
+};
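+// Illustrative sketch (assumes a PassThrough target created with
+// `autoDestroy: false`, as required by the check above):
+/*
+const http = require('http');
+const { PassThrough } = require('stream');
+const mimicResponse = module.exports;
+
+http.get('http://example.com', (response) => {
+  const clone = new PassThrough({ autoDestroy: false });
+  mimicResponse(response, clone); // clone now proxies statusCode, headers, etc.
+  response.pipe(clone);
+});
+*/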
+
+
+/***/ }),
+
+/***/ 6274:
+/***/ ((module, __unused_webpack_exports, __nccwpck_require__) => {
+
+module.exports = minimatch
+minimatch.Minimatch = Minimatch
+
+var path = (function () { try { return __nccwpck_require__(6928) } catch (e) {}}()) || {
+  sep: '/'
+}
+minimatch.sep = path.sep
+
+var GLOBSTAR = minimatch.GLOBSTAR = Minimatch.GLOBSTAR = {}
+var expand = __nccwpck_require__(5612)
+
+var plTypes = {
+  '!': { open: '(?:(?!(?:', close: '))[^/]*?)'},
+  '?': { open: '(?:', close: ')?' },
+  '+': { open: '(?:', close: ')+' },
+  '*': { open: '(?:', close: ')*' },
+  '@': { open: '(?:', close: ')' }
+}
+
+// any single thing other than /
+// don't need to escape / when using new RegExp()
+var qmark = '[^/]'
+
+// * => any number of characters
+var star = qmark + '*?'
+
+// ** when dots are allowed.  Anything goes, except .. and .
+// not (^ or / followed by one or two dots followed by $ or /),
+// followed by anything, any number of times.
+var twoStarDot = '(?:(?!(?:\\\/|^)(?:\\.{1,2})($|\\\/)).)*?'
+
+// not a ^ or / followed by a dot,
+// followed by anything, any number of times.
+var twoStarNoDot = '(?:(?!(?:\\\/|^)\\.).)*?'
+
+// characters that need to be escaped in RegExp.
+var reSpecials = charSet('().*{}+?[]^$\\!')
+
+// "abc" -> { a:true, b:true, c:true }
+function charSet (s) {
+  return s.split('').reduce(function (set, c) {
+    set[c] = true
+    return set
+  }, {})
+}
+
+// normalizes slashes.
+var slashSplit = /\/+/
+
+minimatch.filter = filter
+function filter (pattern, options) {
+  options = options || {}
+  return function (p, i, list) {
+    return minimatch(p, pattern, options)
+  }
+}
+
+function ext (a, b) {
+  b = b || {}
+  var t = {}
+  Object.keys(a).forEach(function (k) {
+    t[k] = a[k]
+  })
+  Object.keys(b).forEach(function (k) {
+    t[k] = b[k]
+  })
+  return t
+}
+
+minimatch.defaults = function (def) {
+  if (!def || typeof def !== 'object' || !Object.keys(def).length) {
+    return minimatch
+  }
+
+  var orig = minimatch
+
+  var m = function minimatch (p, pattern, options) {
+    return orig(p, pattern, ext(def, options))
+  }
+
+  m.Minimatch = function Minimatch (pattern, options) {
+    return new orig.Minimatch(pattern, ext(def, options))
+  }
+  m.Minimatch.defaults = function defaults (options) {
+    return orig.defaults(ext(def, options)).Minimatch
+  }
+
+  m.filter = function filter (pattern, options) {
+    return orig.filter(pattern, ext(def, options))
+  }
+
+  m.defaults = function defaults (options) {
+    return orig.defaults(ext(def, options))
+  }
+
+  m.makeRe = function makeRe (pattern, options) {
+    return orig.makeRe(pattern, ext(def, options))
+  }
+
+  m.braceExpand = function braceExpand (pattern, options) {
+    return orig.braceExpand(pattern, ext(def, options))
+  }
+
+  m.match = function (list, pattern, options) {
+    return orig.match(list, pattern, ext(def, options))
+  }
+
+  return m
+}
+
+Minimatch.defaults = function (def) {
+  return minimatch.defaults(def).Minimatch
+}
+
+function minimatch (p, pattern, options) {
+  assertValidPattern(pattern)
+
+  if (!options) options = {}
+
+  // shortcut: comments match nothing.
+  if (!options.nocomment && pattern.charAt(0) === '#') {
+    return false
+  }
+
+  return new Minimatch(pattern, options).match(p)
+}
+
+function Minimatch (pattern, options) {
+  if (!(this instanceof Minimatch)) {
+    return new Minimatch(pattern, options)
+  }
+
+  assertValidPattern(pattern)
+
+  if (!options) options = {}
+
+  pattern = pattern.trim()
+
+  // windows support: need to use /, not \
+  if (!options.allowWindowsEscape && path.sep !== '/') {
+    pattern = pattern.split(path.sep).join('/')
+  }
+
+  this.options = options
+  this.set = []
+  this.pattern = pattern
+  this.regexp = null
+  this.negate = false
+  this.comment = false
+  this.empty = false
+  this.partial = !!options.partial
+
+  // make the set of regexps etc.
+  this.make()
+}
+
+Minimatch.prototype.debug = function () {}
+
+Minimatch.prototype.make = make
+function make () {
+  var pattern = this.pattern
+  var options = this.options
+
+  // empty patterns and comments match nothing.
+  if (!options.nocomment && pattern.charAt(0) === '#') {
+    this.comment = true
+    return
+  }
+  if (!pattern) {
+    this.empty = true
+    return
+  }
+
+  // step 1: figure out negation, etc.
+  this.parseNegate()
+
+  // step 2: expand braces
+  var set = this.globSet = this.braceExpand()
+
+  if (options.debug) this.debug = function debug() { console.error.apply(console, arguments) }
+
+  this.debug(this.pattern, set)
+
+  // step 3: now we have a set, so turn each one into a series of path-portion
+  // matching patterns.
+  // These will be regexps, except in the case of "**", which is
+  // set to the GLOBSTAR object for globstar behavior,
+  // and will not contain any / characters
+  set = this.globParts = set.map(function (s) {
+    return s.split(slashSplit)
+  })
+
+  this.debug(this.pattern, set)
+
+  // glob --> regexps
+  set = set.map(function (s, si, set) {
+    return s.map(this.parse, this)
+  }, this)
+
+  this.debug(this.pattern, set)
+
+  // filter out everything that didn't compile properly.
+  set = set.filter(function (s) {
+    return s.indexOf(false) === -1
+  })
+
+  this.debug(this.pattern, set)
+
+  this.set = set
+}
+
+Minimatch.prototype.parseNegate = parseNegate
+function parseNegate () {
+  var pattern = this.pattern
+  var negate = false
+  var options = this.options
+  var negateOffset = 0
+
+  if (options.nonegate) return
+
+  for (var i = 0, l = pattern.length
+    ; i < l && pattern.charAt(i) === '!'
+    ; i++) {
+    negate = !negate
+    negateOffset++
+  }
+
+  if (negateOffset) this.pattern = pattern.substr(negateOffset)
+  this.negate = negate
+}
+
+// Brace expansion:
+// a{b,c}d -> abd acd
+// a{b,}c -> abc ac
+// a{0..3}d -> a0d a1d a2d a3d
+// a{b,c{d,e}f}g -> abg acdfg acefg
+// a{b,c}d{e,f}g -> abdeg abdfg acdeg acdfg
+//
+// Invalid sets are not expanded.
+// a{2..}b -> a{2..}b
+// a{b}c -> a{b}c
+minimatch.braceExpand = function (pattern, options) {
+  return braceExpand(pattern, options)
+}
+
+Minimatch.prototype.braceExpand = braceExpand
+
+function braceExpand (pattern, options) {
+  if (!options) {
+    if (this instanceof Minimatch) {
+      options = this.options
+    } else {
+      options = {}
+    }
+  }
+
+  pattern = typeof pattern === 'undefined'
+    ? this.pattern : pattern
+
+  assertValidPattern(pattern)
+
+  // Thanks to Yeting Li <https://github.com/yetingli> for
+  // improving this regexp to avoid a ReDOS vulnerability.
+  if (options.nobrace || !/\{(?:(?!\{).)*\}/.test(pattern)) {
+    // shortcut. no need to expand.
+    return [pattern]
+  }
+
+  return expand(pattern)
+}
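+// Illustrative expansions (matching the worked examples in the comment above):
+//   braceExpand('a{b,c}d')  => ['abd', 'acd']
+//   braceExpand('a{0..3}d') => ['a0d', 'a1d', 'a2d', 'a3d']
+//   braceExpand('a{2..}b')  => ['a{2..}b']   // invalid sets are left untouched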
+
+var MAX_PATTERN_LENGTH = 1024 * 64
+var assertValidPattern = function (pattern) {
+  if (typeof pattern !== 'string') {
+    throw new TypeError('invalid pattern')
+  }
+
+  if (pattern.length > MAX_PATTERN_LENGTH) {
+    throw new TypeError('pattern is too long')
+  }
+}
+
+// parse a component of the expanded set.
+// At this point, no pattern may contain "/" in it
+// so we're going to return a 2d array, where each entry is the full
+// pattern, split on '/', and then turned into a regular expression.
+// A regexp is made at the end which joins each array with an
+// escaped /, and another full one which joins each regexp with |.
+//
+// Following the lead of Bash 4.1, note that "**" only has special meaning
+// when it is the *only* thing in a path portion.  Otherwise, any series
+// of * is equivalent to a single *.  Globstar behavior is enabled by
+// default, and can be disabled by setting options.noglobstar.
+Minimatch.prototype.parse = parse
+var SUBPARSE = {}
+function parse (pattern, isSub) {
+  assertValidPattern(pattern)
+
+  var options = this.options
+
+  // shortcuts
+  if (pattern === '**') {
+    if (!options.noglobstar)
+      return GLOBSTAR
+    else
+      pattern = '*'
+  }
+  if (pattern === '') return ''
+
+  var re = ''
+  var hasMagic = !!options.nocase
+  var escaping = false
+  // ? => one single character
+  var patternListStack = []
+  var negativeLists = []
+  var stateChar
+  var inClass = false
+  var reClassStart = -1
+  var classStart = -1
+  // . and .. never match anything that doesn't start with .,
+  // even when options.dot is set.
+  var patternStart = pattern.charAt(0) === '.' ? '' // anything
+  // not (start or / followed by . or .. followed by / or end)
+  : options.dot ? '(?!(?:^|\\\/)\\.{1,2}(?:$|\\\/))'
+  : '(?!\\.)'
+  var self = this
+
+  function clearStateChar () {
+    if (stateChar) {
+      // we had some state-tracking character
+      // that wasn't consumed by this pass.
+      switch (stateChar) {
+        case '*':
+          re += star
+          hasMagic = true
+        break
+        case '?':
+          re += qmark
+          hasMagic = true
+        break
+        default:
+          re += '\\' + stateChar
+        break
+      }
+      self.debug('clearStateChar %j %j', stateChar, re)
+      stateChar = false
+    }
+  }
+
+  for (var i = 0, len = pattern.length, c
+    ; (i < len) && (c = pattern.charAt(i))
+    ; i++) {
+    this.debug('%s\t%s %s %j', pattern, i, re, c)
+
+    // skip over any that are escaped.
+    if (escaping && reSpecials[c]) {
+      re += '\\' + c
+      escaping = false
+      continue
+    }
+
+    switch (c) {
+      /* istanbul ignore next */
+      case '/': {
+        // completely not allowed, even escaped.
+        // Should already be path-split by now.
+        return false
+      }
+
+      case '\\':
+        clearStateChar()
+        escaping = true
+      continue
+
+      // the various stateChar values
+      // for the "extglob" stuff.
+      case '?':
+      case '*':
+      case '+':
+      case '@':
+      case '!':
+        this.debug('%s\t%s %s %j <-- stateChar', pattern, i, re, c)
+
+        // all of those are literals inside a class, except that
+        // the glob [!a] means [^a] in regexp
+        if (inClass) {
+          this.debug('  in class')
+          if (c === '!' && i === classStart + 1) c = '^'
+          re += c
+          continue
+        }
+
+        // if we already have a stateChar, then it means
+        // that there was something like ** or +? in there.
+        // Handle the stateChar, then proceed with this one.
+        self.debug('call clearStateChar %j', stateChar)
+        clearStateChar()
+        stateChar = c
+        // if extglob is disabled, then +(asdf|foo) isn't a thing.
+        // just clear the statechar *now*, rather than even diving into
+        // the patternList stuff.
+        if (options.noext) clearStateChar()
+      continue
+
+      case '(':
+        if (inClass) {
+          re += '('
+          continue
+        }
+
+        if (!stateChar) {
+          re += '\\('
+          continue
+        }
+
+        patternListStack.push({
+          type: stateChar,
+          start: i - 1,
+          reStart: re.length,
+          open: plTypes[stateChar].open,
+          close: plTypes[stateChar].close
+        })
+        // negation is (?:(?!js)[^/]*)
+        re += stateChar === '!' ? '(?:(?!(?:' : '(?:'
+        this.debug('plType %j %j', stateChar, re)
+        stateChar = false
+      continue
+
+      case ')':
+        if (inClass || !patternListStack.length) {
+          re += '\\)'
+          continue
+        }
+
+        clearStateChar()
+        hasMagic = true
+        var pl = patternListStack.pop()
+        // negation is (?:(?!js)[^/]*)
+        // The others are (?:<pattern>)<type>
+        re += pl.close
+        if (pl.type === '!') {
+          negativeLists.push(pl)
+        }
+        pl.reEnd = re.length
+      continue
+
+      case '|':
+        if (inClass || !patternListStack.length || escaping) {
+          re += '\\|'
+          escaping = false
+          continue
+        }
+
+        clearStateChar()
+        re += '|'
+      continue
+
+      // these are mostly the same in regexp and glob
+      case '[':
+        // swallow any state-tracking char before the [
+        clearStateChar()
+
+        if (inClass) {
+          re += '\\' + c
+          continue
+        }
+
+        inClass = true
+        classStart = i
+        reClassStart = re.length
+        re += c
+      continue
+
+      case ']':
+        //  a right bracket shall lose its special
+        //  meaning and represent itself in
+        //  a bracket expression if it occurs
+        //  first in the list.  -- POSIX.2 2.8.3.2
+        if (i === classStart + 1 || !inClass) {
+          re += '\\' + c
+          escaping = false
+          continue
+        }
+
+        // handle the case where we left a class open.
+        // "[z-a]" is valid, equivalent to "\[z-a\]"
+        // split where the last [ was, make sure we don't have
+        // an invalid re. if so, re-walk the contents of the
+        // would-be class to re-translate any characters that
+        // were passed through as-is
+        // TODO: It would probably be faster to determine this
+        // without a try/catch and a new RegExp, but it's tricky
+        // to do safely.  For now, this is safe and works.
+        var cs = pattern.substring(classStart + 1, i)
+        try {
+          RegExp('[' + cs + ']')
+        } catch (er) {
+          // not a valid class!
+          var sp = this.parse(cs, SUBPARSE)
+          re = re.substr(0, reClassStart) + '\\[' + sp[0] + '\\]'
+          hasMagic = hasMagic || sp[1]
+          inClass = false
+          continue
+        }
+
+        // finish up the class.
+        hasMagic = true
+        inClass = false
+        re += c
+      continue
+
+      default:
+        // swallow any state char that wasn't consumed
+        clearStateChar()
+
+        if (escaping) {
+          // no need
+          escaping = false
+        } else if (reSpecials[c]
+          && !(c === '^' && inClass)) {
+          re += '\\'
+        }
+
+        re += c
+
+    } // switch
+  } // for
+
+  // handle the case where we left a class open.
+  // "[abc" is valid, equivalent to "\[abc"
+  if (inClass) {
+    // split where the last [ was, and escape it
+    // this is a huge pita.  We now have to re-walk
+    // the contents of the would-be class to re-translate
+    // any characters that were passed through as-is
+    cs = pattern.substr(classStart + 1)
+    sp = this.parse(cs, SUBPARSE)
+    re = re.substr(0, reClassStart) + '\\[' + sp[0]
+    hasMagic = hasMagic || sp[1]
+  }
+
+  // handle the case where we had a +( thing at the *end*
+  // of the pattern.
+  // each pattern list stack adds 3 chars, and we need to go through
+  // and escape any | chars that were passed through as-is for the regexp.
+  // Go through and escape them, taking care not to double-escape any
+  // | chars that were already escaped.
+  for (pl = patternListStack.pop(); pl; pl = patternListStack.pop()) {
+    var tail = re.slice(pl.reStart + pl.open.length)
+    this.debug('setting tail', re, pl)
+    // maybe some even number of \, then maybe 1 \, followed by a |
+    tail = tail.replace(/((?:\\{2}){0,64})(\\?)\|/g, function (_, $1, $2) {
+      if (!$2) {
+        // the | isn't already escaped, so escape it.
+        $2 = '\\'
+      }
+
+      // need to escape all those slashes *again*, without escaping the
+      // one that we need for escaping the | character.  As it works out,
+      // escaping an even number of slashes can be done by simply repeating
+      // it exactly after itself.  That's why this trick works.
+      //
+      // I am sorry that you have to see this.
+      return $1 + $1 + $2 + '|'
+    })
+
+    this.debug('tail=%j\n   %s', tail, tail, pl, re)
+    var t = pl.type === '*' ? star
+      : pl.type === '?' ? qmark
+      : '\\' + pl.type
+
+    hasMagic = true
+    re = re.slice(0, pl.reStart) + t + '\\(' + tail
+  }
+
+  // handle trailing things that only matter at the very end.
+  clearStateChar()
+  if (escaping) {
+    // trailing \\
+    re += '\\\\'
+  }
+
+  // only need to apply the nodot start if the re starts with
+  // something that could conceivably capture a dot
+  var addPatternStart = false
+  switch (re.charAt(0)) {
+    case '[': case '.': case '(': addPatternStart = true
+  }
+
+  // Hack to work around lack of negative lookbehind in JS
+  // A pattern like: *.!(x).!(y|z) needs to ensure that a name
+  // like 'a.xyz.yz' doesn't match.  So, the first negative
+  // lookahead, has to look ALL the way ahead, to the end of
+  // the pattern.
+  for (var n = negativeLists.length - 1; n > -1; n--) {
+    var nl = negativeLists[n]
+
+    var nlBefore = re.slice(0, nl.reStart)
+    var nlFirst = re.slice(nl.reStart, nl.reEnd - 8)
+    var nlLast = re.slice(nl.reEnd - 8, nl.reEnd)
+    var nlAfter = re.slice(nl.reEnd)
+
+    nlLast += nlAfter
+
+    // Handle nested stuff like *(*.js|!(*.json)), where open parens
+    // mean that we should *not* include the ) in the bit that is considered
+    // "after" the negated section.
+    var openParensBefore = nlBefore.split('(').length - 1
+    var cleanAfter = nlAfter
+    for (i = 0; i < openParensBefore; i++) {
+      cleanAfter = cleanAfter.replace(/\)[+*?]?/, '')
+    }
+    nlAfter = cleanAfter
+
+    var dollar = ''
+    if (nlAfter === '' && isSub !== SUBPARSE) {
+      dollar = '$'
+    }
+    var newRe = nlBefore + nlFirst + nlAfter + dollar + nlLast
+    re = newRe
+  }
+
+  // if the re is not "" at this point, then we need to make sure
+  // it doesn't match against an empty path part.
+  // Otherwise a/* will match a/, which it should not.
+  if (re !== '' && hasMagic) {
+    re = '(?=.)' + re
+  }
+
+  if (addPatternStart) {
+    re = patternStart + re
+  }
+
+  // parsing just a piece of a larger pattern.
+  if (isSub === SUBPARSE) {
+    return [re, hasMagic]
+  }
+
+  // skip the regexp for non-magical patterns
+  // unescape anything in it, though, so that it'll be
+  // an exact match against a file etc.
+  if (!hasMagic) {
+    return globUnescape(pattern)
+  }
+
+  var flags = options.nocase ? 'i' : ''
+  try {
+    var regExp = new RegExp('^' + re + '$', flags)
+  } catch (er) /* istanbul ignore next - should be impossible */ {
+    // If it was an invalid regular expression, then it can't match
+    // anything.  This trick looks for a character after the end of
+    // the string, which is of course impossible, except in multi-line
+    // mode, but it's not a /m regex.
+    return new RegExp('$.')
+  }
+
+  regExp._glob = pattern
+  regExp._src = re
+
+  return regExp
+}
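+// Illustrative results of the per-component translation above (regex sources
+// are approximate; the exact lookaheads come from the branches in parse()):
+//   parse('*.js') -> a RegExp roughly equivalent to /^(?!\.)(?=.)[^\/]*?\.js$/
+//   parse('**')   -> GLOBSTAR, handled specially later by matchOne()
+//   parse('a**b') -> no globstar; the star run behaves like a plain '*', per the
+//                    Bash 4.1 note above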
+
+minimatch.makeRe = function (pattern, options) {
+  return new Minimatch(pattern, options || {}).makeRe()
+}
+
+Minimatch.prototype.makeRe = makeRe
+function makeRe () {
+  if (this.regexp || this.regexp === false) return this.regexp
+
+  // at this point, this.set is a 2d array of partial
+  // pattern strings, or "**".
+  //
+  // It's better to use .match().  This function shouldn't
+  // be used, really, but it's pretty convenient sometimes,
+  // when you just want to work with a regex.
+  var set = this.set
+
+  if (!set.length) {
+    this.regexp = false
+    return this.regexp
+  }
+  var options = this.options
+
+  var twoStar = options.noglobstar ? star
+    : options.dot ? twoStarDot
+    : twoStarNoDot
+  var flags = options.nocase ? 'i' : ''
+
+  var re = set.map(function (pattern) {
+    return pattern.map(function (p) {
+      return (p === GLOBSTAR) ? twoStar
+      : (typeof p === 'string') ? regExpEscape(p)
+      : p._src
+    }).join('\\\/')
+  }).join('|')
+
+  // must match entire pattern
+  // ending in a * or ** will make it less strict.
+  re = '^(?:' + re + ')$'
+
+  // can match anything, as long as it's not this.
+  if (this.negate) re = '^(?!' + re + ').*$'
+
+  try {
+    this.regexp = new RegExp(re, flags)
+  } catch (ex) /* istanbul ignore next - should be impossible */ {
+    this.regexp = false
+  }
+  return this.regexp
+}
+
+minimatch.match = function (list, pattern, options) {
+  options = options || {}
+  var mm = new Minimatch(pattern, options)
+  list = list.filter(function (f) {
+    return mm.match(f)
+  })
+  if (mm.options.nonull && !list.length) {
+    list.push(pattern)
+  }
+  return list
+}
+
+Minimatch.prototype.match = function match (f, partial) {
+  if (typeof partial === 'undefined') partial = this.partial
+  this.debug('match', f, this.pattern)
+  // short-circuit in the case of busted things.
+  // comments, etc.
+  if (this.comment) return false
+  if (this.empty) return f === ''
+
+  if (f === '/' && partial) return true
+
+  var options = this.options
+
+  // windows: need to use /, not \
+  if (path.sep !== '/') {
+    f = f.split(path.sep).join('/')
+  }
+
+  // treat the test path as a set of pathparts.
+  f = f.split(slashSplit)
+  this.debug(this.pattern, 'split', f)
+
+  // just ONE of the pattern sets in this.set needs to match
+  // in order for it to be valid.  If negating, then just one
+  // match means that we have failed.
+  // Either way, return on the first hit.
+
+  var set = this.set
+  this.debug(this.pattern, 'set', set)
+
+  // Find the basename of the path by looking for the last non-empty segment
+  var filename
+  var i
+  for (i = f.length - 1; i >= 0; i--) {
+    filename = f[i]
+    if (filename) break
+  }
+
+  for (i = 0; i < set.length; i++) {
+    var pattern = set[i]
+    var file = f
+    if (options.matchBase && pattern.length === 1) {
+      file = [filename]
+    }
+    var hit = this.matchOne(file, pattern, partial)
+    if (hit) {
+      if (options.flipNegate) return true
+      return !this.negate
+    }
+  }
+
+  // didn't get any hits.  this is success if it's a negative
+  // pattern, failure otherwise.
+  if (options.flipNegate) return false
+  return this.negate
+}
+
+// set partial to true to test if, for example,
+// "/a/b" matches the start of "/*/b/*/d"
+// Partial means, if you run out of file before you run
+// out of pattern, then that's fine, as long as all
+// the parts match.
+Minimatch.prototype.matchOne = function (file, pattern, partial) {
+  var options = this.options
+
+  this.debug('matchOne',
+    { 'this': this, file: file, pattern: pattern })
+
+  this.debug('matchOne', file.length, pattern.length)
+
+  for (var fi = 0,
+      pi = 0,
+      fl = file.length,
+      pl = pattern.length
+      ; (fi < fl) && (pi < pl)
+      ; fi++, pi++) {
+    this.debug('matchOne loop')
+    var p = pattern[pi]
+    var f = file[fi]
+
+    this.debug(pattern, p, f)
+
+    // should be impossible.
+    // some invalid regexp stuff in the set.
+    /* istanbul ignore if */
+    if (p === false) return false
+
+    if (p === GLOBSTAR) {
+      this.debug('GLOBSTAR', [pattern, p, f])
+
+      // "**"
+      // a/**/b/**/c would match the following:
+      // a/b/x/y/z/c
+      // a/x/y/z/b/c
+      // a/b/x/b/x/c
+      // a/b/c
+      // To do this, take the rest of the pattern after
+      // the **, and see if it would match the file remainder.
+      // If so, return success.
+      // If not, the ** "swallows" a segment, and try again.
+      // This is recursively awful.
+      //
+      // a/**/b/**/c matching a/b/x/y/z/c
+      // - a matches a
+      // - doublestar
+      //   - matchOne(b/x/y/z/c, b/**/c)
+      //     - b matches b
+      //     - doublestar
+      //       - matchOne(x/y/z/c, c) -> no
+      //       - matchOne(y/z/c, c) -> no
+      //       - matchOne(z/c, c) -> no
+      //       - matchOne(c, c) yes, hit
+      var fr = fi
+      var pr = pi + 1
+      if (pr === pl) {
+        this.debug('** at the end')
+        // a ** at the end will just swallow the rest.
+        // We have found a match.
+        // however, it will not swallow /.x, unless
+        // options.dot is set.
+        // . and .. are *never* matched by **, for explosively
+        // exponential reasons.
+        for (; fi < fl; fi++) {
+          if (file[fi] === '.' || file[fi] === '..' ||
+            (!options.dot && file[fi].charAt(0) === '.')) return false
+        }
+        return true
+      }
+
+      // ok, let's see if we can swallow whatever we can.
+      while (fr < fl) {
+        var swallowee = file[fr]
+
+        this.debug('\nglobstar while', file, fr, pattern, pr, swallowee)
+
+        // XXX remove this slice.  Just pass the start index.
+        if (this.matchOne(file.slice(fr), pattern.slice(pr), partial)) {
+          this.debug('globstar found match!', fr, fl, swallowee)
+          // found a match.
+          return true
+        } else {
+          // can't swallow "." or ".." ever.
+          // can only swallow ".foo" when explicitly asked.
+          if (swallowee === '.' || swallowee === '..' ||
+            (!options.dot && swallowee.charAt(0) === '.')) {
+            this.debug('dot detected!', file, fr, pattern, pr)
+            break
+          }
+
+          // ** swallows a segment, and continue.
+          this.debug('globstar swallow a segment, and continue')
+          fr++
+        }
+      }
+
+      // no match was found.
+      // However, in partial mode, we can't say this is necessarily over.
+      // If there's more *pattern* left but we've run out of file, a longer
+      // path could still match, so partial mode reports success here.
+      /* istanbul ignore if */
+      if (partial) {
+        // ran out of file
+        this.debug('\n>>> no match, partial?', file, fr, pattern, pr)
+        if (fr === fl) return true
+      }
+      return false
+    }
+
+    // something other than **
+    // non-magic patterns just have to match exactly
+    // patterns with magic have been turned into regexps.
+    var hit
+    if (typeof p === 'string') {
+      hit = f === p
+      this.debug('string match', p, f, hit)
+    } else {
+      hit = f.match(p)
+      this.debug('pattern match', p, f, hit)
+    }
+
+    if (!hit) return false
+  }
+
+  // Note: ending in / means that we'll get a final ""
+  // at the end of the pattern.  This can only match a
+  // corresponding "" at the end of the file.
+  // If the file ends in /, then it can only match a
+  // pattern that ends in /, unless the pattern just
+  // doesn't have any more for it. But, a/b/ should *not*
+  // match "a/b/*", even though "" matches against the
+  // [^/]*? pattern, except in partial mode, where it might
+  // simply not be reached yet.
+  // However, a/b/ should still satisfy a/*
+
+  // now either we fell off the end of the pattern, or we're done.
+  if (fi === fl && pi === pl) {
+    // ran out of pattern and filename at the same time.
+    // an exact hit!
+    return true
+  } else if (fi === fl) {
+    // ran out of file, but still had pattern left.
+    // this is ok if we're doing the match as part of
+    // a glob fs traversal.
+    return partial
+  } else /* istanbul ignore else */ if (pi === pl) {
+    // ran out of pattern, still have file left.
+    // this is only acceptable if we're on the very last
+    // empty segment of a file with a trailing slash.
+    // a/* should match a/b/
+    return (fi === fl - 1) && (file[fi] === '')
+  }
+
+  // should be unreachable.
+  /* istanbul ignore next */
+  throw new Error('wtf?')
+}
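+// Illustrative sketch tying the globstar walkthrough above to the public API:
+//   minimatch('a/b/x/y/z/c', 'a/**/b/**/c')           // => true: each ** swallows segments
+//   minimatch('a/.hidden/c', 'a/**/c')                // => false: dot segments need options.dot
+//   minimatch('a/.hidden/c', 'a/**/c', { dot: true }) // => true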
+
+// replace stuff like \* with *
+function globUnescape (s) {
+  return s.replace(/\\(.)/g, '$1')
+}
+
+function regExpEscape (s) {
+  return s.replace(/[-[\]{}()*+?.,\\^$|#\s]/g, '\\$&')
+}
+
+
+/***/ }),
+
+/***/ 2134:
+/***/ ((module) => {
+
+/**
+ * Helpers.
+ */
+
+var s = 1000;
+var m = s * 60;
+var h = m * 60;
+var d = h * 24;
+var w = d * 7;
+var y = d * 365.25;
+
+/**
+ * Parse or format the given `val`.
+ *
+ * Options:
+ *
+ *  - `long` verbose formatting [false]
+ *
+ * @param {String|Number} val
+ * @param {Object} [options]
+ * @throws {Error} throw an error if val is not a non-empty string or a number
+ * @return {String|Number}
+ * @api public
+ */
+
+module.exports = function (val, options) {
+  options = options || {};
+  var type = typeof val;
+  if (type === 'string' && val.length > 0) {
+    return parse(val);
+  } else if (type === 'number' && isFinite(val)) {
+    return options.long ? fmtLong(val) : fmtShort(val);
+  }
+  throw new Error(
+    'val is not a non-empty string or a valid number. val=' +
+      JSON.stringify(val)
+  );
+};
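+// Illustrative examples of the dual parse/format behavior described above:
+//   ms('2 days')              // => 172800000
+//   ms('1.5h')                // => 5400000
+//   ms(60000)                 // => '1m'
+//   ms(60000, { long: true }) // => '1 minute'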
+
+/**
+ * Parse the given `str` and return milliseconds.
+ *
+ * @param {String} str
+ * @return {Number}
+ * @api private
+ */
+
+function parse(str) {
+  str = String(str);
+  if (str.length > 100) {
+    return;
+  }
+  var match = /^(-?(?:\d+)?\.?\d+) *(milliseconds?|msecs?|ms|seconds?|secs?|s|minutes?|mins?|m|hours?|hrs?|h|days?|d|weeks?|w|years?|yrs?|y)?$/i.exec(
+    str
+  );
+  if (!match) {
+    return;
+  }
+  var n = parseFloat(match[1]);
+  var type = (match[2] || 'ms').toLowerCase();
+  switch (type) {
+    case 'years':
+    case 'year':
+    case 'yrs':
+    case 'yr':
+    case 'y':
+      return n * y;
+    case 'weeks':
+    case 'week':
+    case 'w':
+      return n * w;
+    case 'days':
+    case 'day':
+    case 'd':
+      return n * d;
+    case 'hours':
+    case 'hour':
+    case 'hrs':
+    case 'hr':
+    case 'h':
+      return n * h;
+    case 'minutes':
+    case 'minute':
+    case 'mins':
+    case 'min':
+    case 'm':
+      return n * m;
+    case 'seconds':
+    case 'second':
+    case 'secs':
+    case 'sec':
+    case 's':
+      return n * s;
+    case 'milliseconds':
+    case 'millisecond':
+    case 'msecs':
+    case 'msec':
+    case 'ms':
+      return n;
+    default:
+      return undefined;
+  }
+}
+
+/**
+ * Short format for `ms`.
+ *
+ * @param {Number} ms
+ * @return {String}
+ * @api private
+ */
+
+function fmtShort(ms) {
+  var msAbs = Math.abs(ms);
+  if (msAbs >= d) {
+    return Math.round(ms / d) + 'd';
+  }
+  if (msAbs >= h) {
+    return Math.round(ms / h) + 'h';
+  }
+  if (msAbs >= m) {
+    return Math.round(ms / m) + 'm';
+  }
+  if (msAbs >= s) {
+    return Math.round(ms / s) + 's';
+  }
+  return ms + 'ms';
+}
+
+/**
+ * Long format for `ms`.
+ *
+ * @param {Number} ms
+ * @return {String}
+ * @api private
+ */
+
+function fmtLong(ms) {
+  var msAbs = Math.abs(ms);
+  if (msAbs >= d) {
+    return plural(ms, msAbs, d, 'day');
+  }
+  if (msAbs >= h) {
+    return plural(ms, msAbs, h, 'hour');
+  }
+  if (msAbs >= m) {
+    return plural(ms, msAbs, m, 'minute');
+  }
+  if (msAbs >= s) {
+    return plural(ms, msAbs, s, 'second');
+  }
+  return ms + ' ms';
+}
+
+/**
+ * Pluralization helper.
+ */
+
+function plural(ms, msAbs, n, name) {
+  var isPlural = msAbs >= n * 1.5;
+  return Math.round(ms / n) + ' ' + name + (isPlural ? 's' : '');
+}
+
+
+/***/ }),
+
+/***/ 7326:
+/***/ ((module) => {
+
+
+
+class QuickLRU {
+	constructor(options = {}) {
+		if (!(options.maxSize && options.maxSize > 0)) {
+			throw new TypeError('`maxSize` must be a number greater than 0');
+		}
+
+		this.maxSize = options.maxSize;
+		this.onEviction = options.onEviction;
+		this.cache = new Map();
+		this.oldCache = new Map();
+		this._size = 0;
+	}
+
+	_set(key, value) {
+		this.cache.set(key, value);
+		this._size++;
+
+		if (this._size >= this.maxSize) {
+			this._size = 0;
+
+			if (typeof this.onEviction === 'function') {
+				for (const [key, value] of this.oldCache.entries()) {
+					this.onEviction(key, value);
+				}
+			}
+
+			this.oldCache = this.cache;
+			this.cache = new Map();
+		}
+	}
+
+	get(key) {
+		if (this.cache.has(key)) {
+			return this.cache.get(key);
+		}
+
+		if (this.oldCache.has(key)) {
+			const value = this.oldCache.get(key);
+			this.oldCache.delete(key);
+			this._set(key, value);
+			return value;
+		}
+	}
+
+	set(key, value) {
+		if (this.cache.has(key)) {
+			this.cache.set(key, value);
+		} else {
+			this._set(key, value);
+		}
+
+		return this;
+	}
+
+	has(key) {
+		return this.cache.has(key) || this.oldCache.has(key);
+	}
+
+	peek(key) {
+		if (this.cache.has(key)) {
+			return this.cache.get(key);
+		}
+
+		if (this.oldCache.has(key)) {
+			return this.oldCache.get(key);
+		}
+	}
+
+	delete(key) {
+		const deleted = this.cache.delete(key);
+		if (deleted) {
+			this._size--;
+		}
+
+		return this.oldCache.delete(key) || deleted;
+	}
+
+	clear() {
+		this.cache.clear();
+		this.oldCache.clear();
+		this._size = 0;
+	}
+
+	* keys() {
+		for (const [key] of this) {
+			yield key;
+		}
+	}
+
+	* values() {
+		for (const [, value] of this) {
+			yield value;
+		}
+	}
+
+	* [Symbol.iterator]() {
+		for (const item of this.cache) {
+			yield item;
+		}
+
+		for (const item of this.oldCache) {
+			const [key] = item;
+			if (!this.cache.has(key)) {
+				yield item;
+			}
+		}
+	}
+
+	get size() {
+		let oldCacheSize = 0;
+		for (const key of this.oldCache.keys()) {
+			if (!this.cache.has(key)) {
+				oldCacheSize++;
+			}
+		}
+
+		return Math.min(this._size + oldCacheSize, this.maxSize);
+	}
+}
+
+module.exports = QuickLRU;
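+// Illustrative sketch of the two-generation eviction above (maxSize of 2 is an
+// assumed example value):
+/*
+const lru = new QuickLRU({ maxSize: 2 });
+lru.set('a', 1);
+lru.set('b', 2); // hitting maxSize moves the filled `cache` into `oldCache`
+lru.get('a');    // => 1; found in the old generation and re-inserted into `cache`
+lru.set('c', 3); // triggers another rotation; 'b', never touched again, is dropped
+lru.has('b');    // => false
+lru.has('a');    // => true
+*/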
+
+
+/***/ }),
+
+/***/ 2072:
+/***/ ((module, __unused_webpack_exports, __nccwpck_require__) => {
+
+
+const tls = __nccwpck_require__(4756);
+
+module.exports = (options = {}, connect = tls.connect) => new Promise((resolve, reject) => {
+	let timeout = false;
+
+	let socket;
+
+	const callback = async () => {
+		await socketPromise;
+
+		socket.off('timeout', onTimeout);
+		socket.off('error', reject);
+
+		if (options.resolveSocket) {
+			resolve({alpnProtocol: socket.alpnProtocol, socket, timeout});
+
+			if (timeout) {
+				await Promise.resolve();
+				socket.emit('timeout');
+			}
+		} else {
+			socket.destroy();
+			resolve({alpnProtocol: socket.alpnProtocol, timeout});
+		}
+	};
+
+	const onTimeout = async () => {
+		timeout = true;
+		callback();
+	};
+
+	const socketPromise = (async () => {
+		try {
+			socket = await connect(options, callback);
+
+			socket.on('error', reject);
+			socket.once('timeout', onTimeout);
+		} catch (error) {
+			reject(error);
+		}
+	})();
+});
+
+
+/***/ }),
+
+/***/ 8804:
+/***/ ((module, exports) => {
+
+exports = module.exports = SemVer
+
+var debug
+/* istanbul ignore next */
+if (typeof process === 'object' &&
+    process.env &&
+    process.env.NODE_DEBUG &&
+    /\bsemver\b/i.test(process.env.NODE_DEBUG)) {
+  debug = function () {
+    var args = Array.prototype.slice.call(arguments, 0)
+    args.unshift('SEMVER')
+    console.log.apply(console, args)
+  }
+} else {
+  debug = function () {}
+}
+
+// Note: this is the semver.org version of the spec that it implements
+// Not necessarily the package version of this code.
+exports.SEMVER_SPEC_VERSION = '2.0.0'
+
+var MAX_LENGTH = 256
+var MAX_SAFE_INTEGER = Number.MAX_SAFE_INTEGER ||
+  /* istanbul ignore next */ 9007199254740991
+
+// Max safe segment length for coercion.
+var MAX_SAFE_COMPONENT_LENGTH = 16
+
+var MAX_SAFE_BUILD_LENGTH = MAX_LENGTH - 6
+
+// The actual regexps go on exports.re
+var re = exports.re = []
+var safeRe = exports.safeRe = []
+var src = exports.src = []
+var t = exports.tokens = {}
+var R = 0
+
+function tok (n) {
+  t[n] = R++
+}
+
+var LETTERDASHNUMBER = '[a-zA-Z0-9-]'
+
+// Replace some greedy regex tokens to prevent regex DoS issues. These regexes are
+// used internally via the safeRe object since all inputs in this library get
+// normalized first to trim and collapse all extra whitespace. The original
+// regexes are exported for userland consumption and lower level usage. A
+// future breaking change could export the safer regex only with a note that
+// all input should have extra whitespace removed.
+var safeRegexReplacements = [
+  ['\\s', 1],
+  ['\\d', MAX_LENGTH],
+  [LETTERDASHNUMBER, MAX_SAFE_BUILD_LENGTH],
+]
+
+function makeSafeRe (value) {
+  for (var i = 0; i < safeRegexReplacements.length; i++) {
+    var token = safeRegexReplacements[i][0]
+    var max = safeRegexReplacements[i][1]
+    value = value
+      .split(token + '*').join(token + '{0,' + max + '}')
+      .split(token + '+').join(token + '{1,' + max + '}')
+  }
+  return value
+}
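+// Illustrative example of the bounding performed above (the '\d+' source is the
+// loose numeric identifier defined below):
+//   makeSafeRe('(\\d+)')  // => '(\\d{1,256})'
+//   Unbounded '+' / '*' quantifiers are rewritten into explicit {1,N} / {0,N} ranges.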
+
+// The following Regular Expressions can be used for tokenizing,
+// validating, and parsing SemVer version strings.
+
+// ## Numeric Identifier
+// A single `0`, or a non-zero digit followed by zero or more digits.
+
+tok('NUMERICIDENTIFIER')
+src[t.NUMERICIDENTIFIER] = '0|[1-9]\\d*'
+tok('NUMERICIDENTIFIERLOOSE')
+src[t.NUMERICIDENTIFIERLOOSE] = '\\d+'
+
+// ## Non-numeric Identifier
+// Zero or more digits, followed by a letter or hyphen, and then zero or
+// more letters, digits, or hyphens.
+
+tok('NONNUMERICIDENTIFIER')
+src[t.NONNUMERICIDENTIFIER] = '\\d*[a-zA-Z-]' + LETTERDASHNUMBER + '*'
+
+// ## Main Version
+// Three dot-separated numeric identifiers.
+
+tok('MAINVERSION')
+src[t.MAINVERSION] = '(' + src[t.NUMERICIDENTIFIER] + ')\\.' +
+                   '(' + src[t.NUMERICIDENTIFIER] + ')\\.' +
+                   '(' + src[t.NUMERICIDENTIFIER] + ')'
+
+tok('MAINVERSIONLOOSE')
+src[t.MAINVERSIONLOOSE] = '(' + src[t.NUMERICIDENTIFIERLOOSE] + ')\\.' +
+                        '(' + src[t.NUMERICIDENTIFIERLOOSE] + ')\\.' +
+                        '(' + src[t.NUMERICIDENTIFIERLOOSE] + ')'
+
+// ## Pre-release Version Identifier
+// A numeric identifier, or a non-numeric identifier.
+
+tok('PRERELEASEIDENTIFIER')
+src[t.PRERELEASEIDENTIFIER] = '(?:' + src[t.NUMERICIDENTIFIER] +
+                            '|' + src[t.NONNUMERICIDENTIFIER] + ')'
+
+tok('PRERELEASEIDENTIFIERLOOSE')
+src[t.PRERELEASEIDENTIFIERLOOSE] = '(?:' + src[t.NUMERICIDENTIFIERLOOSE] +
+                                 '|' + src[t.NONNUMERICIDENTIFIER] + ')'
+
+// ## Pre-release Version
+// Hyphen, followed by one or more dot-separated pre-release version
+// identifiers.
+
+tok('PRERELEASE')
+src[t.PRERELEASE] = '(?:-(' + src[t.PRERELEASEIDENTIFIER] +
+                  '(?:\\.' + src[t.PRERELEASEIDENTIFIER] + ')*))'
+
+tok('PRERELEASELOOSE')
+src[t.PRERELEASELOOSE] = '(?:-?(' + src[t.PRERELEASEIDENTIFIERLOOSE] +
+                       '(?:\\.' + src[t.PRERELEASEIDENTIFIERLOOSE] + ')*))'
+
+// ## Build Metadata Identifier
+// Any combination of digits, letters, or hyphens.
+
+tok('BUILDIDENTIFIER')
+src[t.BUILDIDENTIFIER] = LETTERDASHNUMBER + '+'
+
+// ## Build Metadata
+// Plus sign, followed by one or more period-separated build metadata
+// identifiers.
+
+tok('BUILD')
+src[t.BUILD] = '(?:\\+(' + src[t.BUILDIDENTIFIER] +
+             '(?:\\.' + src[t.BUILDIDENTIFIER] + ')*))'
+
+// ## Full Version String
+// A main version, followed optionally by a pre-release version and
+// build metadata.
+
+// Note that only the major, minor, patch, and pre-release sections of
+// the version string are capturing groups.  The build metadata is not a
+// capturing group, because it should not ever be used in version
+// comparison.
+
+tok('FULL')
+tok('FULLPLAIN')
+src[t.FULLPLAIN] = 'v?' + src[t.MAINVERSION] +
+                  src[t.PRERELEASE] + '?' +
+                  src[t.BUILD] + '?'
+
+src[t.FULL] = '^' + src[t.FULLPLAIN] + '$'
+
+// Like FULL, but allows v1.2.3 and =1.2.3, which people do sometimes.
+// Also allows 1.0.0alpha1 (a prerelease without the hyphen), which is
+// pretty common in the npm registry.
+tok('LOOSEPLAIN')
+src[t.LOOSEPLAIN] = '[v=\\s]*' + src[t.MAINVERSIONLOOSE] +
+                  src[t.PRERELEASELOOSE] + '?' +
+                  src[t.BUILD] + '?'
+
+tok('LOOSE')
+src[t.LOOSE] = '^' + src[t.LOOSEPLAIN] + '$'
+
+tok('GTLT')
+src[t.GTLT] = '((?:<|>)?=?)'
+
+// Something like "2.*" or "1.2.x".
+// Note that "x.x" is a valid xRange identifer, meaning "any version"
+// Only the first item is strictly required.
+tok('XRANGEIDENTIFIERLOOSE')
+src[t.XRANGEIDENTIFIERLOOSE] = src[t.NUMERICIDENTIFIERLOOSE] + '|x|X|\\*'
+tok('XRANGEIDENTIFIER')
+src[t.XRANGEIDENTIFIER] = src[t.NUMERICIDENTIFIER] + '|x|X|\\*'
+
+tok('XRANGEPLAIN')
+src[t.XRANGEPLAIN] = '[v=\\s]*(' + src[t.XRANGEIDENTIFIER] + ')' +
+                   '(?:\\.(' + src[t.XRANGEIDENTIFIER] + ')' +
+                   '(?:\\.(' + src[t.XRANGEIDENTIFIER] + ')' +
+                   '(?:' + src[t.PRERELEASE] + ')?' +
+                   src[t.BUILD] + '?' +
+                   ')?)?'
+
+tok('XRANGEPLAINLOOSE')
+src[t.XRANGEPLAINLOOSE] = '[v=\\s]*(' + src[t.XRANGEIDENTIFIERLOOSE] + ')' +
+                        '(?:\\.(' + src[t.XRANGEIDENTIFIERLOOSE] + ')' +
+                        '(?:\\.(' + src[t.XRANGEIDENTIFIERLOOSE] + ')' +
+                        '(?:' + src[t.PRERELEASELOOSE] + ')?' +
+                        src[t.BUILD] + '?' +
+                        ')?)?'
+
+tok('XRANGE')
+src[t.XRANGE] = '^' + src[t.GTLT] + '\\s*' + src[t.XRANGEPLAIN] + '$'
+tok('XRANGELOOSE')
+src[t.XRANGELOOSE] = '^' + src[t.GTLT] + '\\s*' + src[t.XRANGEPLAINLOOSE] + '$'
+
+// Coercion.
+// Extract anything that could conceivably be a part of a valid semver
+tok('COERCE')
+src[t.COERCE] = '(^|[^\\d])' +
+              '(\\d{1,' + MAX_SAFE_COMPONENT_LENGTH + '})' +
+              '(?:\\.(\\d{1,' + MAX_SAFE_COMPONENT_LENGTH + '}))?' +
+              '(?:\\.(\\d{1,' + MAX_SAFE_COMPONENT_LENGTH + '}))?' +
+              '(?:$|[^\\d])'
+tok('COERCERTL')
+re[t.COERCERTL] = new RegExp(src[t.COERCE], 'g')
+safeRe[t.COERCERTL] = new RegExp(makeSafeRe(src[t.COERCE]), 'g')
+
+// Tilde ranges.
+// Meaning is "reasonably at or greater than"
+tok('LONETILDE')
+src[t.LONETILDE] = '(?:~>?)'
+
+tok('TILDETRIM')
+src[t.TILDETRIM] = '(\\s*)' + src[t.LONETILDE] + '\\s+'
+re[t.TILDETRIM] = new RegExp(src[t.TILDETRIM], 'g')
+safeRe[t.TILDETRIM] = new RegExp(makeSafeRe(src[t.TILDETRIM]), 'g')
+var tildeTrimReplace = '$1~'
+
+tok('TILDE')
+src[t.TILDE] = '^' + src[t.LONETILDE] + src[t.XRANGEPLAIN] + '$'
+tok('TILDELOOSE')
+src[t.TILDELOOSE] = '^' + src[t.LONETILDE] + src[t.XRANGEPLAINLOOSE] + '$'
+
+// Caret ranges.
+// Meaning is "at least and backwards compatible with"
+tok('LONECARET')
+src[t.LONECARET] = '(?:\\^)'
+
+tok('CARETTRIM')
+src[t.CARETTRIM] = '(\\s*)' + src[t.LONECARET] + '\\s+'
+re[t.CARETTRIM] = new RegExp(src[t.CARETTRIM], 'g')
+safeRe[t.CARETTRIM] = new RegExp(makeSafeRe(src[t.CARETTRIM]), 'g')
+var caretTrimReplace = '$1^'
+
+tok('CARET')
+src[t.CARET] = '^' + src[t.LONECARET] + src[t.XRANGEPLAIN] + '$'
+tok('CARETLOOSE')
+src[t.CARETLOOSE] = '^' + src[t.LONECARET] + src[t.XRANGEPLAINLOOSE] + '$'
+
+// A simple gt/lt/eq thing, or just "" to indicate "any version"
+tok('COMPARATORLOOSE')
+src[t.COMPARATORLOOSE] = '^' + src[t.GTLT] + '\\s*(' + src[t.LOOSEPLAIN] + ')$|^$'
+tok('COMPARATOR')
+src[t.COMPARATOR] = '^' + src[t.GTLT] + '\\s*(' + src[t.FULLPLAIN] + ')$|^$'
+
+// An expression to strip any whitespace between the gtlt and the thing
+// it modifies, so that `> 1.2.3` ==> `>1.2.3`
+tok('COMPARATORTRIM')
+src[t.COMPARATORTRIM] = '(\\s*)' + src[t.GTLT] +
+                      '\\s*(' + src[t.LOOSEPLAIN] + '|' + src[t.XRANGEPLAIN] + ')'
+
+// this one has to use the /g flag
+re[t.COMPARATORTRIM] = new RegExp(src[t.COMPARATORTRIM], 'g')
+safeRe[t.COMPARATORTRIM] = new RegExp(makeSafeRe(src[t.COMPARATORTRIM]), 'g')
+var comparatorTrimReplace = '$1$2$3'
+
+// Something like `1.2.3 - 1.2.4`
+// Note that these all use the loose form, because they'll be
+// checked against either the strict or loose comparator form
+// later.
+tok('HYPHENRANGE')
+src[t.HYPHENRANGE] = '^\\s*(' + src[t.XRANGEPLAIN] + ')' +
+                   '\\s+-\\s+' +
+                   '(' + src[t.XRANGEPLAIN] + ')' +
+                   '\\s*$'
+
+tok('HYPHENRANGELOOSE')
+src[t.HYPHENRANGELOOSE] = '^\\s*(' + src[t.XRANGEPLAINLOOSE] + ')' +
+                        '\\s+-\\s+' +
+                        '(' + src[t.XRANGEPLAINLOOSE] + ')' +
+                        '\\s*$'
+
+// Star ranges basically just allow anything at all.
+tok('STAR')
+src[t.STAR] = '(<|>)?=?\\s*\\*'
+
+// Compile to actual regexp objects.
+// All are flag-free, unless they were created above with a flag.
+for (var i = 0; i < R; i++) {
+  debug(i, src[i])
+  if (!re[i]) {
+    re[i] = new RegExp(src[i])
+
+    // Replace all greedy whitespace tokens to prevent regex DoS issues. These
+    // regexes are used internally via the safeRe object, since all inputs in
+    // this library get normalized first to trim and collapse extra whitespace.
+    // The original regexes are exported for userland consumption and lower
+    // level usage. A future breaking change could export only the safer
+    // regexes, with a note that all input should have extra whitespace removed.
+    safeRe[i] = new RegExp(makeSafeRe(src[i]))
+  }
+}
+
+exports.parse = parse
+function parse (version, options) {
+  if (!options || typeof options !== 'object') {
+    options = {
+      loose: !!options,
+      includePrerelease: false
+    }
+  }
+
+  if (version instanceof SemVer) {
+    return version
+  }
+
+  if (typeof version !== 'string') {
+    return null
+  }
+
+  if (version.length > MAX_LENGTH) {
+    return null
+  }
+
+  var r = options.loose ? safeRe[t.LOOSE] : safeRe[t.FULL]
+  if (!r.test(version)) {
+    return null
+  }
+
+  try {
+    return new SemVer(version, options)
+  } catch (er) {
+    return null
+  }
+}
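+
+// Illustrative usage (not executed here), based on the documented semver API:
+//   parse('1.2.3-alpha.1')  // => SemVer { major: 1, minor: 2, patch: 3, prerelease: ['alpha', 1] }
+//   parse('not a version')  // => null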
+
+exports.valid = valid
+function valid (version, options) {
+  var v = parse(version, options)
+  return v ? v.version : null
+}
+
+exports.clean = clean
+function clean (version, options) {
+  var s = parse(version.trim().replace(/^[=v]+/, ''), options)
+  return s ? s.version : null
+}
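+
+// Example (illustrative, not executed): clean('  =v1.2.3  ')  // => '1.2.3'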
+
+exports.SemVer = SemVer
+
+function SemVer (version, options) {
+  if (!options || typeof options !== 'object') {
+    options = {
+      loose: !!options,
+      includePrerelease: false
+    }
+  }
+  if (version instanceof SemVer) {
+    if (version.loose === options.loose) {
+      return version
+    } else {
+      version = version.version
+    }
+  } else if (typeof version !== 'string') {
+    throw new TypeError('Invalid Version: ' + version)
+  }
+
+  if (version.length > MAX_LENGTH) {
+    throw new TypeError('version is longer than ' + MAX_LENGTH + ' characters')
+  }
+
+  if (!(this instanceof SemVer)) {
+    return new SemVer(version, options)
+  }
+
+  debug('SemVer', version, options)
+  this.options = options
+  this.loose = !!options.loose
+
+  var m = version.trim().match(options.loose ? safeRe[t.LOOSE] : safeRe[t.FULL])
+
+  if (!m) {
+    throw new TypeError('Invalid Version: ' + version)
+  }
+
+  this.raw = version
+
+  // these are actually numbers
+  this.major = +m[1]
+  this.minor = +m[2]
+  this.patch = +m[3]
+
+  if (this.major > MAX_SAFE_INTEGER || this.major < 0) {
+    throw new TypeError('Invalid major version')
+  }
+
+  if (this.minor > MAX_SAFE_INTEGER || this.minor < 0) {
+    throw new TypeError('Invalid minor version')
+  }
+
+  if (this.patch > MAX_SAFE_INTEGER || this.patch < 0) {
+    throw new TypeError('Invalid patch version')
+  }
+
+  // numberify any prerelease numeric ids
+  if (!m[4]) {
+    this.prerelease = []
+  } else {
+    this.prerelease = m[4].split('.').map(function (id) {
+      if (/^[0-9]+$/.test(id)) {
+        var num = +id
+        if (num >= 0 && num < MAX_SAFE_INTEGER) {
+          return num
+        }
+      }
+      return id
+    })
+  }
+
+  this.build = m[5] ? m[5].split('.') : []
+  this.format()
+}
+
+SemVer.prototype.format = function () {
+  this.version = this.major + '.' + this.minor + '.' + this.patch
+  if (this.prerelease.length) {
+    this.version += '-' + this.prerelease.join('.')
+  }
+  return this.version
+}
+
+SemVer.prototype.toString = function () {
+  return this.version
+}
+
+SemVer.prototype.compare = function (other) {
+  debug('SemVer.compare', this.version, this.options, other)
+  if (!(other instanceof SemVer)) {
+    other = new SemVer(other, this.options)
+  }
+
+  return this.compareMain(other) || this.comparePre(other)
+}
+
+SemVer.prototype.compareMain = function (other) {
+  if (!(other instanceof SemVer)) {
+    other = new SemVer(other, this.options)
+  }
+
+  return compareIdentifiers(this.major, other.major) ||
+         compareIdentifiers(this.minor, other.minor) ||
+         compareIdentifiers(this.patch, other.patch)
+}
+
+SemVer.prototype.comparePre = function (other) {
+  if (!(other instanceof SemVer)) {
+    other = new SemVer(other, this.options)
+  }
+
+  // NOT having a prerelease is > having one
+  if (this.prerelease.length && !other.prerelease.length) {
+    return -1
+  } else if (!this.prerelease.length && other.prerelease.length) {
+    return 1
+  } else if (!this.prerelease.length && !other.prerelease.length) {
+    return 0
+  }
+
+  var i = 0
+  do {
+    var a = this.prerelease[i]
+    var b = other.prerelease[i]
+    debug('prerelease compare', i, a, b)
+    if (a === undefined && b === undefined) {
+      return 0
+    } else if (b === undefined) {
+      return 1
+    } else if (a === undefined) {
+      return -1
+    } else if (a === b) {
+      continue
+    } else {
+      return compareIdentifiers(a, b)
+    }
+  } while (++i)
+}
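+
+// Illustrative precedence implied by comparePre (per the SemVer spec):
+//   1.0.0-alpha < 1.0.0-alpha.1 < 1.0.0-beta.2 < 1.0.0-beta.11 < 1.0.0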
+
+SemVer.prototype.compareBuild = function (other) {
+  if (!(other instanceof SemVer)) {
+    other = new SemVer(other, this.options)
+  }
+
+  var i = 0
+  do {
+    var a = this.build[i]
+    var b = other.build[i]
+    debug('build compare', i, a, b)
+    if (a === undefined && b === undefined) {
+      return 0
+    } else if (b === undefined) {
+      return 1
+    } else if (a === undefined) {
+      return -1
+    } else if (a === b) {
+      continue
+    } else {
+      return compareIdentifiers(a, b)
+    }
+  } while (++i)
+}
+
+// preminor will bump the version up to the next minor release, and immediately
+// down to pre-release. premajor and prepatch work the same way.
+SemVer.prototype.inc = function (release, identifier) {
+  switch (release) {
+    case 'premajor':
+      this.prerelease.length = 0
+      this.patch = 0
+      this.minor = 0
+      this.major++
+      this.inc('pre', identifier)
+      break
+    case 'preminor':
+      this.prerelease.length = 0
+      this.patch = 0
+      this.minor++
+      this.inc('pre', identifier)
+      break
+    case 'prepatch':
+      // If this is already a prerelease, it will bump to the next version and
+      // drop any prereleases that might already exist, since they are not
+      // relevant at this point.
+      this.prerelease.length = 0
+      this.inc('patch', identifier)
+      this.inc('pre', identifier)
+      break
+    // If the input is a non-prerelease version, this acts the same as
+    // prepatch.
+    case 'prerelease':
+      if (this.prerelease.length === 0) {
+        this.inc('patch', identifier)
+      }
+      this.inc('pre', identifier)
+      break
+
+    case 'major':
+      // If this is a pre-major version, bump up to the same major version.
+      // Otherwise increment major.
+      // 1.0.0-5 bumps to 1.0.0
+      // 1.1.0 bumps to 2.0.0
+      if (this.minor !== 0 ||
+          this.patch !== 0 ||
+          this.prerelease.length === 0) {
+        this.major++
+      }
+      this.minor = 0
+      this.patch = 0
+      this.prerelease = []
+      break
+    case 'minor':
+      // If this is a pre-minor version, bump up to the same minor version.
+      // Otherwise increment minor.
+      // 1.2.0-5 bumps to 1.2.0
+      // 1.2.1 bumps to 1.3.0
+      if (this.patch !== 0 || this.prerelease.length === 0) {
+        this.minor++
+      }
+      this.patch = 0
+      this.prerelease = []
+      break
+    case 'patch':
+      // If this is not a pre-release version, it will increment the patch.
+      // If it is a pre-release it will bump up to the same patch version.
+      // 1.2.0-5 patches to 1.2.0
+      // 1.2.0 patches to 1.2.1
+      if (this.prerelease.length === 0) {
+        this.patch++
+      }
+      this.prerelease = []
+      break
+    // This probably shouldn't be used publicly.
+    // 1.0.0 "pre" would become 1.0.0-0 which is the wrong direction.
+    case 'pre':
+      if (this.prerelease.length === 0) {
+        this.prerelease = [0]
+      } else {
+        var i = this.prerelease.length
+        while (--i >= 0) {
+          if (typeof this.prerelease[i] === 'number') {
+            this.prerelease[i]++
+            i = -2
+          }
+        }
+        if (i === -1) {
+          // didn't increment anything
+          this.prerelease.push(0)
+        }
+      }
+      if (identifier) {
+        // 1.2.0-beta.1 bumps to 1.2.0-beta.2,
+        // 1.2.0-beta.fooblz or 1.2.0-beta bumps to 1.2.0-beta.0
+        if (this.prerelease[0] === identifier) {
+          if (isNaN(this.prerelease[1])) {
+            this.prerelease = [identifier, 0]
+          }
+        } else {
+          this.prerelease = [identifier, 0]
+        }
+      }
+      break
+
+    default:
+      throw new Error('invalid increment argument: ' + release)
+  }
+  this.format()
+  this.raw = this.version
+  return this
+}
+
+exports.inc = inc
+function inc (version, release, loose, identifier) {
+  if (typeof (loose) === 'string') {
+    identifier = loose
+    loose = undefined
+  }
+
+  try {
+    return new SemVer(version, loose).inc(release, identifier).version
+  } catch (er) {
+    return null
+  }
+}
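+
+// Illustrative usage (not executed here):
+//   inc('1.2.3', 'minor')               // => '1.3.0'
+//   inc('1.2.3', 'prerelease', 'beta')  // => '1.2.4-beta.0'
+//   inc('1.2.0-beta.1', 'prerelease')   // => '1.2.0-beta.2'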
+
+exports.diff = diff
+function diff (version1, version2) {
+  if (eq(version1, version2)) {
+    return null
+  } else {
+    var v1 = parse(version1)
+    var v2 = parse(version2)
+    var prefix = ''
+    if (v1.prerelease.length || v2.prerelease.length) {
+      prefix = 'pre'
+      var defaultResult = 'prerelease'
+    }
+    for (var key in v1) {
+      if (key === 'major' || key === 'minor' || key === 'patch') {
+        if (v1[key] !== v2[key]) {
+          return prefix + key
+        }
+      }
+    }
+    return defaultResult // may be undefined
+  }
+}
+
+exports.compareIdentifiers = compareIdentifiers
+
+var numeric = /^[0-9]+$/
+function compareIdentifiers (a, b) {
+  var anum = numeric.test(a)
+  var bnum = numeric.test(b)
+
+  if (anum && bnum) {
+    a = +a
+    b = +b
+  }
+
+  return a === b ? 0
+    : (anum && !bnum) ? -1
+    : (bnum && !anum) ? 1
+    : a < b ? -1
+    : 1
+}
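+
+// Illustrative behavior (not executed here):
+//   compareIdentifiers('2', '10')     // => -1  (numeric identifiers compare numerically)
+//   compareIdentifiers('alpha', '1')  // => 1   (numeric identifiers sort before alphanumeric ones)
+//   compareIdentifiers('a', 'a')      // => 0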
+
+exports.rcompareIdentifiers = rcompareIdentifiers
+function rcompareIdentifiers (a, b) {
+  return compareIdentifiers(b, a)
+}
+
+exports.major = major
+function major (a, loose) {
+  return new SemVer(a, loose).major
+}
+
+exports.minor = minor
+function minor (a, loose) {
+  return new SemVer(a, loose).minor
+}
+
+exports.patch = patch
+function patch (a, loose) {
+  return new SemVer(a, loose).patch
+}
+
+exports.compare = compare
+function compare (a, b, loose) {
+  return new SemVer(a, loose).compare(new SemVer(b, loose))
+}
+
+exports.compareLoose = compareLoose
+function compareLoose (a, b) {
+  return compare(a, b, true)
+}
+
+exports.compareBuild = compareBuild
+function compareBuild (a, b, loose) {
+  var versionA = new SemVer(a, loose)
+  var versionB = new SemVer(b, loose)
+  return versionA.compare(versionB) || versionA.compareBuild(versionB)
+}
+
+exports.rcompare = rcompare
+function rcompare (a, b, loose) {
+  return compare(b, a, loose)
+}
+
+exports.sort = sort
+function sort (list, loose) {
+  return list.sort(function (a, b) {
+    return exports.compareBuild(a, b, loose)
+  })
+}
+
+exports.rsort = rsort
+function rsort (list, loose) {
+  return list.sort(function (a, b) {
+    return exports.compareBuild(b, a, loose)
+  })
+}
+
+exports.gt = gt
+function gt (a, b, loose) {
+  return compare(a, b, loose) > 0
+}
+
+exports.lt = lt
+function lt (a, b, loose) {
+  return compare(a, b, loose) < 0
+}
+
+exports.eq = eq
+function eq (a, b, loose) {
+  return compare(a, b, loose) === 0
+}
+
+exports.neq = neq
+function neq (a, b, loose) {
+  return compare(a, b, loose) !== 0
+}
+
+exports.gte = gte
+function gte (a, b, loose) {
+  return compare(a, b, loose) >= 0
+}
+
+exports.lte = lte
+function lte (a, b, loose) {
+  return compare(a, b, loose) <= 0
+}
+
+exports.cmp = cmp
+function cmp (a, op, b, loose) {
+  switch (op) {
+    case '===':
+      if (typeof a === 'object')
+        a = a.version
+      if (typeof b === 'object')
+        b = b.version
+      return a === b
+
+    case '!==':
+      if (typeof a === 'object')
+        a = a.version
+      if (typeof b === 'object')
+        b = b.version
+      return a !== b
+
+    case '':
+    case '=':
+    case '==':
+      return eq(a, b, loose)
+
+    case '!=':
+      return neq(a, b, loose)
+
+    case '>':
+      return gt(a, b, loose)
+
+    case '>=':
+      return gte(a, b, loose)
+
+    case '<':
+      return lt(a, b, loose)
+
+    case '<=':
+      return lte(a, b, loose)
+
+    default:
+      throw new TypeError('Invalid operator: ' + op)
+  }
+}
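+
+// Illustrative usage (not executed here):
+//   cmp('1.2.3', '>=', '1.0.0')  // => true
+//   cmp('1.2.3', '!=', '1.2.3')  // => false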
+
+exports.Comparator = Comparator
+function Comparator (comp, options) {
+  if (!options || typeof options !== 'object') {
+    options = {
+      loose: !!options,
+      includePrerelease: false
+    }
+  }
+
+  if (comp instanceof Comparator) {
+    if (comp.loose === !!options.loose) {
+      return comp
+    } else {
+      comp = comp.value
+    }
+  }
+
+  if (!(this instanceof Comparator)) {
+    return new Comparator(comp, options)
+  }
+
+  comp = comp.trim().split(/\s+/).join(' ')
+  debug('comparator', comp, options)
+  this.options = options
+  this.loose = !!options.loose
+  this.parse(comp)
+
+  if (this.semver === ANY) {
+    this.value = ''
+  } else {
+    this.value = this.operator + this.semver.version
+  }
+
+  debug('comp', this)
+}
+
+var ANY = {}
+Comparator.prototype.parse = function (comp) {
+  var r = this.options.loose ? safeRe[t.COMPARATORLOOSE] : safeRe[t.COMPARATOR]
+  var m = comp.match(r)
+
+  if (!m) {
+    throw new TypeError('Invalid comparator: ' + comp)
+  }
+
+  this.operator = m[1] !== undefined ? m[1] : ''
+  if (this.operator === '=') {
+    this.operator = ''
+  }
+
+  // if it literally is just '>' or '' then allow anything.
+  if (!m[2]) {
+    this.semver = ANY
+  } else {
+    this.semver = new SemVer(m[2], this.options.loose)
+  }
+}
+
+Comparator.prototype.toString = function () {
+  return this.value
+}
+
+Comparator.prototype.test = function (version) {
+  debug('Comparator.test', version, this.options.loose)
+
+  if (this.semver === ANY || version === ANY) {
+    return true
+  }
+
+  if (typeof version === 'string') {
+    try {
+      version = new SemVer(version, this.options)
+    } catch (er) {
+      return false
+    }
+  }
+
+  return cmp(version, this.operator, this.semver, this.options)
+}
+
+Comparator.prototype.intersects = function (comp, options) {
+  if (!(comp instanceof Comparator)) {
+    throw new TypeError('a Comparator is required')
+  }
+
+  if (!options || typeof options !== 'object') {
+    options = {
+      loose: !!options,
+      includePrerelease: false
+    }
+  }
+
+  var rangeTmp
+
+  if (this.operator === '') {
+    if (this.value === '') {
+      return true
+    }
+    rangeTmp = new Range(comp.value, options)
+    return satisfies(this.value, rangeTmp, options)
+  } else if (comp.operator === '') {
+    if (comp.value === '') {
+      return true
+    }
+    rangeTmp = new Range(this.value, options)
+    return satisfies(comp.semver, rangeTmp, options)
+  }
+
+  var sameDirectionIncreasing =
+    (this.operator === '>=' || this.operator === '>') &&
+    (comp.operator === '>=' || comp.operator === '>')
+  var sameDirectionDecreasing =
+    (this.operator === '<=' || this.operator === '<') &&
+    (comp.operator === '<=' || comp.operator === '<')
+  var sameSemVer = this.semver.version === comp.semver.version
+  var differentDirectionsInclusive =
+    (this.operator === '>=' || this.operator === '<=') &&
+    (comp.operator === '>=' || comp.operator === '<=')
+  var oppositeDirectionsLessThan =
+    cmp(this.semver, '<', comp.semver, options) &&
+    ((this.operator === '>=' || this.operator === '>') &&
+    (comp.operator === '<=' || comp.operator === '<'))
+  var oppositeDirectionsGreaterThan =
+    cmp(this.semver, '>', comp.semver, options) &&
+    ((this.operator === '<=' || this.operator === '<') &&
+    (comp.operator === '>=' || comp.operator === '>'))
+
+  return sameDirectionIncreasing || sameDirectionDecreasing ||
+    (sameSemVer && differentDirectionsInclusive) ||
+    oppositeDirectionsLessThan || oppositeDirectionsGreaterThan
+}
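+
+// Illustrative behavior (not executed here):
+//   new Comparator('>=1.2.3').intersects(new Comparator('<2.0.0'))  // => true
+//   new Comparator('>2.0.0').intersects(new Comparator('<1.0.0'))   // => false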
+
+exports.Range = Range
+function Range (range, options) {
+  if (!options || typeof options !== 'object') {
+    options = {
+      loose: !!options,
+      includePrerelease: false
+    }
+  }
+
+  if (range instanceof Range) {
+    if (range.loose === !!options.loose &&
+        range.includePrerelease === !!options.includePrerelease) {
+      return range
+    } else {
+      return new Range(range.raw, options)
+    }
+  }
+
+  if (range instanceof Comparator) {
+    return new Range(range.value, options)
+  }
+
+  if (!(this instanceof Range)) {
+    return new Range(range, options)
+  }
+
+  this.options = options
+  this.loose = !!options.loose
+  this.includePrerelease = !!options.includePrerelease
+
+  // First reduce all whitespace as much as possible so we do not have to rely
+  // on potentially slow regexes like \s*. This is then stored and used for
+  // future error messages as well.
+  this.raw = range
+    .trim()
+    .split(/\s+/)
+    .join(' ')
+
+  // First, split on || (boolean OR)
+  this.set = this.raw.split('||').map(function (range) {
+    return this.parseRange(range.trim())
+  }, this).filter(function (c) {
+    // throw out any that are not relevant for whatever reason
+    return c.length
+  })
+
+  if (!this.set.length) {
+    throw new TypeError('Invalid SemVer Range: ' + this.raw)
+  }
+
+  this.format()
+}
+
+Range.prototype.format = function () {
+  this.range = this.set.map(function (comps) {
+    return comps.join(' ').trim()
+  }).join('||').trim()
+  return this.range
+}
+
+Range.prototype.toString = function () {
+  return this.range
+}
+
+Range.prototype.parseRange = function (range) {
+  var loose = this.options.loose
+  // `1.2.3 - 1.2.4` => `>=1.2.3 <=1.2.4`
+  var hr = loose ? safeRe[t.HYPHENRANGELOOSE] : safeRe[t.HYPHENRANGE]
+  range = range.replace(hr, hyphenReplace)
+  debug('hyphen replace', range)
+  // `> 1.2.3 < 1.2.5` => `>1.2.3 <1.2.5`
+  range = range.replace(safeRe[t.COMPARATORTRIM], comparatorTrimReplace)
+  debug('comparator trim', range, safeRe[t.COMPARATORTRIM])
+
+  // `~ 1.2.3` => `~1.2.3`
+  range = range.replace(safeRe[t.TILDETRIM], tildeTrimReplace)
+
+  // `^ 1.2.3` => `^1.2.3`
+  range = range.replace(safeRe[t.CARETTRIM], caretTrimReplace)
+
+  // normalize spaces
+  range = range.split(/\s+/).join(' ')
+
+  // At this point, the range is completely trimmed and
+  // ready to be split into comparators.
+
+  var compRe = loose ? safeRe[t.COMPARATORLOOSE] : safeRe[t.COMPARATOR]
+  var set = range.split(' ').map(function (comp) {
+    return parseComparator(comp, this.options)
+  }, this).join(' ').split(/\s+/)
+  if (this.options.loose) {
+    // in loose mode, throw out any that are not valid comparators
+    set = set.filter(function (comp) {
+      return !!comp.match(compRe)
+    })
+  }
+  set = set.map(function (comp) {
+    return new Comparator(comp, this.options)
+  }, this)
+
+  return set
+}
+
+Range.prototype.intersects = function (range, options) {
+  if (!(range instanceof Range)) {
+    throw new TypeError('a Range is required')
+  }
+
+  return this.set.some(function (thisComparators) {
+    return (
+      isSatisfiable(thisComparators, options) &&
+      range.set.some(function (rangeComparators) {
+        return (
+          isSatisfiable(rangeComparators, options) &&
+          thisComparators.every(function (thisComparator) {
+            return rangeComparators.every(function (rangeComparator) {
+              return thisComparator.intersects(rangeComparator, options)
+            })
+          })
+        )
+      })
+    )
+  })
+}
+
+// take a set of comparators and determine whether there
+// exists a version which can satisfy it
+function isSatisfiable (comparators, options) {
+  var result = true
+  var remainingComparators = comparators.slice()
+  var testComparator = remainingComparators.pop()
+
+  while (result && remainingComparators.length) {
+    result = remainingComparators.every(function (otherComparator) {
+      return testComparator.intersects(otherComparator, options)
+    })
+
+    testComparator = remainingComparators.pop()
+  }
+
+  return result
+}
+
+// Mostly just for testing and legacy API reasons
+exports.toComparators = toComparators
+function toComparators (range, options) {
+  return new Range(range, options).set.map(function (comp) {
+    return comp.map(function (c) {
+      return c.value
+    }).join(' ').trim().split(' ')
+  })
+}
+
+// At this point the range is composed of xranges, tildes, stars, and gtlt
+// comparators; the hyphen ranges have already been replaced.
+// Turn it into a set of JUST comparators.
+function parseComparator (comp, options) {
+  debug('comp', comp, options)
+  comp = replaceCarets(comp, options)
+  debug('caret', comp)
+  comp = replaceTildes(comp, options)
+  debug('tildes', comp)
+  comp = replaceXRanges(comp, options)
+  debug('xrange', comp)
+  comp = replaceStars(comp, options)
+  debug('stars', comp)
+  return comp
+}
+
+function isX (id) {
+  return !id || id.toLowerCase() === 'x' || id === '*'
+}
+
+// ~, ~> --> * (any, kinda silly)
+// ~2, ~2.x, ~2.x.x, ~>2, ~>2.x ~>2.x.x --> >=2.0.0 <3.0.0
+// ~2.0, ~2.0.x, ~>2.0, ~>2.0.x --> >=2.0.0 <2.1.0
+// ~1.2, ~1.2.x, ~>1.2, ~>1.2.x --> >=1.2.0 <1.3.0
+// ~1.2.3, ~>1.2.3 --> >=1.2.3 <1.3.0
+// ~1.2.0, ~>1.2.0 --> >=1.2.0 <1.3.0
+function replaceTildes (comp, options) {
+  return comp.trim().split(/\s+/).map(function (comp) {
+    return replaceTilde(comp, options)
+  }).join(' ')
+}
+
+function replaceTilde (comp, options) {
+  var r = options.loose ? safeRe[t.TILDELOOSE] : safeRe[t.TILDE]
+  return comp.replace(r, function (_, M, m, p, pr) {
+    debug('tilde', comp, _, M, m, p, pr)
+    var ret
+
+    if (isX(M)) {
+      ret = ''
+    } else if (isX(m)) {
+      ret = '>=' + M + '.0.0 <' + (+M + 1) + '.0.0'
+    } else if (isX(p)) {
+      // ~1.2 == >=1.2.0 <1.3.0
+      ret = '>=' + M + '.' + m + '.0 <' + M + '.' + (+m + 1) + '.0'
+    } else if (pr) {
+      debug('replaceTilde pr', pr)
+      ret = '>=' + M + '.' + m + '.' + p + '-' + pr +
+            ' <' + M + '.' + (+m + 1) + '.0'
+    } else {
+      // ~1.2.3 == >=1.2.3 <1.3.0
+      ret = '>=' + M + '.' + m + '.' + p +
+            ' <' + M + '.' + (+m + 1) + '.0'
+    }
+
+    debug('tilde return', ret)
+    return ret
+  })
+}
+
+// ^ --> * (any, kinda silly)
+// ^2, ^2.x, ^2.x.x --> >=2.0.0 <3.0.0
+// ^2.0, ^2.0.x --> >=2.0.0 <3.0.0
+// ^1.2, ^1.2.x --> >=1.2.0 <2.0.0
+// ^1.2.3 --> >=1.2.3 <2.0.0
+// ^1.2.0 --> >=1.2.0 <2.0.0
+function replaceCarets (comp, options) {
+  return comp.trim().split(/\s+/).map(function (comp) {
+    return replaceCaret(comp, options)
+  }).join(' ')
+}
+
+function replaceCaret (comp, options) {
+  debug('caret', comp, options)
+  var r = options.loose ? safeRe[t.CARETLOOSE] : safeRe[t.CARET]
+  return comp.replace(r, function (_, M, m, p, pr) {
+    debug('caret', comp, _, M, m, p, pr)
+    var ret
+
+    if (isX(M)) {
+      ret = ''
+    } else if (isX(m)) {
+      ret = '>=' + M + '.0.0 <' + (+M + 1) + '.0.0'
+    } else if (isX(p)) {
+      if (M === '0') {
+        ret = '>=' + M + '.' + m + '.0 <' + M + '.' + (+m + 1) + '.0'
+      } else {
+        ret = '>=' + M + '.' + m + '.0 <' + (+M + 1) + '.0.0'
+      }
+    } else if (pr) {
+      debug('replaceCaret pr', pr)
+      if (M === '0') {
+        if (m === '0') {
+          ret = '>=' + M + '.' + m + '.' + p + '-' + pr +
+                ' <' + M + '.' + m + '.' + (+p + 1)
+        } else {
+          ret = '>=' + M + '.' + m + '.' + p + '-' + pr +
+                ' <' + M + '.' + (+m + 1) + '.0'
+        }
+      } else {
+        ret = '>=' + M + '.' + m + '.' + p + '-' + pr +
+              ' <' + (+M + 1) + '.0.0'
+      }
+    } else {
+      debug('no pr')
+      if (M === '0') {
+        if (m === '0') {
+          ret = '>=' + M + '.' + m + '.' + p +
+                ' <' + M + '.' + m + '.' + (+p + 1)
+        } else {
+          ret = '>=' + M + '.' + m + '.' + p +
+                ' <' + M + '.' + (+m + 1) + '.0'
+        }
+      } else {
+        ret = '>=' + M + '.' + m + '.' + p +
+              ' <' + (+M + 1) + '.0.0'
+      }
+    }
+
+    debug('caret return', ret)
+    return ret
+  })
+}
+
+function replaceXRanges (comp, options) {
+  debug('replaceXRanges', comp, options)
+  return comp.split(/\s+/).map(function (comp) {
+    return replaceXRange(comp, options)
+  }).join(' ')
+}
+
+function replaceXRange (comp, options) {
+  comp = comp.trim()
+  var r = options.loose ? safeRe[t.XRANGELOOSE] : safeRe[t.XRANGE]
+  return comp.replace(r, function (ret, gtlt, M, m, p, pr) {
+    debug('xRange', comp, ret, gtlt, M, m, p, pr)
+    var xM = isX(M)
+    var xm = xM || isX(m)
+    var xp = xm || isX(p)
+    var anyX = xp
+
+    if (gtlt === '=' && anyX) {
+      gtlt = ''
+    }
+
+    // if we're including prereleases in the match, then we need
+    // to fix this to -0, the lowest possible prerelease value
+    pr = options.includePrerelease ? '-0' : ''
+
+    if (xM) {
+      if (gtlt === '>' || gtlt === '<') {
+        // nothing is allowed
+        ret = '<0.0.0-0'
+      } else {
+        // nothing is forbidden
+        ret = '*'
+      }
+    } else if (gtlt && anyX) {
+      // we know patch is an x, because we have any x at all.
+      // replace X with 0
+      if (xm) {
+        m = 0
+      }
+      p = 0
+
+      if (gtlt === '>') {
+        // >1 => >=2.0.0
+        // >1.2 => >=1.3.0
+        // >1.2.3 => >= 1.2.4
+        gtlt = '>='
+        if (xm) {
+          M = +M + 1
+          m = 0
+          p = 0
+        } else {
+          m = +m + 1
+          p = 0
+        }
+      } else if (gtlt === '<=') {
+        // <=0.7.x is actually <0.8.0, since any 0.7.x should
+        // pass.  Similarly, <=7.x is actually <8.0.0, etc.
+        gtlt = '<'
+        if (xm) {
+          M = +M + 1
+        } else {
+          m = +m + 1
+        }
+      }
+
+      ret = gtlt + M + '.' + m + '.' + p + pr
+    } else if (xm) {
+      ret = '>=' + M + '.0.0' + pr + ' <' + (+M + 1) + '.0.0' + pr
+    } else if (xp) {
+      ret = '>=' + M + '.' + m + '.0' + pr +
+        ' <' + M + '.' + (+m + 1) + '.0' + pr
+    }
+
+    debug('xRange return', ret)
+
+    return ret
+  })
+}
+
+// Because * is AND-ed with everything else in the comparator,
+// and '' means "any version", just remove the *s entirely.
+function replaceStars (comp, options) {
+  debug('replaceStars', comp, options)
+  // Looseness is ignored here.  star is always as loose as it gets!
+  return comp.trim().replace(safeRe[t.STAR], '')
+}
+
+// This function is passed to string.replace(safeRe[t.HYPHENRANGE])
+// M, m, patch, prerelease, build
+// 1.2 - 3.4.5 => >=1.2.0 <=3.4.5
+// 1.2.3 - 3.4 => >=1.2.0 <3.5.0 Any 3.4.x will do
+// 1.2 - 3.4 => >=1.2.0 <3.5.0
+function hyphenReplace ($0,
+  from, fM, fm, fp, fpr, fb,
+  to, tM, tm, tp, tpr, tb) {
+  if (isX(fM)) {
+    from = ''
+  } else if (isX(fm)) {
+    from = '>=' + fM + '.0.0'
+  } else if (isX(fp)) {
+    from = '>=' + fM + '.' + fm + '.0'
+  } else {
+    from = '>=' + from
+  }
+
+  if (isX(tM)) {
+    to = ''
+  } else if (isX(tm)) {
+    to = '<' + (+tM + 1) + '.0.0'
+  } else if (isX(tp)) {
+    to = '<' + tM + '.' + (+tm + 1) + '.0'
+  } else if (tpr) {
+    to = '<=' + tM + '.' + tm + '.' + tp + '-' + tpr
+  } else {
+    to = '<=' + to
+  }
+
+  return (from + ' ' + to).trim()
+}
+
+// if ANY of the sets match ALL of its comparators, then pass
+Range.prototype.test = function (version) {
+  if (!version) {
+    return false
+  }
+
+  if (typeof version === 'string') {
+    try {
+      version = new SemVer(version, this.options)
+    } catch (er) {
+      return false
+    }
+  }
+
+  for (var i = 0; i < this.set.length; i++) {
+    if (testSet(this.set[i], version, this.options)) {
+      return true
+    }
+  }
+  return false
+}
+
+function testSet (set, version, options) {
+  for (var i = 0; i < set.length; i++) {
+    if (!set[i].test(version)) {
+      return false
+    }
+  }
+
+  if (version.prerelease.length && !options.includePrerelease) {
+    // Find the set of versions that are allowed to have prereleases
+    // For example, ^1.2.3-pr.1 desugars to >=1.2.3-pr.1 <2.0.0
+    // That should allow `1.2.3-pr.2` to pass.
+    // However, `1.2.4-alpha.notready` should NOT be allowed,
+    // even though it's within the range set by the comparators.
+    for (i = 0; i < set.length; i++) {
+      debug(set[i].semver)
+      if (set[i].semver === ANY) {
+        continue
+      }
+
+      if (set[i].semver.prerelease.length > 0) {
+        var allowed = set[i].semver
+        if (allowed.major === version.major &&
+            allowed.minor === version.minor &&
+            allowed.patch === version.patch) {
+          return true
+        }
+      }
+    }
+
+    // Version has a -pre, but it's not one of the ones we like.
+    return false
+  }
+
+  return true
+}
+
+exports.satisfies = satisfies
+function satisfies (version, range, options) {
+  try {
+    range = new Range(range, options)
+  } catch (er) {
+    return false
+  }
+  return range.test(version)
+}
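+
+// Illustrative behavior (not executed here); note how prereleases are handled:
+//   satisfies('1.2.3', '^1.2.0')             // => true
+//   satisfies('1.2.3-pr.2', '^1.2.3-pr.1')   // => true  (same [major, minor, patch] tuple)
+//   satisfies('1.2.4-alpha', '^1.2.3-pr.1')  // => false (prerelease of a different tuple)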
+
+exports.maxSatisfying = maxSatisfying
+function maxSatisfying (versions, range, options) {
+  var max = null
+  var maxSV = null
+  try {
+    var rangeObj = new Range(range, options)
+  } catch (er) {
+    return null
+  }
+  versions.forEach(function (v) {
+    if (rangeObj.test(v)) {
+      // satisfies(v, range, options)
+      if (!max || maxSV.compare(v) === -1) {
+        // compare(max, v, true)
+        max = v
+        maxSV = new SemVer(max, options)
+      }
+    }
+  })
+  return max
+}
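+
+// Example (illustrative, not executed):
+//   maxSatisfying(['1.2.3', '1.2.4', '1.3.0'], '~1.2')  // => '1.2.4'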
+
+exports.minSatisfying = minSatisfying
+function minSatisfying (versions, range, options) {
+  var min = null
+  var minSV = null
+  try {
+    var rangeObj = new Range(range, options)
+  } catch (er) {
+    return null
+  }
+  versions.forEach(function (v) {
+    if (rangeObj.test(v)) {
+      // satisfies(v, range, options)
+      if (!min || minSV.compare(v) === 1) {
+        // compare(min, v, true)
+        min = v
+        minSV = new SemVer(min, options)
+      }
+    }
+  })
+  return min
+}
+
+exports.minVersion = minVersion
+function minVersion (range, loose) {
+  range = new Range(range, loose)
+
+  var minver = new SemVer('0.0.0')
+  if (range.test(minver)) {
+    return minver
+  }
+
+  minver = new SemVer('0.0.0-0')
+  if (range.test(minver)) {
+    return minver
+  }
+
+  minver = null
+  for (var i = 0; i < range.set.length; ++i) {
+    var comparators = range.set[i]
+
+    comparators.forEach(function (comparator) {
+      // Clone to avoid manipulating the comparator's semver object.
+      var compver = new SemVer(comparator.semver.version)
+      switch (comparator.operator) {
+        case '>':
+          if (compver.prerelease.length === 0) {
+            compver.patch++
+          } else {
+            compver.prerelease.push(0)
+          }
+          compver.raw = compver.format()
+          /* fallthrough */
+        case '':
+        case '>=':
+          if (!minver || gt(minver, compver)) {
+            minver = compver
+          }
+          break
+        case '<':
+        case '<=':
+          /* Ignore maximum versions */
+          break
+        /* istanbul ignore next */
+        default:
+          throw new Error('Unexpected operation: ' + comparator.operator)
+      }
+    })
+  }
+
+  if (minver && range.test(minver)) {
+    return minver
+  }
+
+  return null
+}
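+
+// Illustrative behavior (not executed here):
+//   minVersion('^1.2.3')  // => SemVer '1.2.3'
+//   minVersion('>1.2.3')  // => SemVer '1.2.4'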
+
+exports.validRange = validRange
+function validRange (range, options) {
+  try {
+    // Return '*' instead of '' so that truthiness works.
+    // This will throw if it's invalid anyway
+    return new Range(range, options).range || '*'
+  } catch (er) {
+    return null
+  }
+}
+
+// Determine if version is less than all the versions possible in the range
+exports.ltr = ltr
+function ltr (version, range, options) {
+  return outside(version, range, '<', options)
+}
+
+// Determine if version is greater than all the versions possible in the range.
+exports.gtr = gtr
+function gtr (version, range, options) {
+  return outside(version, range, '>', options)
+}
+
+exports.outside = outside
+function outside (version, range, hilo, options) {
+  version = new SemVer(version, options)
+  range = new Range(range, options)
+
+  var gtfn, ltefn, ltfn, comp, ecomp
+  switch (hilo) {
+    case '>':
+      gtfn = gt
+      ltefn = lte
+      ltfn = lt
+      comp = '>'
+      ecomp = '>='
+      break
+    case '<':
+      gtfn = lt
+      ltefn = gte
+      ltfn = gt
+      comp = '<'
+      ecomp = '<='
+      break
+    default:
+      throw new TypeError('Must provide a hilo val of "<" or ">"')
+  }
+
+  // If it satisfies the range it is not outside
+  if (satisfies(version, range, options)) {
+    return false
+  }
+
+  // From now on, variable terms are as if we're in "gtr" mode.
+  // but note that everything is flipped for the "ltr" function.
+
+  for (var i = 0; i < range.set.length; ++i) {
+    var comparators = range.set[i]
+
+    var high = null
+    var low = null
+
+    comparators.forEach(function (comparator) {
+      if (comparator.semver === ANY) {
+        comparator = new Comparator('>=0.0.0')
+      }
+      high = high || comparator
+      low = low || comparator
+      if (gtfn(comparator.semver, high.semver, options)) {
+        high = comparator
+      } else if (ltfn(comparator.semver, low.semver, options)) {
+        low = comparator
+      }
+    })
+
+    // If the edge version comparator has an operator then our version
+    // isn't outside it
+    if (high.operator === comp || high.operator === ecomp) {
+      return false
+    }
+
+    // If the lowest version comparator has an operator and our version
+    // is less than it then it isn't higher than the range
+    if ((!low.operator || low.operator === comp) &&
+        ltefn(version, low.semver)) {
+      return false
+    } else if (low.operator === ecomp && ltfn(version, low.semver)) {
+      return false
+    }
+  }
+  return true
+}
+
+exports.prerelease = prerelease
+function prerelease (version, options) {
+  var parsed = parse(version, options)
+  return (parsed && parsed.prerelease.length) ? parsed.prerelease : null
+}
+
+exports.intersects = intersects
+function intersects (r1, r2, options) {
+  r1 = new Range(r1, options)
+  r2 = new Range(r2, options)
+  return r1.intersects(r2)
+}
+
+exports.coerce = coerce
+function coerce (version, options) {
+  if (version instanceof SemVer) {
+    return version
+  }
+
+  if (typeof version === 'number') {
+    version = String(version)
+  }
+
+  if (typeof version !== 'string') {
+    return null
+  }
+
+  options = options || {}
+
+  var match = null
+  if (!options.rtl) {
+    match = version.match(safeRe[t.COERCE])
+  } else {
+    // Find the right-most coercible string that does not share
+    // a terminus with a more left-ward coercible string.
+    // Eg, '1.2.3.4' wants to coerce '2.3.4', not '3.4' or '4'
+    //
+    // Walk through the string checking with a /g regexp
+    // Manually set the index so as to pick up overlapping matches.
+    // Stop when we get a match that ends at the string end, since no
+    // coercible string can be more right-ward without the same terminus.
+    var next
+    while ((next = safeRe[t.COERCERTL].exec(version)) &&
+      (!match || match.index + match[0].length !== version.length)
+    ) {
+      if (!match ||
+          next.index + next[0].length !== match.index + match[0].length) {
+        match = next
+      }
+      safeRe[t.COERCERTL].lastIndex = next.index + next[1].length + next[2].length
+    }
+    // leave it in a clean state
+    safeRe[t.COERCERTL].lastIndex = -1
+  }
+
+  if (match === null) {
+    return null
+  }
+
+  return parse(match[2] +
+    '.' + (match[3] || '0') +
+    '.' + (match[4] || '0'), options)
+}
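+
+// Illustrative usage (not executed here), per the documented coerce behavior:
+//   coerce('v2')                      // => SemVer '2.0.0'
+//   coerce('42.6.7.9.3-alpha')        // => SemVer '42.6.7'
+//   coerce('1.2.3.4', { rtl: true })  // => SemVer '2.3.4'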
+
+
+/***/ }),
+
+/***/ 6349:
+/***/ ((module) => {
+
+const hexRegex = /^[-+]?0x[a-fA-F0-9]+$/;
+const numRegex = /^([\-\+])?(0*)(\.[0-9]+([eE]\-?[0-9]+)?|[0-9]+(\.[0-9]+([eE]\-?[0-9]+)?)?)$/;
+// const octRegex = /0x[a-z0-9]+/;
+// const binRegex = /0x[a-z0-9]+/;
+
+
+// Polyfill Number.parseInt / Number.parseFloat from window in old browser environments
+if (!Number.parseInt && window.parseInt) {
+    Number.parseInt = window.parseInt;
+}
+if (!Number.parseFloat && window.parseFloat) {
+    Number.parseFloat = window.parseFloat;
+}
+
+  
+const consider = {
+    hex :  true,
+    leadingZeros: true,
+    decimalPoint: "\.",
+    eNotation: true
+    //skipLike: /regex/
+};
+
+function toNumber(str, options = {}){
+    // const options = Object.assign({}, consider);
+    // if(opt.leadingZeros === false){
+    //     options.leadingZeros = false;
+    // }else if(opt.hex === false){
+    //     options.hex = false;
+    // }
+
+    options = Object.assign({}, consider, options );
+    if(!str || typeof str !== "string" ) return str;
+    
+    let trimmedStr  = str.trim();
+    // if(trimmedStr === "0.0") return 0;
+    // else if(trimmedStr === "+0.0") return 0;
+    // else if(trimmedStr === "-0.0") return -0;
+
+    if(options.skipLike !== undefined && options.skipLike.test(trimmedStr)) return str;
+    else if (options.hex && hexRegex.test(trimmedStr)) {
+        return Number.parseInt(trimmedStr, 16);
+    // } else if (options.parseOct && octRegex.test(str)) {
+    //     return Number.parseInt(val, 8);
+    // }else if (options.parseBin && binRegex.test(str)) {
+    //     return Number.parseInt(val, 2);
+    }else{
+        // separate the negative sign, leading zeros, and the rest of the number
+        const match = numRegex.exec(trimmedStr);
+        if(match){
+            const sign = match[1];
+            const leadingZeros = match[2];
+            let numTrimmedByZeros = trimZeros(match[3]); //complete num without leading zeros
+            //trim ending zeros for floating number
+            
+            const eNotation = match[4] || match[6];
+            if(!options.leadingZeros && leadingZeros.length > 0 && sign && trimmedStr[2] !== ".") return str; //-0123
+            else if(!options.leadingZeros && leadingZeros.length > 0 && !sign && trimmedStr[1] !== ".") return str; //0123
+            else{//no leading zeros or leading zeros are allowed
+                const num = Number(trimmedStr);
+                const numStr = "" + num;
+                if(numStr.search(/[eE]/) !== -1){ //given number is long and parsed to eNotation
+                    if(options.eNotation) return num;
+                    else return str;
+                }else if(eNotation){ //given number has enotation
+                    if(options.eNotation) return num;
+                    else return str;
+                }else if(trimmedStr.indexOf(".") !== -1){ //floating number
+                    // const decimalPart = match[5].substr(1);
+                    // const intPart = trimmedStr.substr(0,trimmedStr.indexOf("."));
+
+                    
+                    // const p = numStr.indexOf(".");
+                    // const givenIntPart = numStr.substr(0,p);
+                    // const givenDecPart = numStr.substr(p+1);
+                    if(numStr === "0" && (numTrimmedByZeros === "") ) return num; //0.0
+                    else if(numStr === numTrimmedByZeros) return num; //0.456. 0.79000
+                    else if( sign && numStr === "-"+numTrimmedByZeros) return num;
+                    else return str;
+                }
+                
+                if(leadingZeros){
+                    // if(numTrimmedByZeros === numStr){
+                    //     if(options.leadingZeros) return num;
+                    //     else return str;
+                    // }else return str;
+                    if(numTrimmedByZeros === numStr) return num;
+                    else if(sign+numTrimmedByZeros === numStr) return num;
+                    else return str;
+                }
+
+                if(trimmedStr === numStr) return num;
+                else if(trimmedStr === sign+numStr) return num;
+                // else{
+                //     //number with +/- sign
+                //     trimmedStr.test(/[-+][0-9]);
+
+                // }
+                return str;
+            }
+            // else if(!eNotation && trimmedStr && trimmedStr !== Number(trimmedStr) ) return str;
+            
+        }else{ //non-numeric string
+            return str;
+        }
+    }
+}
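+
+// Illustrative behavior with the default options (not executed here):
+//   toNumber('0x1f')   // => 31    (hex is parsed when options.hex is true)
+//   toNumber('007')    // => 7     (leading zeros are accepted by default)
+//   toNumber('1.2e3')  // => 1200  (e-notation is parsed when options.eNotation is true)
+//   toNumber('abc')    // => 'abc' (non-numeric strings are returned unchanged)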
+
+/**
+ * Trim trailing zeros from the fractional part of a numeric string.
+ * @param {string} numStr numeric string without leading zeros
+ * @returns {string} the trimmed numeric string
+ */
+function trimZeros(numStr){
+    if(numStr && numStr.indexOf(".") !== -1){//float
+        numStr = numStr.replace(/0+$/, ""); //remove ending zeros
+        if(numStr === ".")  numStr = "0";
+        else if(numStr[0] === ".")  numStr = "0"+numStr;
+        else if(numStr[numStr.length-1] === ".")  numStr = numStr.substr(0,numStr.length-1);
+        return numStr;
+    }
+    return numStr;
+}
+module.exports = toNumber
+
+
+/***/ }),
+
+/***/ 1953:
+/***/ ((module, __unused_webpack_exports, __nccwpck_require__) => {
+
+
+const os = __nccwpck_require__(857);
+const tty = __nccwpck_require__(2018);
+const hasFlag = __nccwpck_require__(1330);
+
+const {env} = process;
+
+let forceColor;
+if (hasFlag('no-color') ||
+	hasFlag('no-colors') ||
+	hasFlag('color=false') ||
+	hasFlag('color=never')) {
+	forceColor = 0;
+} else if (hasFlag('color') ||
+	hasFlag('colors') ||
+	hasFlag('color=true') ||
+	hasFlag('color=always')) {
+	forceColor = 1;
+}
+
+if ('FORCE_COLOR' in env) {
+	if (env.FORCE_COLOR === 'true') {
+		forceColor = 1;
+	} else if (env.FORCE_COLOR === 'false') {
+		forceColor = 0;
+	} else {
+		forceColor = env.FORCE_COLOR.length === 0 ? 1 : Math.min(parseInt(env.FORCE_COLOR, 10), 3);
+	}
+}
+
+function translateLevel(level) {
+	if (level === 0) {
+		return false;
+	}
+
+	return {
+		level,
+		hasBasic: true,
+		has256: level >= 2,
+		has16m: level >= 3
+	};
+}
+
+function supportsColor(haveStream, streamIsTTY) {
+	if (forceColor === 0) {
+		return 0;
+	}
+
+	if (hasFlag('color=16m') ||
+		hasFlag('color=full') ||
+		hasFlag('color=truecolor')) {
+		return 3;
+	}
+
+	if (hasFlag('color=256')) {
+		return 2;
+	}
+
+	if (haveStream && !streamIsTTY && forceColor === undefined) {
+		return 0;
+	}
+
+	const min = forceColor || 0;
+
+	if (env.TERM === 'dumb') {
+		return min;
+	}
+
+	if (process.platform === 'win32') {
+		// Windows 10 build 10586 is the first Windows release that supports 256 colors.
+		// Windows 10 build 14931 is the first release that supports 16m/TrueColor.
+		const osRelease = os.release().split('.');
+		if (
+			Number(osRelease[0]) >= 10 &&
+			Number(osRelease[2]) >= 10586
+		) {
+			return Number(osRelease[2]) >= 14931 ? 3 : 2;
+		}
+
+		return 1;
+	}
+
+	if ('CI' in env) {
+		if (['TRAVIS', 'CIRCLECI', 'APPVEYOR', 'GITLAB_CI', 'GITHUB_ACTIONS', 'BUILDKITE'].some(sign => sign in env) || env.CI_NAME === 'codeship') {
+			return 1;
+		}
+
+		return min;
+	}
+
+	if ('TEAMCITY_VERSION' in env) {
+		return /^(9\.(0*[1-9]\d*)\.|\d{2,}\.)/.test(env.TEAMCITY_VERSION) ? 1 : 0;
+	}
+
+	if (env.COLORTERM === 'truecolor') {
+		return 3;
+	}
+
+	if ('TERM_PROGRAM' in env) {
+		const version = parseInt((env.TERM_PROGRAM_VERSION || '').split('.')[0], 10);
+
+		switch (env.TERM_PROGRAM) {
+			case 'iTerm.app':
+				return version >= 3 ? 3 : 2;
+			case 'Apple_Terminal':
+				return 2;
+			// No default
+		}
+	}
+
+	if (/-256(color)?$/i.test(env.TERM)) {
+		return 2;
+	}
+
+	if (/^screen|^xterm|^vt100|^vt220|^rxvt|color|ansi|cygwin|linux/i.test(env.TERM)) {
+		return 1;
+	}
+
+	if ('COLORTERM' in env) {
+		return 1;
+	}
+
+	return min;
+}
+
+function getSupportLevel(stream) {
+	const level = supportsColor(stream, stream && stream.isTTY);
+	return translateLevel(level);
+}
+
+module.exports = {
+	supportsColor: getSupportLevel,
+	stdout: translateLevel(supportsColor(true, tty.isatty(1))),
+	stderr: translateLevel(supportsColor(true, tty.isatty(2)))
+};
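+
+// Illustrative usage (not executed here), matching the standalone supports-color API:
+//   const { stdout } = require('supports-color');
+//   if (stdout) console.log('color level:', stdout.level); // 1 = basic, 2 = 256, 3 = 16m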
+
+
+/***/ }),
+
+/***/ 1577:
+/***/ ((module) => {
+
+/******************************************************************************
+Copyright (c) Microsoft Corporation.
+
+Permission to use, copy, modify, and/or distribute this software for any
+purpose with or without fee is hereby granted.
+
+THE SOFTWARE IS PROVIDED "AS IS" AND THE AUTHOR DISCLAIMS ALL WARRANTIES WITH
+REGARD TO THIS SOFTWARE INCLUDING ALL IMPLIED WARRANTIES OF MERCHANTABILITY
+AND FITNESS. IN NO EVENT SHALL THE AUTHOR BE LIABLE FOR ANY SPECIAL, DIRECT,
+INDIRECT, OR CONSEQUENTIAL DAMAGES OR ANY DAMAGES WHATSOEVER RESULTING FROM
+LOSS OF USE, DATA OR PROFITS, WHETHER IN AN ACTION OF CONTRACT, NEGLIGENCE OR
+OTHER TORTIOUS ACTION, ARISING OUT OF OR IN CONNECTION WITH THE USE OR
+PERFORMANCE OF THIS SOFTWARE.
+***************************************************************************** */
+/* global global, define, Symbol, Reflect, Promise, SuppressedError, Iterator */
+var __extends;
+var __assign;
+var __rest;
+var __decorate;
+var __param;
+var __esDecorate;
+var __runInitializers;
+var __propKey;
+var __setFunctionName;
+var __metadata;
+var __awaiter;
+var __generator;
+var __exportStar;
+var __values;
+var __read;
+var __spread;
+var __spreadArrays;
+var __spreadArray;
+var __await;
+var __asyncGenerator;
+var __asyncDelegator;
+var __asyncValues;
+var __makeTemplateObject;
+var __importStar;
+var __importDefault;
+var __classPrivateFieldGet;
+var __classPrivateFieldSet;
+var __classPrivateFieldIn;
+var __createBinding;
+var __addDisposableResource;
+var __disposeResources;
+var __rewriteRelativeImportExtension;
+(function (factory) {
+    var root = typeof global === "object" ? global : typeof self === "object" ? self : typeof this === "object" ? this : {};
+    if (typeof define === "function" && define.amd) {
+        define("tslib", ["exports"], function (exports) { factory(createExporter(root, createExporter(exports))); });
+    }
+    else if ( true && typeof module.exports === "object") {
+        factory(createExporter(root, createExporter(module.exports)));
+    }
+    else {
+        factory(createExporter(root));
+    }
+    function createExporter(exports, previous) {
+        if (exports !== root) {
+            if (typeof Object.create === "function") {
+                Object.defineProperty(exports, "__esModule", { value: true });
+            }
+            else {
+                exports.__esModule = true;
+            }
+        }
+        return function (id, v) { return exports[id] = previous ? previous(id, v) : v; };
+    }
+})
+(function (exporter) {
+    var extendStatics = Object.setPrototypeOf ||
+        ({ __proto__: [] } instanceof Array && function (d, b) { d.__proto__ = b; }) ||
+        function (d, b) { for (var p in b) if (Object.prototype.hasOwnProperty.call(b, p)) d[p] = b[p]; };
+
+    __extends = function (d, b) {
+        if (typeof b !== "function" && b !== null)
+            throw new TypeError("Class extends value " + String(b) + " is not a constructor or null");
+        extendStatics(d, b);
+        function __() { this.constructor = d; }
+        d.prototype = b === null ? Object.create(b) : (__.prototype = b.prototype, new __());
+    };
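+
+    // Illustrative only (not executed): under an ES5 target, TypeScript emits roughly
+    //   var Dog = /** @class */ (function (_super) { __extends(Dog, _super); ... }(Animal));
+    // for `class Dog extends Animal {}`, so Dog instances inherit from Animal.prototype.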
+
+    __assign = Object.assign || function (t) {
+        for (var s, i = 1, n = arguments.length; i < n; i++) {
+            s = arguments[i];
+            for (var p in s) if (Object.prototype.hasOwnProperty.call(s, p)) t[p] = s[p];
+        }
+        return t;
+    };
+
+    __rest = function (s, e) {
+        var t = {};
+        for (var p in s) if (Object.prototype.hasOwnProperty.call(s, p) && e.indexOf(p) < 0)
+            t[p] = s[p];
+        if (s != null && typeof Object.getOwnPropertySymbols === "function")
+            for (var i = 0, p = Object.getOwnPropertySymbols(s); i < p.length; i++) {
+                if (e.indexOf(p[i]) < 0 && Object.prototype.propertyIsEnumerable.call(s, p[i]))
+                    t[p[i]] = s[p[i]];
+            }
+        return t;
+    };
+
+    __decorate = function (decorators, target, key, desc) {
+        var c = arguments.length, r = c < 3 ? target : desc === null ? desc = Object.getOwnPropertyDescriptor(target, key) : desc, d;
+        if (typeof Reflect === "object" && typeof Reflect.decorate === "function") r = Reflect.decorate(decorators, target, key, desc);
+        else for (var i = decorators.length - 1; i >= 0; i--) if (d = decorators[i]) r = (c < 3 ? d(r) : c > 3 ? d(target, key, r) : d(target, key)) || r;
+        return c > 3 && r && Object.defineProperty(target, key, r), r;
+    };
+
+    __param = function (paramIndex, decorator) {
+        return function (target, key) { decorator(target, key, paramIndex); }
+    };
+
+    __esDecorate = function (ctor, descriptorIn, decorators, contextIn, initializers, extraInitializers) {
+        function accept(f) { if (f !== void 0 && typeof f !== "function") throw new TypeError("Function expected"); return f; }
+        var kind = contextIn.kind, key = kind === "getter" ? "get" : kind === "setter" ? "set" : "value";
+        var target = !descriptorIn && ctor ? contextIn["static"] ? ctor : ctor.prototype : null;
+        var descriptor = descriptorIn || (target ? Object.getOwnPropertyDescriptor(target, contextIn.name) : {});
+        var _, done = false;
+        for (var i = decorators.length - 1; i >= 0; i--) {
+            var context = {};
+            for (var p in contextIn) context[p] = p === "access" ? {} : contextIn[p];
+            for (var p in contextIn.access) context.access[p] = contextIn.access[p];
+            context.addInitializer = function (f) { if (done) throw new TypeError("Cannot add initializers after decoration has completed"); extraInitializers.push(accept(f || null)); };
+            var result = (0, decorators[i])(kind === "accessor" ? { get: descriptor.get, set: descriptor.set } : descriptor[key], context);
+            if (kind === "accessor") {
+                if (result === void 0) continue;
+                if (result === null || typeof result !== "object") throw new TypeError("Object expected");
+                if (_ = accept(result.get)) descriptor.get = _;
+                if (_ = accept(result.set)) descriptor.set = _;
+                if (_ = accept(result.init)) initializers.unshift(_);
+            }
+            else if (_ = accept(result)) {
+                if (kind === "field") initializers.unshift(_);
+                else descriptor[key] = _;
+            }
+        }
+        if (target) Object.defineProperty(target, contextIn.name, descriptor);
+        done = true;
+    };
+
+    __runInitializers = function (thisArg, initializers, value) {
+        var useValue = arguments.length > 2;
+        for (var i = 0; i < initializers.length; i++) {
+            value = useValue ? initializers[i].call(thisArg, value) : initializers[i].call(thisArg);
+        }
+        return useValue ? value : void 0;
+    };
+
+    __propKey = function (x) {
+        return typeof x === "symbol" ? x : "".concat(x);
+    };
+
+    __setFunctionName = function (f, name, prefix) {
+        if (typeof name === "symbol") name = name.description ? "[".concat(name.description, "]") : "";
+        return Object.defineProperty(f, "name", { configurable: true, value: prefix ? "".concat(prefix, " ", name) : name });
+    };
+
+    __metadata = function (metadataKey, metadataValue) {
+        if (typeof Reflect === "object" && typeof Reflect.metadata === "function") return Reflect.metadata(metadataKey, metadataValue);
+    };
+
+    __awaiter = function (thisArg, _arguments, P, generator) {
+        function adopt(value) { return value instanceof P ? value : new P(function (resolve) { resolve(value); }); }
+        return new (P || (P = Promise))(function (resolve, reject) {
+            function fulfilled(value) { try { step(generator.next(value)); } catch (e) { reject(e); } }
+            function rejected(value) { try { step(generator["throw"](value)); } catch (e) { reject(e); } }
+            function step(result) { result.done ? resolve(result.value) : adopt(result.value).then(fulfilled, rejected); }
+            step((generator = generator.apply(thisArg, _arguments || [])).next());
+        });
+    };
+
+    __generator = function (thisArg, body) {
+        var _ = { label: 0, sent: function() { if (t[0] & 1) throw t[1]; return t[1]; }, trys: [], ops: [] }, f, y, t, g = Object.create((typeof Iterator === "function" ? Iterator : Object).prototype);
+        return g.next = verb(0), g["throw"] = verb(1), g["return"] = verb(2), typeof Symbol === "function" && (g[Symbol.iterator] = function() { return this; }), g;
+        function verb(n) { return function (v) { return step([n, v]); }; }
+        function step(op) {
+            if (f) throw new TypeError("Generator is already executing.");
+            while (g && (g = 0, op[0] && (_ = 0)), _) try {
+                if (f = 1, y && (t = op[0] & 2 ? y["return"] : op[0] ? y["throw"] || ((t = y["return"]) && t.call(y), 0) : y.next) && !(t = t.call(y, op[1])).done) return t;
+                if (y = 0, t) op = [op[0] & 2, t.value];
+                switch (op[0]) {
+                    case 0: case 1: t = op; break;
+                    case 4: _.label++; return { value: op[1], done: false };
+                    case 5: _.label++; y = op[1]; op = [0]; continue;
+                    case 7: op = _.ops.pop(); _.trys.pop(); continue;
+                    default:
+                        if (!(t = _.trys, t = t.length > 0 && t[t.length - 1]) && (op[0] === 6 || op[0] === 2)) { _ = 0; continue; }
+                        if (op[0] === 3 && (!t || (op[1] > t[0] && op[1] < t[3]))) { _.label = op[1]; break; }
+                        if (op[0] === 6 && _.label < t[1]) { _.label = t[1]; t = op; break; }
+                        if (t && _.label < t[2]) { _.label = t[2]; _.ops.push(op); break; }
+                        if (t[2]) _.ops.pop();
+                        _.trys.pop(); continue;
+                }
+                op = body.call(thisArg, _);
+            } catch (e) { op = [6, e]; y = 0; } finally { f = t = 0; }
+            if (op[0] & 5) throw op[1]; return { value: op[0] ? op[1] : void 0, done: true };
+        }
+    };
+
+    __exportStar = function(m, o) {
+        for (var p in m) if (p !== "default" && !Object.prototype.hasOwnProperty.call(o, p)) __createBinding(o, m, p);
+    };
+
+    __createBinding = Object.create ? (function(o, m, k, k2) {
+        if (k2 === undefined) k2 = k;
+        var desc = Object.getOwnPropertyDescriptor(m, k);
+        if (!desc || ("get" in desc ? !m.__esModule : desc.writable || desc.configurable)) {
+            desc = { enumerable: true, get: function() { return m[k]; } };
+        }
+        Object.defineProperty(o, k2, desc);
+    }) : (function(o, m, k, k2) {
+        if (k2 === undefined) k2 = k;
+        o[k2] = m[k];
+    });
+
+    __values = function (o) {
+        var s = typeof Symbol === "function" && Symbol.iterator, m = s && o[s], i = 0;
+        if (m) return m.call(o);
+        if (o && typeof o.length === "number") return {
+            next: function () {
+                if (o && i >= o.length) o = void 0;
+                return { value: o && o[i++], done: !o };
+            }
+        };
+        throw new TypeError(s ? "Object is not iterable." : "Symbol.iterator is not defined.");
+    };
+
+    __read = function (o, n) {
+        var m = typeof Symbol === "function" && o[Symbol.iterator];
+        if (!m) return o;
+        var i = m.call(o), r, ar = [], e;
+        try {
+            while ((n === void 0 || n-- > 0) && !(r = i.next()).done) ar.push(r.value);
+        }
+        catch (error) { e = { error: error }; }
+        finally {
+            try {
+                if (r && !r.done && (m = i["return"])) m.call(i);
+            }
+            finally { if (e) throw e.error; }
+        }
+        return ar;
+    };
+
+    /** @deprecated */
+    __spread = function () {
+        for (var ar = [], i = 0; i < arguments.length; i++)
+            ar = ar.concat(__read(arguments[i]));
+        return ar;
+    };
+
+    /** @deprecated */
+    __spreadArrays = function () {
+        for (var s = 0, i = 0, il = arguments.length; i < il; i++) s += arguments[i].length;
+        for (var r = Array(s), k = 0, i = 0; i < il; i++)
+            for (var a = arguments[i], j = 0, jl = a.length; j < jl; j++, k++)
+                r[k] = a[j];
+        return r;
+    };
+
+    __spreadArray = function (to, from, pack) {
+        if (pack || arguments.length === 2) for (var i = 0, l = from.length, ar; i < l; i++) {
+            if (ar || !(i in from)) {
+                if (!ar) ar = Array.prototype.slice.call(from, 0, i);
+                ar[i] = from[i];
+            }
+        }
+        return to.concat(ar || Array.prototype.slice.call(from));
+    };
+
+    __await = function (v) {
+        return this instanceof __await ? (this.v = v, this) : new __await(v);
+    };
+
+    __asyncGenerator = function (thisArg, _arguments, generator) {
+        if (!Symbol.asyncIterator) throw new TypeError("Symbol.asyncIterator is not defined.");
+        var g = generator.apply(thisArg, _arguments || []), i, q = [];
+        return i = Object.create((typeof AsyncIterator === "function" ? AsyncIterator : Object).prototype), verb("next"), verb("throw"), verb("return", awaitReturn), i[Symbol.asyncIterator] = function () { return this; }, i;
+        function awaitReturn(f) { return function (v) { return Promise.resolve(v).then(f, reject); }; }
+        function verb(n, f) { if (g[n]) { i[n] = function (v) { return new Promise(function (a, b) { q.push([n, v, a, b]) > 1 || resume(n, v); }); }; if (f) i[n] = f(i[n]); } }
+        function resume(n, v) { try { step(g[n](v)); } catch (e) { settle(q[0][3], e); } }
+        function step(r) { r.value instanceof __await ? Promise.resolve(r.value.v).then(fulfill, reject) : settle(q[0][2], r); }
+        function fulfill(value) { resume("next", value); }
+        function reject(value) { resume("throw", value); }
+        function settle(f, v) { if (f(v), q.shift(), q.length) resume(q[0][0], q[0][1]); }
+    };
+
+    __asyncDelegator = function (o) {
+        var i, p;
+        return i = {}, verb("next"), verb("throw", function (e) { throw e; }), verb("return"), i[Symbol.iterator] = function () { return this; }, i;
+        function verb(n, f) { i[n] = o[n] ? function (v) { return (p = !p) ? { value: __await(o[n](v)), done: false } : f ? f(v) : v; } : f; }
+    };
+
+    __asyncValues = function (o) {
+        if (!Symbol.asyncIterator) throw new TypeError("Symbol.asyncIterator is not defined.");
+        var m = o[Symbol.asyncIterator], i;
+        return m ? m.call(o) : (o = typeof __values === "function" ? __values(o) : o[Symbol.iterator](), i = {}, verb("next"), verb("throw"), verb("return"), i[Symbol.asyncIterator] = function () { return this; }, i);
+        function verb(n) { i[n] = o[n] && function (v) { return new Promise(function (resolve, reject) { v = o[n](v), settle(resolve, reject, v.done, v.value); }); }; }
+        function settle(resolve, reject, d, v) { Promise.resolve(v).then(function(v) { resolve({ value: v, done: d }); }, reject); }
+    };
+
+    __makeTemplateObject = function (cooked, raw) {
+        if (Object.defineProperty) { Object.defineProperty(cooked, "raw", { value: raw }); } else { cooked.raw = raw; }
+        return cooked;
+    };
+
+    var __setModuleDefault = Object.create ? (function(o, v) {
+        Object.defineProperty(o, "default", { enumerable: true, value: v });
+    }) : function(o, v) {
+        o["default"] = v;
+    };
+
+    var ownKeys = function(o) {
+        ownKeys = Object.getOwnPropertyNames || function (o) {
+            var ar = [];
+            for (var k in o) if (Object.prototype.hasOwnProperty.call(o, k)) ar[ar.length] = k;
+            return ar;
+        };
+        return ownKeys(o);
+    };
+
+    __importStar = function (mod) {
+        if (mod && mod.__esModule) return mod;
+        var result = {};
+        if (mod != null) for (var k = ownKeys(mod), i = 0; i < k.length; i++) if (k[i] !== "default") __createBinding(result, mod, k[i]);
+        __setModuleDefault(result, mod);
+        return result;
+    };
+
+    __importDefault = function (mod) {
+        return (mod && mod.__esModule) ? mod : { "default": mod };
+    };
+
+    __classPrivateFieldGet = function (receiver, state, kind, f) {
+        if (kind === "a" && !f) throw new TypeError("Private accessor was defined without a getter");
+        if (typeof state === "function" ? receiver !== state || !f : !state.has(receiver)) throw new TypeError("Cannot read private member from an object whose class did not declare it");
+        return kind === "m" ? f : kind === "a" ? f.call(receiver) : f ? f.value : state.get(receiver);
+    };
+
+    __classPrivateFieldSet = function (receiver, state, value, kind, f) {
+        if (kind === "m") throw new TypeError("Private method is not writable");
+        if (kind === "a" && !f) throw new TypeError("Private accessor was defined without a setter");
+        if (typeof state === "function" ? receiver !== state || !f : !state.has(receiver)) throw new TypeError("Cannot write private member to an object whose class did not declare it");
+        return (kind === "a" ? f.call(receiver, value) : f ? f.value = value : state.set(receiver, value)), value;
+    };
+
+    __classPrivateFieldIn = function (state, receiver) {
+        if (receiver === null || (typeof receiver !== "object" && typeof receiver !== "function")) throw new TypeError("Cannot use 'in' operator on non-object");
+        return typeof state === "function" ? receiver === state : state.has(receiver);
+    };
+
+    __addDisposableResource = function (env, value, async) {
+        if (value !== null && value !== void 0) {
+            if (typeof value !== "object" && typeof value !== "function") throw new TypeError("Object expected.");
+            var dispose, inner;
+            if (async) {
+                if (!Symbol.asyncDispose) throw new TypeError("Symbol.asyncDispose is not defined.");
+                dispose = value[Symbol.asyncDispose];
+            }
+            if (dispose === void 0) {
+                if (!Symbol.dispose) throw new TypeError("Symbol.dispose is not defined.");
+                dispose = value[Symbol.dispose];
+                if (async) inner = dispose;
+            }
+            if (typeof dispose !== "function") throw new TypeError("Object not disposable.");
+            if (inner) dispose = function() { try { inner.call(this); } catch (e) { return Promise.reject(e); } };
+            env.stack.push({ value: value, dispose: dispose, async: async });
+        }
+        else if (async) {
+            env.stack.push({ async: true });
+        }
+        return value;
+    };
+
+    var _SuppressedError = typeof SuppressedError === "function" ? SuppressedError : function (error, suppressed, message) {
+        var e = new Error(message);
+        return e.name = "SuppressedError", e.error = error, e.suppressed = suppressed, e;
+    };
+
+    __disposeResources = function (env) {
+        function fail(e) {
+            env.error = env.hasError ? new _SuppressedError(e, env.error, "An error was suppressed during disposal.") : e;
+            env.hasError = true;
+        }
+        var r, s = 0;
+        function next() {
+            while (r = env.stack.pop()) {
+                try {
+                    if (!r.async && s === 1) return s = 0, env.stack.push(r), Promise.resolve().then(next);
+                    if (r.dispose) {
+                        var result = r.dispose.call(r.value);
+                        if (r.async) return s |= 2, Promise.resolve(result).then(next, function(e) { fail(e); return next(); });
+                    }
+                    else s |= 1;
+                }
+                catch (e) {
+                    fail(e);
+                }
+            }
+            if (s === 1) return env.hasError ? Promise.reject(env.error) : Promise.resolve();
+            if (env.hasError) throw env.error;
+        }
+        return next();
+    };
+
+    __rewriteRelativeImportExtension = function (path, preserveJsx) {
+        if (typeof path === "string" && /^\.\.?\//.test(path)) {
+            return path.replace(/\.(tsx)$|((?:\.d)?)((?:\.[^./]+?)?)\.([cm]?)ts$/i, function (m, tsx, d, ext, cm) {
+                return tsx ? preserveJsx ? ".jsx" : ".js" : d && (!ext || !cm) ? m : (d + ext + "." + cm.toLowerCase() + "js");
+            });
+        }
+        return path;
+    };
+
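+    // Register each helper with the exporter callback so it is exposed on the module / global tslib export object.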
+    exporter("__extends", __extends);
+    exporter("__assign", __assign);
+    exporter("__rest", __rest);
+    exporter("__decorate", __decorate);
+    exporter("__param", __param);
+    exporter("__esDecorate", __esDecorate);
+    exporter("__runInitializers", __runInitializers);
+    exporter("__propKey", __propKey);
+    exporter("__setFunctionName", __setFunctionName);
+    exporter("__metadata", __metadata);
+    exporter("__awaiter", __awaiter);
+    exporter("__generator", __generator);
+    exporter("__exportStar", __exportStar);
+    exporter("__createBinding", __createBinding);
+    exporter("__values", __values);
+    exporter("__read", __read);
+    exporter("__spread", __spread);
+    exporter("__spreadArrays", __spreadArrays);
+    exporter("__spreadArray", __spreadArray);
+    exporter("__await", __await);
+    exporter("__asyncGenerator", __asyncGenerator);
+    exporter("__asyncDelegator", __asyncDelegator);
+    exporter("__asyncValues", __asyncValues);
+    exporter("__makeTemplateObject", __makeTemplateObject);
+    exporter("__importStar", __importStar);
+    exporter("__importDefault", __importDefault);
+    exporter("__classPrivateFieldGet", __classPrivateFieldGet);
+    exporter("__classPrivateFieldSet", __classPrivateFieldSet);
+    exporter("__classPrivateFieldIn", __classPrivateFieldIn);
+    exporter("__addDisposableResource", __addDisposableResource);
+    exporter("__disposeResources", __disposeResources);
+    exporter("__rewriteRelativeImportExtension", __rewriteRelativeImportExtension);
+});
+
+0 && (0);
+
+
+/***/ }),
+
+/***/ 6124:
+/***/ ((module, __unused_webpack_exports, __nccwpck_require__) => {
+
+module.exports = __nccwpck_require__(3660);
+
+
+/***/ }),
+
+/***/ 3660:
+/***/ ((__unused_webpack_module, exports, __nccwpck_require__) => {
+
+
+
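+// Bundled copy of the "tunnel" package: agents that tunnel HTTP/HTTPS requests through an HTTP(S) proxy using CONNECT.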
+var net = __nccwpck_require__(9278);
+var tls = __nccwpck_require__(4756);
+var http = __nccwpck_require__(8611);
+var https = __nccwpck_require__(5692);
+var events = __nccwpck_require__(4434);
+var assert = __nccwpck_require__(2613);
+var util = __nccwpck_require__(9023);
+
+
+exports.httpOverHttp = httpOverHttp;
+exports.httpsOverHttp = httpsOverHttp;
+exports.httpOverHttps = httpOverHttps;
+exports.httpsOverHttps = httpsOverHttps;
+
+
+function httpOverHttp(options) {
+  var agent = new TunnelingAgent(options);
+  agent.request = http.request;
+  return agent;
+}
+
+function httpsOverHttp(options) {
+  var agent = new TunnelingAgent(options);
+  agent.request = http.request;
+  agent.createSocket = createSecureSocket;
+  agent.defaultPort = 443;
+  return agent;
+}
+
+function httpOverHttps(options) {
+  var agent = new TunnelingAgent(options);
+  agent.request = https.request;
+  return agent;
+}
+
+function httpsOverHttps(options) {
+  var agent = new TunnelingAgent(options);
+  agent.request = https.request;
+  agent.createSocket = createSecureSocket;
+  agent.defaultPort = 443;
+  return agent;
+}
+
+
+function TunnelingAgent(options) {
+  var self = this;
+  self.options = options || {};
+  self.proxyOptions = self.options.proxy || {};
+  self.maxSockets = self.options.maxSockets || http.Agent.defaultMaxSockets;
+  self.requests = [];
+  self.sockets = [];
+
+  self.on('free', function onFree(socket, host, port, localAddress) {
+    var options = toOptions(host, port, localAddress);
+    for (var i = 0, len = self.requests.length; i < len; ++i) {
+      var pending = self.requests[i];
+      if (pending.host === options.host && pending.port === options.port) {
+        // A pending request targets the same origin server, so reuse the connection.
+        self.requests.splice(i, 1);
+        pending.request.onSocket(socket);
+        return;
+      }
+    }
+    socket.destroy();
+    self.removeSocket(socket);
+  });
+}
+util.inherits(TunnelingAgent, events.EventEmitter);
+
+TunnelingAgent.prototype.addRequest = function addRequest(req, host, port, localAddress) {
+  var self = this;
+  var options = mergeOptions({request: req}, self.options, toOptions(host, port, localAddress));
+
+  if (self.sockets.length >= this.maxSockets) {
+    // We are over limit so we'll add it to the queue.
+    self.requests.push(options);
+    return;
+  }
+
+  // If we are under maxSockets create a new one.
+  self.createSocket(options, function(socket) {
+    socket.on('free', onFree);
+    socket.on('close', onCloseOrRemove);
+    socket.on('agentRemove', onCloseOrRemove);
+    req.onSocket(socket);
+
+    function onFree() {
+      self.emit('free', socket, options);
+    }
+
+    function onCloseOrRemove(err) {
+      self.removeSocket(socket);
+      socket.removeListener('free', onFree);
+      socket.removeListener('close', onCloseOrRemove);
+      socket.removeListener('agentRemove', onCloseOrRemove);
+    }
+  });
+};
+
+TunnelingAgent.prototype.createSocket = function createSocket(options, cb) {
+  var self = this;
+  var placeholder = {};
+  self.sockets.push(placeholder);
+
+  var connectOptions = mergeOptions({}, self.proxyOptions, {
+    method: 'CONNECT',
+    path: options.host + ':' + options.port,
+    agent: false,
+    headers: {
+      host: options.host + ':' + options.port
+    }
+  });
+  if (options.localAddress) {
+    connectOptions.localAddress = options.localAddress;
+  }
+  if (connectOptions.proxyAuth) {
+    connectOptions.headers = connectOptions.headers || {};
+    connectOptions.headers['Proxy-Authorization'] = 'Basic ' +
+        Buffer.from(connectOptions.proxyAuth).toString('base64');
+  }
+
+  debug('making CONNECT request');
+  var connectReq = self.request(connectOptions);
+  connectReq.useChunkedEncodingByDefault = false; // for v0.6
+  connectReq.once('response', onResponse); // for v0.6
+  connectReq.once('upgrade', onUpgrade);   // for v0.6
+  connectReq.once('connect', onConnect);   // for v0.7 or later
+  connectReq.once('error', onError);
+  connectReq.end();
+
+  function onResponse(res) {
+    // Very hacky. This is necessary to avoid http-parser leaks.
+    res.upgrade = true;
+  }
+
+  function onUpgrade(res, socket, head) {
+    // Hacky.
+    process.nextTick(function() {
+      onConnect(res, socket, head);
+    });
+  }
+
+  function onConnect(res, socket, head) {
+    connectReq.removeAllListeners();
+    socket.removeAllListeners();
+
+    if (res.statusCode !== 200) {
+      debug('tunneling socket could not be established, statusCode=%d',
+        res.statusCode);
+      socket.destroy();
+      var error = new Error('tunneling socket could not be established, ' +
+        'statusCode=' + res.statusCode);
+      error.code = 'ECONNRESET';
+      options.request.emit('error', error);
+      self.removeSocket(placeholder);
+      return;
+    }
+    if (head.length > 0) {
+      debug('got illegal response body from proxy');
+      socket.destroy();
+      var error = new Error('got illegal response body from proxy');
+      error.code = 'ECONNRESET';
+      options.request.emit('error', error);
+      self.removeSocket(placeholder);
+      return;
+    }
+    debug('tunneling connection has been established');
+    self.sockets[self.sockets.indexOf(placeholder)] = socket;
+    return cb(socket);
+  }
+
+  function onError(cause) {
+    connectReq.removeAllListeners();
+
+    debug('tunneling socket could not be established, cause=%s\n',
+          cause.message, cause.stack);
+    var error = new Error('tunneling socket could not be established, ' +
+                          'cause=' + cause.message);
+    error.code = 'ECONNRESET';
+    options.request.emit('error', error);
+    self.removeSocket(placeholder);
+  }
+};
+
+TunnelingAgent.prototype.removeSocket = function removeSocket(socket) {
+  var pos = this.sockets.indexOf(socket);
+  if (pos === -1) {
+    return;
+  }
+  this.sockets.splice(pos, 1);
+
+  var pending = this.requests.shift();
+  if (pending) {
+    // If there are pending requests and a socket closes, create a new socket
+    // to take its place in the pool.
+    this.createSocket(pending, function(socket) {
+      pending.request.onSocket(socket);
+    });
+  }
+};
+
+function createSecureSocket(options, cb) {
+  var self = this;
+  TunnelingAgent.prototype.createSocket.call(self, options, function(socket) {
+    var hostHeader = options.request.getHeader('host');
+    var tlsOptions = mergeOptions({}, self.options, {
+      socket: socket,
+      servername: hostHeader ? hostHeader.replace(/:.*$/, '') : options.host
+    });
+
+    // 0 is dummy port for v0.6
+    var secureSocket = tls.connect(0, tlsOptions);
+    self.sockets[self.sockets.indexOf(socket)] = secureSocket;
+    cb(secureSocket);
+  });
+}
+
+
+function toOptions(host, port, localAddress) {
+  if (typeof host === 'string') { // since v0.10
+    return {
+      host: host,
+      port: port,
+      localAddress: localAddress
+    };
+  }
+  return host; // for v0.11 or later
+}
+
+function mergeOptions(target) {
+  for (var i = 1, len = arguments.length; i < len; ++i) {
+    var overrides = arguments[i];
+    if (typeof overrides === 'object') {
+      var keys = Object.keys(overrides);
+      for (var j = 0, keyLen = keys.length; j < keyLen; ++j) {
+        var k = keys[j];
+        if (overrides[k] !== undefined) {
+          target[k] = overrides[k];
+        }
+      }
+    }
+  }
+  return target;
+}
+
+
+var debug;
+if (process.env.NODE_DEBUG && /\btunnel\b/.test(process.env.NODE_DEBUG)) {
+  debug = function() {
+    var args = Array.prototype.slice.call(arguments);
+    if (typeof args[0] === 'string') {
+      args[0] = 'TUNNEL: ' + args[0];
+    } else {
+      args.unshift('TUNNEL:');
+    }
+    console.error.apply(console, args);
+  }
+} else {
+  debug = function() {};
+}
+exports.debug = debug; // for test
+
+
+/***/ }),
+
+/***/ 1909:
+/***/ ((module, __unused_webpack_exports, __nccwpck_require__) => {
+
+
+
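+// Bundled undici entry point: exposes Dispatcher, Client, Pool, Agent, the request/stream/pipeline/connect/upgrade APIs, fetch and friends.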
+const Client = __nccwpck_require__(6646)
+const Dispatcher = __nccwpck_require__(9724)
+const errors = __nccwpck_require__(3862)
+const Pool = __nccwpck_require__(7787)
+const BalancedPool = __nccwpck_require__(7568)
+const Agent = __nccwpck_require__(9072)
+const util = __nccwpck_require__(3465)
+const { InvalidArgumentError } = errors
+const api = __nccwpck_require__(4598)
+const buildConnector = __nccwpck_require__(2559)
+const MockClient = __nccwpck_require__(962)
+const MockAgent = __nccwpck_require__(5332)
+const MockPool = __nccwpck_require__(2975)
+const mockErrors = __nccwpck_require__(4222)
+const ProxyAgent = __nccwpck_require__(2145)
+const RetryHandler = __nccwpck_require__(3966)
+const { getGlobalDispatcher, setGlobalDispatcher } = __nccwpck_require__(7882)
+const DecoratorHandler = __nccwpck_require__(8975)
+const RedirectHandler = __nccwpck_require__(238)
+const createRedirectInterceptor = __nccwpck_require__(2130)
+
+let hasCrypto
+try {
+  __nccwpck_require__(6982)
+  hasCrypto = true
+} catch {
+  hasCrypto = false
+}
+
+Object.assign(Dispatcher.prototype, api)
+
+module.exports.Dispatcher = Dispatcher
+module.exports.Client = Client
+module.exports.Pool = Pool
+module.exports.BalancedPool = BalancedPool
+module.exports.Agent = Agent
+module.exports.ProxyAgent = ProxyAgent
+module.exports.RetryHandler = RetryHandler
+
+module.exports.DecoratorHandler = DecoratorHandler
+module.exports.RedirectHandler = RedirectHandler
+module.exports.createRedirectInterceptor = createRedirectInterceptor
+
+module.exports.buildConnector = buildConnector
+module.exports.errors = errors
+
+function makeDispatcher (fn) {
+  return (url, opts, handler) => {
+    if (typeof opts === 'function') {
+      handler = opts
+      opts = null
+    }
+
+    if (!url || (typeof url !== 'string' && typeof url !== 'object' && !(url instanceof URL))) {
+      throw new InvalidArgumentError('invalid url')
+    }
+
+    if (opts != null && typeof opts !== 'object') {
+      throw new InvalidArgumentError('invalid opts')
+    }
+
+    if (opts && opts.path != null) {
+      if (typeof opts.path !== 'string') {
+        throw new InvalidArgumentError('invalid opts.path')
+      }
+
+      let path = opts.path
+      if (!opts.path.startsWith('/')) {
+        path = `/${path}`
+      }
+
+      url = new URL(util.parseOrigin(url).origin + path)
+    } else {
+      if (!opts) {
+        opts = typeof url === 'object' ? url : {}
+      }
+
+      url = util.parseURL(url)
+    }
+
+    const { agent, dispatcher = getGlobalDispatcher() } = opts
+
+    if (agent) {
+      throw new InvalidArgumentError('unsupported opts.agent. Did you mean opts.client?')
+    }
+
+    return fn.call(dispatcher, {
+      ...opts,
+      origin: url.origin,
+      path: url.search ? `${url.pathname}${url.search}` : url.pathname,
+      method: opts.method || (opts.body ? 'PUT' : 'GET')
+    }, handler)
+  }
+}
+
+module.exports.setGlobalDispatcher = setGlobalDispatcher
+module.exports.getGlobalDispatcher = getGlobalDispatcher
+
+if (util.nodeMajor > 16 || (util.nodeMajor === 16 && util.nodeMinor >= 8)) {
+  let fetchImpl = null
+  module.exports.fetch = async function fetch (resource) {
+    if (!fetchImpl) {
+      fetchImpl = (__nccwpck_require__(8358).fetch)
+    }
+
+    try {
+      return await fetchImpl(...arguments)
+    } catch (err) {
+      if (typeof err === 'object') {
+        Error.captureStackTrace(err, this)
+      }
+
+      throw err
+    }
+  }
+  module.exports.Headers = __nccwpck_require__(2908).Headers
+  module.exports.Response = __nccwpck_require__(5683).Response
+  module.exports.Request = __nccwpck_require__(5767).Request
+  module.exports.FormData = __nccwpck_require__(2894).FormData
+  module.exports.File = __nccwpck_require__(66).File
+  module.exports.FileReader = __nccwpck_require__(2171).FileReader
+
+  const { setGlobalOrigin, getGlobalOrigin } = __nccwpck_require__(9963)
+
+  module.exports.setGlobalOrigin = setGlobalOrigin
+  module.exports.getGlobalOrigin = getGlobalOrigin
+
+  const { CacheStorage } = __nccwpck_require__(9605)
+  const { kConstruct } = __nccwpck_require__(1829)
+
+  // Cache & CacheStorage are tightly coupled with fetch. Even though they could
+  // run on older versions of Node, they are of no use without fetch.
+  module.exports.caches = new CacheStorage(kConstruct)
+}
+
+if (util.nodeMajor >= 16) {
+  const { deleteCookie, getCookies, getSetCookies, setCookie } = __nccwpck_require__(8829)
+
+  module.exports.deleteCookie = deleteCookie
+  module.exports.getCookies = getCookies
+  module.exports.getSetCookies = getSetCookies
+  module.exports.setCookie = setCookie
+
+  const { parseMIMEType, serializeAMimeType } = __nccwpck_require__(3911)
+
+  module.exports.parseMIMEType = parseMIMEType
+  module.exports.serializeAMimeType = serializeAMimeType
+}
+
+if (util.nodeMajor >= 18 && hasCrypto) {
+  const { WebSocket } = __nccwpck_require__(6102)
+
+  module.exports.WebSocket = WebSocket
+}
+
+module.exports.request = makeDispatcher(api.request)
+module.exports.stream = makeDispatcher(api.stream)
+module.exports.pipeline = makeDispatcher(api.pipeline)
+module.exports.connect = makeDispatcher(api.connect)
+module.exports.upgrade = makeDispatcher(api.upgrade)
+
+module.exports.MockClient = MockClient
+module.exports.MockPool = MockPool
+module.exports.MockAgent = MockAgent
+module.exports.mockErrors = mockErrors
+
+
+/***/ }),
+
+/***/ 9072:
+/***/ ((module, __unused_webpack_exports, __nccwpck_require__) => {
+
+
+
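+// undici Agent: routes each dispatch to a per-origin Client or Pool, tracked via WeakRef so idle dispatchers can be garbage collected.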
+const { InvalidArgumentError } = __nccwpck_require__(3862)
+const { kClients, kRunning, kClose, kDestroy, kDispatch, kInterceptors } = __nccwpck_require__(4856)
+const DispatcherBase = __nccwpck_require__(9368)
+const Pool = __nccwpck_require__(7787)
+const Client = __nccwpck_require__(6646)
+const util = __nccwpck_require__(3465)
+const createRedirectInterceptor = __nccwpck_require__(2130)
+const { WeakRef, FinalizationRegistry } = __nccwpck_require__(547)()
+
+const kOnConnect = Symbol('onConnect')
+const kOnDisconnect = Symbol('onDisconnect')
+const kOnConnectionError = Symbol('onConnectionError')
+const kMaxRedirections = Symbol('maxRedirections')
+const kOnDrain = Symbol('onDrain')
+const kFactory = Symbol('factory')
+const kFinalizer = Symbol('finalizer')
+const kOptions = Symbol('options')
+
+function defaultFactory (origin, opts) {
+  return opts && opts.connections === 1
+    ? new Client(origin, opts)
+    : new Pool(origin, opts)
+}
+
+class Agent extends DispatcherBase {
+  constructor ({ factory = defaultFactory, maxRedirections = 0, connect, ...options } = {}) {
+    super()
+
+    if (typeof factory !== 'function') {
+      throw new InvalidArgumentError('factory must be a function.')
+    }
+
+    if (connect != null && typeof connect !== 'function' && typeof connect !== 'object') {
+      throw new InvalidArgumentError('connect must be a function or an object')
+    }
+
+    if (!Number.isInteger(maxRedirections) || maxRedirections < 0) {
+      throw new InvalidArgumentError('maxRedirections must be a positive number')
+    }
+
+    if (connect && typeof connect !== 'function') {
+      connect = { ...connect }
+    }
+
+    this[kInterceptors] = options.interceptors && options.interceptors.Agent && Array.isArray(options.interceptors.Agent)
+      ? options.interceptors.Agent
+      : [createRedirectInterceptor({ maxRedirections })]
+
+    this[kOptions] = { ...util.deepClone(options), connect }
+    this[kOptions].interceptors = options.interceptors
+      ? { ...options.interceptors }
+      : undefined
+    this[kMaxRedirections] = maxRedirections
+    this[kFactory] = factory
+    this[kClients] = new Map()
+    this[kFinalizer] = new FinalizationRegistry(/* istanbul ignore next: gc is nondeterministic */ key => {
+      const ref = this[kClients].get(key)
+      if (ref !== undefined && ref.deref() === undefined) {
+        this[kClients].delete(key)
+      }
+    })
+
+    const agent = this
+
+    this[kOnDrain] = (origin, targets) => {
+      agent.emit('drain', origin, [agent, ...targets])
+    }
+
+    this[kOnConnect] = (origin, targets) => {
+      agent.emit('connect', origin, [agent, ...targets])
+    }
+
+    this[kOnDisconnect] = (origin, targets, err) => {
+      agent.emit('disconnect', origin, [agent, ...targets], err)
+    }
+
+    this[kOnConnectionError] = (origin, targets, err) => {
+      agent.emit('connectionError', origin, [agent, ...targets], err)
+    }
+  }
+
+  get [kRunning] () {
+    let ret = 0
+    for (const ref of this[kClients].values()) {
+      const client = ref.deref()
+      /* istanbul ignore next: gc is nondeterministic */
+      if (client) {
+        ret += client[kRunning]
+      }
+    }
+    return ret
+  }
+
+  [kDispatch] (opts, handler) {
+    let key
+    if (opts.origin && (typeof opts.origin === 'string' || opts.origin instanceof URL)) {
+      key = String(opts.origin)
+    } else {
+      throw new InvalidArgumentError('opts.origin must be a non-empty string or URL.')
+    }
+
+    const ref = this[kClients].get(key)
+
+    let dispatcher = ref ? ref.deref() : null
+    if (!dispatcher) {
+      dispatcher = this[kFactory](opts.origin, this[kOptions])
+        .on('drain', this[kOnDrain])
+        .on('connect', this[kOnConnect])
+        .on('disconnect', this[kOnDisconnect])
+        .on('connectionError', this[kOnConnectionError])
+
+      this[kClients].set(key, new WeakRef(dispatcher))
+      this[kFinalizer].register(dispatcher, key)
+    }
+
+    return dispatcher.dispatch(opts, handler)
+  }
+
+  async [kClose] () {
+    const closePromises = []
+    for (const ref of this[kClients].values()) {
+      const client = ref.deref()
+      /* istanbul ignore else: gc is nondeterministic */
+      if (client) {
+        closePromises.push(client.close())
+      }
+    }
+
+    await Promise.all(closePromises)
+  }
+
+  async [kDestroy] (err) {
+    const destroyPromises = []
+    for (const ref of this[kClients].values()) {
+      const client = ref.deref()
+      /* istanbul ignore else: gc is nondeterministic */
+      if (client) {
+        destroyPromises.push(client.destroy(err))
+      }
+    }
+
+    await Promise.all(destroyPromises)
+  }
+}
+
+module.exports = Agent
+
+
+/***/ }),
+
+/***/ 4541:
+/***/ ((module, __unused_webpack_exports, __nccwpck_require__) => {
+
+const { addAbortListener } = __nccwpck_require__(3465)
+const { RequestAbortedError } = __nccwpck_require__(3862)
+
+const kListener = Symbol('kListener')
+const kSignal = Symbol('kSignal')
+
+function abort (self) {
+  if (self.abort) {
+    self.abort()
+  } else {
+    self.onError(new RequestAbortedError())
+  }
+}
+
+function addSignal (self, signal) {
+  self[kSignal] = null
+  self[kListener] = null
+
+  if (!signal) {
+    return
+  }
+
+  if (signal.aborted) {
+    abort(self)
+    return
+  }
+
+  self[kSignal] = signal
+  self[kListener] = () => {
+    abort(self)
+  }
+
+  addAbortListener(self[kSignal], self[kListener])
+}
+
+function removeSignal (self) {
+  if (!self[kSignal]) {
+    return
+  }
+
+  if ('removeEventListener' in self[kSignal]) {
+    self[kSignal].removeEventListener('abort', self[kListener])
+  } else {
+    self[kSignal].removeListener('abort', self[kListener])
+  }
+
+  self[kSignal] = null
+  self[kListener] = null
+}
+
+module.exports = {
+  addSignal,
+  removeSignal
+}
+
+
+/***/ }),
+
+/***/ 2657:
+/***/ ((module, __unused_webpack_exports, __nccwpck_require__) => {
+
+
+
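+// undici.connect(): issues an HTTP CONNECT request and hands the raw socket to the callback.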
+const { AsyncResource } = __nccwpck_require__(290)
+const { InvalidArgumentError, RequestAbortedError, SocketError } = __nccwpck_require__(3862)
+const util = __nccwpck_require__(3465)
+const { addSignal, removeSignal } = __nccwpck_require__(4541)
+
+class ConnectHandler extends AsyncResource {
+  constructor (opts, callback) {
+    if (!opts || typeof opts !== 'object') {
+      throw new InvalidArgumentError('invalid opts')
+    }
+
+    if (typeof callback !== 'function') {
+      throw new InvalidArgumentError('invalid callback')
+    }
+
+    const { signal, opaque, responseHeaders } = opts
+
+    if (signal && typeof signal.on !== 'function' && typeof signal.addEventListener !== 'function') {
+      throw new InvalidArgumentError('signal must be an EventEmitter or EventTarget')
+    }
+
+    super('UNDICI_CONNECT')
+
+    this.opaque = opaque || null
+    this.responseHeaders = responseHeaders || null
+    this.callback = callback
+    this.abort = null
+
+    addSignal(this, signal)
+  }
+
+  onConnect (abort, context) {
+    if (!this.callback) {
+      throw new RequestAbortedError()
+    }
+
+    this.abort = abort
+    this.context = context
+  }
+
+  onHeaders () {
+    throw new SocketError('bad connect', null)
+  }
+
+  onUpgrade (statusCode, rawHeaders, socket) {
+    const { callback, opaque, context } = this
+
+    removeSignal(this)
+
+    this.callback = null
+
+    let headers = rawHeaders
+    // A null value indicates an HTTP/2 session; only parse raw headers when present.
+    if (headers != null) {
+      headers = this.responseHeaders === 'raw' ? util.parseRawHeaders(rawHeaders) : util.parseHeaders(rawHeaders)
+    }
+
+    this.runInAsyncScope(callback, null, null, {
+      statusCode,
+      headers,
+      socket,
+      opaque,
+      context
+    })
+  }
+
+  onError (err) {
+    const { callback, opaque } = this
+
+    removeSignal(this)
+
+    if (callback) {
+      this.callback = null
+      queueMicrotask(() => {
+        this.runInAsyncScope(callback, null, err, { opaque })
+      })
+    }
+  }
+}
+
+function connect (opts, callback) {
+  if (callback === undefined) {
+    return new Promise((resolve, reject) => {
+      connect.call(this, opts, (err, data) => {
+        return err ? reject(err) : resolve(data)
+      })
+    })
+  }
+
+  try {
+    const connectHandler = new ConnectHandler(opts, callback)
+    this.dispatch({ ...opts, method: 'CONNECT' }, connectHandler)
+  } catch (err) {
+    if (typeof callback !== 'function') {
+      throw err
+    }
+    const opaque = opts && opts.opaque
+    queueMicrotask(() => callback(err, { opaque }))
+  }
+}
+
+module.exports = connect
+
+
+/***/ }),
+
+/***/ 681:
+/***/ ((module, __unused_webpack_exports, __nccwpck_require__) => {
+
+
+
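+// undici.pipeline(): exposes a request/response exchange as a single Duplex stream.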
+const {
+  Readable,
+  Duplex,
+  PassThrough
+} = __nccwpck_require__(2203)
+const {
+  InvalidArgumentError,
+  InvalidReturnValueError,
+  RequestAbortedError
+} = __nccwpck_require__(3862)
+const util = __nccwpck_require__(3465)
+const { AsyncResource } = __nccwpck_require__(290)
+const { addSignal, removeSignal } = __nccwpck_require__(4541)
+const assert = __nccwpck_require__(2613)
+
+const kResume = Symbol('resume')
+
+class PipelineRequest extends Readable {
+  constructor () {
+    super({ autoDestroy: true })
+
+    this[kResume] = null
+  }
+
+  _read () {
+    const { [kResume]: resume } = this
+
+    if (resume) {
+      this[kResume] = null
+      resume()
+    }
+  }
+
+  _destroy (err, callback) {
+    this._read()
+
+    callback(err)
+  }
+}
+
+class PipelineResponse extends Readable {
+  constructor (resume) {
+    super({ autoDestroy: true })
+    this[kResume] = resume
+  }
+
+  _read () {
+    this[kResume]()
+  }
+
+  _destroy (err, callback) {
+    if (!err && !this._readableState.endEmitted) {
+      err = new RequestAbortedError()
+    }
+
+    callback(err)
+  }
+}
+
+class PipelineHandler extends AsyncResource {
+  constructor (opts, handler) {
+    if (!opts || typeof opts !== 'object') {
+      throw new InvalidArgumentError('invalid opts')
+    }
+
+    if (typeof handler !== 'function') {
+      throw new InvalidArgumentError('invalid handler')
+    }
+
+    const { signal, method, opaque, onInfo, responseHeaders } = opts
+
+    if (signal && typeof signal.on !== 'function' && typeof signal.addEventListener !== 'function') {
+      throw new InvalidArgumentError('signal must be an EventEmitter or EventTarget')
+    }
+
+    if (method === 'CONNECT') {
+      throw new InvalidArgumentError('invalid method')
+    }
+
+    if (onInfo && typeof onInfo !== 'function') {
+      throw new InvalidArgumentError('invalid onInfo callback')
+    }
+
+    super('UNDICI_PIPELINE')
+
+    this.opaque = opaque || null
+    this.responseHeaders = responseHeaders || null
+    this.handler = handler
+    this.abort = null
+    this.context = null
+    this.onInfo = onInfo || null
+
+    this.req = new PipelineRequest().on('error', util.nop)
+
+    this.ret = new Duplex({
+      readableObjectMode: opts.objectMode,
+      autoDestroy: true,
+      read: () => {
+        const { body } = this
+
+        if (body && body.resume) {
+          body.resume()
+        }
+      },
+      write: (chunk, encoding, callback) => {
+        const { req } = this
+
+        if (req.push(chunk, encoding) || req._readableState.destroyed) {
+          callback()
+        } else {
+          req[kResume] = callback
+        }
+      },
+      destroy: (err, callback) => {
+        const { body, req, res, ret, abort } = this
+
+        if (!err && !ret._readableState.endEmitted) {
+          err = new RequestAbortedError()
+        }
+
+        if (abort && err) {
+          abort()
+        }
+
+        util.destroy(body, err)
+        util.destroy(req, err)
+        util.destroy(res, err)
+
+        removeSignal(this)
+
+        callback(err)
+      }
+    }).on('prefinish', () => {
+      const { req } = this
+
+      // Node < 15 does not call _final in the same tick.
+      req.push(null)
+    })
+
+    this.res = null
+
+    addSignal(this, signal)
+  }
+
+  onConnect (abort, context) {
+    const { ret, res } = this
+
+    assert(!res, 'pipeline cannot be retried')
+
+    if (ret.destroyed) {
+      throw new RequestAbortedError()
+    }
+
+    this.abort = abort
+    this.context = context
+  }
+
+  onHeaders (statusCode, rawHeaders, resume) {
+    const { opaque, handler, context } = this
+
+    if (statusCode < 200) {
+      if (this.onInfo) {
+        const headers = this.responseHeaders === 'raw' ? util.parseRawHeaders(rawHeaders) : util.parseHeaders(rawHeaders)
+        this.onInfo({ statusCode, headers })
+      }
+      return
+    }
+
+    this.res = new PipelineResponse(resume)
+
+    let body
+    try {
+      this.handler = null
+      const headers = this.responseHeaders === 'raw' ? util.parseRawHeaders(rawHeaders) : util.parseHeaders(rawHeaders)
+      body = this.runInAsyncScope(handler, null, {
+        statusCode,
+        headers,
+        opaque,
+        body: this.res,
+        context
+      })
+    } catch (err) {
+      this.res.on('error', util.nop)
+      throw err
+    }
+
+    if (!body || typeof body.on !== 'function') {
+      throw new InvalidReturnValueError('expected Readable')
+    }
+
+    body
+      .on('data', (chunk) => {
+        const { ret, body } = this
+
+        if (!ret.push(chunk) && body.pause) {
+          body.pause()
+        }
+      })
+      .on('error', (err) => {
+        const { ret } = this
+
+        util.destroy(ret, err)
+      })
+      .on('end', () => {
+        const { ret } = this
+
+        ret.push(null)
+      })
+      .on('close', () => {
+        const { ret } = this
+
+        if (!ret._readableState.ended) {
+          util.destroy(ret, new RequestAbortedError())
+        }
+      })
+
+    this.body = body
+  }
+
+  onData (chunk) {
+    const { res } = this
+    return res.push(chunk)
+  }
+
+  onComplete (trailers) {
+    const { res } = this
+    res.push(null)
+  }
+
+  onError (err) {
+    const { ret } = this
+    this.handler = null
+    util.destroy(ret, err)
+  }
+}
+
+function pipeline (opts, handler) {
+  try {
+    const pipelineHandler = new PipelineHandler(opts, handler)
+    this.dispatch({ ...opts, body: pipelineHandler.req }, pipelineHandler)
+    return pipelineHandler.ret
+  } catch (err) {
+    return new PassThrough().destroy(err)
+  }
+}
+
+module.exports = pipeline
+
+
+/***/ }),
+
+/***/ 2834:
+/***/ ((module, __unused_webpack_exports, __nccwpck_require__) => {
+
+
+
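+// undici.request(): callback/promise API that yields the status code, headers and a readable response body.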
+const Readable = __nccwpck_require__(5964)
+const {
+  InvalidArgumentError,
+  RequestAbortedError
+} = __nccwpck_require__(3862)
+const util = __nccwpck_require__(3465)
+const { getResolveErrorBodyCallback } = __nccwpck_require__(4920)
+const { AsyncResource } = __nccwpck_require__(290)
+const { addSignal, removeSignal } = __nccwpck_require__(4541)
+
+class RequestHandler extends AsyncResource {
+  constructor (opts, callback) {
+    if (!opts || typeof opts !== 'object') {
+      throw new InvalidArgumentError('invalid opts')
+    }
+
+    const { signal, method, opaque, body, onInfo, responseHeaders, throwOnError, highWaterMark } = opts
+
+    try {
+      if (typeof callback !== 'function') {
+        throw new InvalidArgumentError('invalid callback')
+      }
+
+      if (highWaterMark && (typeof highWaterMark !== 'number' || highWaterMark < 0)) {
+        throw new InvalidArgumentError('invalid highWaterMark')
+      }
+
+      if (signal && typeof signal.on !== 'function' && typeof signal.addEventListener !== 'function') {
+        throw new InvalidArgumentError('signal must be an EventEmitter or EventTarget')
+      }
+
+      if (method === 'CONNECT') {
+        throw new InvalidArgumentError('invalid method')
+      }
+
+      if (onInfo && typeof onInfo !== 'function') {
+        throw new InvalidArgumentError('invalid onInfo callback')
+      }
+
+      super('UNDICI_REQUEST')
+    } catch (err) {
+      if (util.isStream(body)) {
+        util.destroy(body.on('error', util.nop), err)
+      }
+      throw err
+    }
+
+    this.responseHeaders = responseHeaders || null
+    this.opaque = opaque || null
+    this.callback = callback
+    this.res = null
+    this.abort = null
+    this.body = body
+    this.trailers = {}
+    this.context = null
+    this.onInfo = onInfo || null
+    this.throwOnError = throwOnError
+    this.highWaterMark = highWaterMark
+
+    if (util.isStream(body)) {
+      body.on('error', (err) => {
+        this.onError(err)
+      })
+    }
+
+    addSignal(this, signal)
+  }
+
+  onConnect (abort, context) {
+    if (!this.callback) {
+      throw new RequestAbortedError()
+    }
+
+    this.abort = abort
+    this.context = context
+  }
+
+  onHeaders (statusCode, rawHeaders, resume, statusMessage) {
+    const { callback, opaque, abort, context, responseHeaders, highWaterMark } = this
+
+    const headers = responseHeaders === 'raw' ? util.parseRawHeaders(rawHeaders) : util.parseHeaders(rawHeaders)
+
+    if (statusCode < 200) {
+      if (this.onInfo) {
+        this.onInfo({ statusCode, headers })
+      }
+      return
+    }
+
+    const parsedHeaders = responseHeaders === 'raw' ? util.parseHeaders(rawHeaders) : headers
+    const contentType = parsedHeaders['content-type']
+    const body = new Readable({ resume, abort, contentType, highWaterMark })
+
+    this.callback = null
+    this.res = body
+    if (callback !== null) {
+      if (this.throwOnError && statusCode >= 400) {
+        this.runInAsyncScope(getResolveErrorBodyCallback, null,
+          { callback, body, contentType, statusCode, statusMessage, headers }
+        )
+      } else {
+        this.runInAsyncScope(callback, null, null, {
+          statusCode,
+          headers,
+          trailers: this.trailers,
+          opaque,
+          body,
+          context
+        })
+      }
+    }
+  }
+
+  onData (chunk) {
+    const { res } = this
+    return res.push(chunk)
+  }
+
+  onComplete (trailers) {
+    const { res } = this
+
+    removeSignal(this)
+
+    util.parseHeaders(trailers, this.trailers)
+
+    res.push(null)
+  }
+
+  onError (err) {
+    const { res, callback, body, opaque } = this
+
+    removeSignal(this)
+
+    if (callback) {
+      // TODO: Does this need queueMicrotask?
+      this.callback = null
+      queueMicrotask(() => {
+        this.runInAsyncScope(callback, null, err, { opaque })
+      })
+    }
+
+    if (res) {
+      this.res = null
+      // Ensure all queued handlers are invoked before destroying res.
+      queueMicrotask(() => {
+        util.destroy(res, err)
+      })
+    }
+
+    if (body) {
+      this.body = null
+      util.destroy(body, err)
+    }
+  }
+}
+
+function request (opts, callback) {
+  if (callback === undefined) {
+    return new Promise((resolve, reject) => {
+      request.call(this, opts, (err, data) => {
+        return err ? reject(err) : resolve(data)
+      })
+    })
+  }
+
+  try {
+    this.dispatch(opts, new RequestHandler(opts, callback))
+  } catch (err) {
+    if (typeof callback !== 'function') {
+      throw err
+    }
+    const opaque = opts && opts.opaque
+    queueMicrotask(() => callback(err, { opaque }))
+  }
+}
+
+module.exports = request
+module.exports.RequestHandler = RequestHandler
+
+
+/***/ }),
+
+/***/ 6759:
+/***/ ((module, __unused_webpack_exports, __nccwpck_require__) => {
+
+
+
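+// undici.stream(): pipes the response body into a Writable created by the caller-supplied factory.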
+const { finished, PassThrough } = __nccwpck_require__(2203)
+const {
+  InvalidArgumentError,
+  InvalidReturnValueError,
+  RequestAbortedError
+} = __nccwpck_require__(3862)
+const util = __nccwpck_require__(3465)
+const { getResolveErrorBodyCallback } = __nccwpck_require__(4920)
+const { AsyncResource } = __nccwpck_require__(290)
+const { addSignal, removeSignal } = __nccwpck_require__(4541)
+
+class StreamHandler extends AsyncResource {
+  constructor (opts, factory, callback) {
+    if (!opts || typeof opts !== 'object') {
+      throw new InvalidArgumentError('invalid opts')
+    }
+
+    const { signal, method, opaque, body, onInfo, responseHeaders, throwOnError } = opts
+
+    try {
+      if (typeof callback !== 'function') {
+        throw new InvalidArgumentError('invalid callback')
+      }
+
+      if (typeof factory !== 'function') {
+        throw new InvalidArgumentError('invalid factory')
+      }
+
+      if (signal && typeof signal.on !== 'function' && typeof signal.addEventListener !== 'function') {
+        throw new InvalidArgumentError('signal must be an EventEmitter or EventTarget')
+      }
+
+      if (method === 'CONNECT') {
+        throw new InvalidArgumentError('invalid method')
+      }
+
+      if (onInfo && typeof onInfo !== 'function') {
+        throw new InvalidArgumentError('invalid onInfo callback')
+      }
+
+      super('UNDICI_STREAM')
+    } catch (err) {
+      if (util.isStream(body)) {
+        util.destroy(body.on('error', util.nop), err)
+      }
+      throw err
+    }
+
+    this.responseHeaders = responseHeaders || null
+    this.opaque = opaque || null
+    this.factory = factory
+    this.callback = callback
+    this.res = null
+    this.abort = null
+    this.context = null
+    this.trailers = null
+    this.body = body
+    this.onInfo = onInfo || null
+    this.throwOnError = throwOnError || false
+
+    if (util.isStream(body)) {
+      body.on('error', (err) => {
+        this.onError(err)
+      })
+    }
+
+    addSignal(this, signal)
+  }
+
+  onConnect (abort, context) {
+    if (!this.callback) {
+      throw new RequestAbortedError()
+    }
+
+    this.abort = abort
+    this.context = context
+  }
+
+  onHeaders (statusCode, rawHeaders, resume, statusMessage) {
+    const { factory, opaque, context, callback, responseHeaders } = this
+
+    const headers = responseHeaders === 'raw' ? util.parseRawHeaders(rawHeaders) : util.parseHeaders(rawHeaders)
+
+    if (statusCode < 200) {
+      if (this.onInfo) {
+        this.onInfo({ statusCode, headers })
+      }
+      return
+    }
+
+    this.factory = null
+
+    let res
+
+    if (this.throwOnError && statusCode >= 400) {
+      const parsedHeaders = responseHeaders === 'raw' ? util.parseHeaders(rawHeaders) : headers
+      const contentType = parsedHeaders['content-type']
+      res = new PassThrough()
+
+      this.callback = null
+      this.runInAsyncScope(getResolveErrorBodyCallback, null,
+        { callback, body: res, contentType, statusCode, statusMessage, headers }
+      )
+    } else {
+      if (factory === null) {
+        return
+      }
+
+      res = this.runInAsyncScope(factory, null, {
+        statusCode,
+        headers,
+        opaque,
+        context
+      })
+
+      if (
+        !res ||
+        typeof res.write !== 'function' ||
+        typeof res.end !== 'function' ||
+        typeof res.on !== 'function'
+      ) {
+        throw new InvalidReturnValueError('expected Writable')
+      }
+
+      // TODO: Avoid finished. It registers an unnecessary number of listeners.
+      finished(res, { readable: false }, (err) => {
+        const { callback, res, opaque, trailers, abort } = this
+
+        this.res = null
+        if (err || !res.readable) {
+          util.destroy(res, err)
+        }
+
+        this.callback = null
+        this.runInAsyncScope(callback, null, err || null, { opaque, trailers })
+
+        if (err) {
+          abort()
+        }
+      })
+    }
+
+    res.on('drain', resume)
+
+    this.res = res
+
+    const needDrain = res.writableNeedDrain !== undefined
+      ? res.writableNeedDrain
+      : res._writableState && res._writableState.needDrain
+
+    return needDrain !== true
+  }
+
+  onData (chunk) {
+    const { res } = this
+
+    return res ? res.write(chunk) : true
+  }
+
+  onComplete (trailers) {
+    const { res } = this
+
+    removeSignal(this)
+
+    if (!res) {
+      return
+    }
+
+    this.trailers = util.parseHeaders(trailers)
+
+    res.end()
+  }
+
+  onError (err) {
+    const { res, callback, opaque, body } = this
+
+    removeSignal(this)
+
+    this.factory = null
+
+    if (res) {
+      this.res = null
+      util.destroy(res, err)
+    } else if (callback) {
+      this.callback = null
+      queueMicrotask(() => {
+        this.runInAsyncScope(callback, null, err, { opaque })
+      })
+    }
+
+    if (body) {
+      this.body = null
+      util.destroy(body, err)
+    }
+  }
+}
+
+function stream (opts, factory, callback) {
+  if (callback === undefined) {
+    return new Promise((resolve, reject) => {
+      stream.call(this, opts, factory, (err, data) => {
+        return err ? reject(err) : resolve(data)
+      })
+    })
+  }
+
+  try {
+    this.dispatch(opts, new StreamHandler(opts, factory, callback))
+  } catch (err) {
+    if (typeof callback !== 'function') {
+      throw err
+    }
+    const opaque = opts && opts.opaque
+    queueMicrotask(() => callback(err, { opaque }))
+  }
+}
+
+module.exports = stream
+
+
+/***/ }),
+
+/***/ 9387:
+/***/ ((module, __unused_webpack_exports, __nccwpck_require__) => {
+
+
+
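+// undici.upgrade(): performs a protocol upgrade (e.g. WebSocket) and returns the upgraded socket on a 101 response.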
+const { InvalidArgumentError, RequestAbortedError, SocketError } = __nccwpck_require__(3862)
+const { AsyncResource } = __nccwpck_require__(290)
+const util = __nccwpck_require__(3465)
+const { addSignal, removeSignal } = __nccwpck_require__(4541)
+const assert = __nccwpck_require__(2613)
+
+class UpgradeHandler extends AsyncResource {
+  constructor (opts, callback) {
+    if (!opts || typeof opts !== 'object') {
+      throw new InvalidArgumentError('invalid opts')
+    }
+
+    if (typeof callback !== 'function') {
+      throw new InvalidArgumentError('invalid callback')
+    }
+
+    const { signal, opaque, responseHeaders } = opts
+
+    if (signal && typeof signal.on !== 'function' && typeof signal.addEventListener !== 'function') {
+      throw new InvalidArgumentError('signal must be an EventEmitter or EventTarget')
+    }
+
+    super('UNDICI_UPGRADE')
+
+    this.responseHeaders = responseHeaders || null
+    this.opaque = opaque || null
+    this.callback = callback
+    this.abort = null
+    this.context = null
+
+    addSignal(this, signal)
+  }
+
+  onConnect (abort, context) {
+    if (!this.callback) {
+      throw new RequestAbortedError()
+    }
+
+    this.abort = abort
+    this.context = null
+  }
+
+  onHeaders () {
+    throw new SocketError('bad upgrade', null)
+  }
+
+  onUpgrade (statusCode, rawHeaders, socket) {
+    const { callback, opaque, context } = this
+
+    assert.strictEqual(statusCode, 101)
+
+    removeSignal(this)
+
+    this.callback = null
+    const headers = this.responseHeaders === 'raw' ? util.parseRawHeaders(rawHeaders) : util.parseHeaders(rawHeaders)
+    this.runInAsyncScope(callback, null, null, {
+      headers,
+      socket,
+      opaque,
+      context
+    })
+  }
+
+  onError (err) {
+    const { callback, opaque } = this
+
+    removeSignal(this)
+
+    if (callback) {
+      this.callback = null
+      queueMicrotask(() => {
+        this.runInAsyncScope(callback, null, err, { opaque })
+      })
+    }
+  }
+}
+
+function upgrade (opts, callback) {
+  if (callback === undefined) {
+    return new Promise((resolve, reject) => {
+      upgrade.call(this, opts, (err, data) => {
+        return err ? reject(err) : resolve(data)
+      })
+    })
+  }
+
+  try {
+    const upgradeHandler = new UpgradeHandler(opts, callback)
+    this.dispatch({
+      ...opts,
+      method: opts.method || 'GET',
+      upgrade: opts.protocol || 'Websocket'
+    }, upgradeHandler)
+  } catch (err) {
+    if (typeof callback !== 'function') {
+      throw err
+    }
+    const opaque = opts && opts.opaque
+    queueMicrotask(() => callback(err, { opaque }))
+  }
+}
+
+module.exports = upgrade
+
+
+/***/ }),
+
+/***/ 4598:
+/***/ ((module, __unused_webpack_exports, __nccwpck_require__) => {
+
+
+
+module.exports.request = __nccwpck_require__(2834)
+module.exports.stream = __nccwpck_require__(6759)
+module.exports.pipeline = __nccwpck_require__(681)
+module.exports.upgrade = __nccwpck_require__(9387)
+module.exports.connect = __nccwpck_require__(2657)
+
+
+/***/ }),
+
+/***/ 5964:
+/***/ ((module, __unused_webpack_exports, __nccwpck_require__) => {
+
+// Ported from https://github.com/nodejs/undici/pull/907
+
+
+
+const assert = __nccwpck_require__(2613)
+const { Readable } = __nccwpck_require__(2203)
+const { RequestAbortedError, NotSupportedError, InvalidArgumentError } = __nccwpck_require__(3862)
+const util = __nccwpck_require__(3465)
+const { ReadableStreamFrom, toUSVString } = __nccwpck_require__(3465)
+
+let Blob
+
+const kConsume = Symbol('kConsume')
+const kReading = Symbol('kReading')
+const kBody = Symbol('kBody')
+const kAbort = Symbol('abort')
+const kContentType = Symbol('kContentType')
+
+const noop = () => {}
+
+module.exports = class BodyReadable extends Readable {
+  constructor ({
+    resume,
+    abort,
+    contentType = '',
+    highWaterMark = 64 * 1024 // Same as nodejs fs streams.
+  }) {
+    super({
+      autoDestroy: true,
+      read: resume,
+      highWaterMark
+    })
+
+    this._readableState.dataEmitted = false
+
+    this[kAbort] = abort
+    this[kConsume] = null
+    this[kBody] = null
+    this[kContentType] = contentType
+
+    // Is stream being consumed through Readable API?
+    // This is an optimization so that we avoid checking
+    // for 'data' and 'readable' listeners in the hot path
+    // inside push().
+    this[kReading] = false
+  }
+
+  destroy (err) {
+    if (this.destroyed) {
+      // Node < 16
+      return this
+    }
+
+    if (!err && !this._readableState.endEmitted) {
+      err = new RequestAbortedError()
+    }
+
+    if (err) {
+      this[kAbort]()
+    }
+
+    return super.destroy(err)
+  }
+
+  emit (ev, ...args) {
+    if (ev === 'data') {
+      // Node < 16.7
+      this._readableState.dataEmitted = true
+    } else if (ev === 'error') {
+      // Node < 16
+      this._readableState.errorEmitted = true
+    }
+    return super.emit(ev, ...args)
+  }
+
+  on (ev, ...args) {
+    if (ev === 'data' || ev === 'readable') {
+      this[kReading] = true
+    }
+    return super.on(ev, ...args)
+  }
+
+  addListener (ev, ...args) {
+    return this.on(ev, ...args)
+  }
+
+  off (ev, ...args) {
+    const ret = super.off(ev, ...args)
+    if (ev === 'data' || ev === 'readable') {
+      this[kReading] = (
+        this.listenerCount('data') > 0 ||
+        this.listenerCount('readable') > 0
+      )
+    }
+    return ret
+  }
+
+  removeListener (ev, ...args) {
+    return this.off(ev, ...args)
+  }
+
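+  // Fast path: while a consume() (text/json/blob/arrayBuffer) is pending and
+  // nothing is buffered, hand chunks straight to the accumulator and only go
+  // through the Readable machinery when 'data'/'readable' listeners exist.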
+  push (chunk) {
+    if (this[kConsume] && chunk !== null && this.readableLength === 0) {
+      consumePush(this[kConsume], chunk)
+      return this[kReading] ? super.push(chunk) : true
+    }
+    return super.push(chunk)
+  }
+
+  // https://fetch.spec.whatwg.org/#dom-body-text
+  async text () {
+    return consume(this, 'text')
+  }
+
+  // https://fetch.spec.whatwg.org/#dom-body-json
+  async json () {
+    return consume(this, 'json')
+  }
+
+  // https://fetch.spec.whatwg.org/#dom-body-blob
+  async blob () {
+    return consume(this, 'blob')
+  }
+
+  // https://fetch.spec.whatwg.org/#dom-body-arraybuffer
+  async arrayBuffer () {
+    return consume(this, 'arrayBuffer')
+  }
+
+  // https://fetch.spec.whatwg.org/#dom-body-formdata
+  async formData () {
+    // TODO: Implement.
+    throw new NotSupportedError()
+  }
+
+  // https://fetch.spec.whatwg.org/#dom-body-bodyused
+  get bodyUsed () {
+    return util.isDisturbed(this)
+  }
+
+  // https://fetch.spec.whatwg.org/#dom-body-body
+  get body () {
+    if (!this[kBody]) {
+      this[kBody] = ReadableStreamFrom(this)
+      if (this[kConsume]) {
+        // TODO: Is this the best way to force a lock?
+        this[kBody].getReader() // Ensure stream is locked.
+        assert(this[kBody].locked)
+      }
+    }
+    return this[kBody]
+  }
+
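+  // Drain and discard the body, up to opts.limit bytes (256 KiB by default),
+  // typically so the underlying connection can be reused; the stream is
+  // destroyed once the limit is exceeded or the optional signal aborts.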
+  dump (opts) {
+    let limit = opts && Number.isFinite(opts.limit) ? opts.limit : 262144
+    const signal = opts && opts.signal
+
+    if (signal) {
+      try {
+        if (typeof signal !== 'object' || !('aborted' in signal)) {
+          throw new InvalidArgumentError('signal must be an AbortSignal')
+        }
+        util.throwIfAborted(signal)
+      } catch (err) {
+        return Promise.reject(err)
+      }
+    }
+
+    if (this.closed) {
+      return Promise.resolve(null)
+    }
+
+    return new Promise((resolve, reject) => {
+      const signalListenerCleanup = signal
+        ? util.addAbortListener(signal, () => {
+          this.destroy()
+        })
+        : noop
+
+      this
+        .on('close', function () {
+          signalListenerCleanup()
+          if (signal && signal.aborted) {
+            reject(signal.reason || Object.assign(new Error('The operation was aborted'), { name: 'AbortError' }))
+          } else {
+            resolve(null)
+          }
+        })
+        .on('error', noop)
+        .on('data', function (chunk) {
+          limit -= chunk.length
+          if (limit <= 0) {
+            this.destroy()
+          }
+        })
+        .resume()
+    })
+  }
+}
+
+// https://streams.spec.whatwg.org/#readablestream-locked
+function isLocked (self) {
+  // Consume is an implicit lock.
+  return (self[kBody] && self[kBody].locked === true) || self[kConsume]
+}
+
+// https://fetch.spec.whatwg.org/#body-unusable
+function isUnusable (self) {
+  return util.isDisturbed(self) || isLocked(self)
+}
+
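+// Shared implementation behind text()/json()/blob()/arrayBuffer(): registers a
+// one-shot accumulator on the stream (see consumePush/consumeEnd below) and
+// settles once the body has been fully read or the stream errors/closes early.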
+async function consume (stream, type) {
+  if (isUnusable(stream)) {
+    throw new TypeError('unusable')
+  }
+
+  assert(!stream[kConsume])
+
+  return new Promise((resolve, reject) => {
+    stream[kConsume] = {
+      type,
+      stream,
+      resolve,
+      reject,
+      length: 0,
+      body: []
+    }
+
+    stream
+      .on('error', function (err) {
+        consumeFinish(this[kConsume], err)
+      })
+      .on('close', function () {
+        if (this[kConsume].body !== null) {
+          consumeFinish(this[kConsume], new RequestAbortedError())
+        }
+      })
+
+    process.nextTick(consumeStart, stream[kConsume])
+  })
+}
+
+function consumeStart (consume) {
+  if (consume.body === null) {
+    return
+  }
+
+  const { _readableState: state } = consume.stream
+
+  for (const chunk of state.buffer) {
+    consumePush(consume, chunk)
+  }
+
+  if (state.endEmitted) {
+    consumeEnd(consume)
+  } else {
+    consume.stream.on('end', function () {
+      consumeEnd(this[kConsume])
+    })
+  }
+
+  consume.stream.resume()
+
+  while (consume.stream.read() != null) {
+    // Loop
+  }
+}
+
+function consumeEnd (consume) {
+  const { type, body, resolve, stream, length } = consume
+
+  try {
+    if (type === 'text') {
+      resolve(toUSVString(Buffer.concat(body)))
+    } else if (type === 'json') {
+      resolve(JSON.parse(Buffer.concat(body)))
+    } else if (type === 'arrayBuffer') {
+      const dst = new Uint8Array(length)
+
+      let pos = 0
+      for (const buf of body) {
+        dst.set(buf, pos)
+        pos += buf.byteLength
+      }
+
+      resolve(dst.buffer)
+    } else if (type === 'blob') {
+      if (!Blob) {
+        Blob = (__nccwpck_require__(181).Blob)
+      }
+      resolve(new Blob(body, { type: stream[kContentType] }))
+    }
+
+    consumeFinish(consume)
+  } catch (err) {
+    stream.destroy(err)
+  }
+}
+
+function consumePush (consume, chunk) {
+  consume.length += chunk.length
+  consume.body.push(chunk)
+}
+
+function consumeFinish (consume, err) {
+  if (consume.body === null) {
+    return
+  }
+
+  if (err) {
+    consume.reject(err)
+  } else {
+    consume.resolve()
+  }
+
+  consume.type = null
+  consume.stream = null
+  consume.resolve = null
+  consume.reject = null
+  consume.length = 0
+  consume.body = null
+}
+
+
+/***/ }),
+
+/***/ 4920:
+/***/ ((module, __unused_webpack_exports, __nccwpck_require__) => {
+
+const assert = __nccwpck_require__(2613)
+const {
+  ResponseStatusCodeError
+} = __nccwpck_require__(3862)
+const { toUSVString } = __nccwpck_require__(3465)
+
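+// Reads at most 128 KiB of an error response body and invokes the callback
+// with a ResponseStatusCodeError, attaching a JSON or text payload when the
+// content type allows it.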
+async function getResolveErrorBodyCallback ({ callback, body, contentType, statusCode, statusMessage, headers }) {
+  assert(body)
+
+  let chunks = []
+  let limit = 0
+
+  for await (const chunk of body) {
+    chunks.push(chunk)
+    limit += chunk.length
+    if (limit > 128 * 1024) {
+      chunks = null
+      break
+    }
+  }
+
+  if (statusCode === 204 || !contentType || !chunks) {
+    process.nextTick(callback, new ResponseStatusCodeError(`Response status code ${statusCode}${statusMessage ? `: ${statusMessage}` : ''}`, statusCode, headers))
+    return
+  }
+
+  try {
+    if (contentType.startsWith('application/json')) {
+      const payload = JSON.parse(toUSVString(Buffer.concat(chunks)))
+      process.nextTick(callback, new ResponseStatusCodeError(`Response status code ${statusCode}${statusMessage ? `: ${statusMessage}` : ''}`, statusCode, headers, payload))
+      return
+    }
+
+    if (contentType.startsWith('text/')) {
+      const payload = toUSVString(Buffer.concat(chunks))
+      process.nextTick(callback, new ResponseStatusCodeError(`Response status code ${statusCode}${statusMessage ? `: ${statusMessage}` : ''}`, statusCode, headers, payload))
+      return
+    }
+  } catch (err) {
+    // If parsing the payload fails, fall through to the generic error below
+  }
+
+  process.nextTick(callback, new ResponseStatusCodeError(`Response status code ${statusCode}${statusMessage ? `: ${statusMessage}` : ''}`, statusCode, headers))
+}
+
+module.exports = { getResolveErrorBodyCallback }
+
+
+/***/ }),
+
+/***/ 7568:
+/***/ ((module, __unused_webpack_exports, __nccwpck_require__) => {
+
+
+
+const {
+  BalancedPoolMissingUpstreamError,
+  InvalidArgumentError
+} = __nccwpck_require__(3862)
+const {
+  PoolBase,
+  kClients,
+  kNeedDrain,
+  kAddClient,
+  kRemoveClient,
+  kGetDispatcher
+} = __nccwpck_require__(9769)
+const Pool = __nccwpck_require__(7787)
+const { kUrl, kInterceptors } = __nccwpck_require__(4856)
+const { parseOrigin } = __nccwpck_require__(3465)
+const kFactory = Symbol('factory')
+
+const kOptions = Symbol('options')
+const kGreatestCommonDivisor = Symbol('kGreatestCommonDivisor')
+const kCurrentWeight = Symbol('kCurrentWeight')
+const kIndex = Symbol('kIndex')
+const kWeight = Symbol('kWeight')
+const kMaxWeightPerServer = Symbol('kMaxWeightPerServer')
+const kErrorPenalty = Symbol('kErrorPenalty')
+
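+// Euclidean algorithm: gcd(a, 0) === a and gcd(a, b) === gcd(b, a % b).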
+function getGreatestCommonDivisor (a, b) {
+  if (b === 0) return a
+  return getGreatestCommonDivisor(b, a % b)
+}
+
+function defaultFactory (origin, opts) {
+  return new Pool(origin, opts)
+}
+
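+// BalancedPool spreads dispatches across several upstream Pools using an
+// interleaved weighted round-robin: a pool's weight grows on successful
+// connects and shrinks by the error penalty on connection errors and
+// socket-level disconnects.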
+class BalancedPool extends PoolBase {
+  constructor (upstreams = [], { factory = defaultFactory, ...opts } = {}) {
+    super()
+
+    this[kOptions] = opts
+    this[kIndex] = -1
+    this[kCurrentWeight] = 0
+
+    this[kMaxWeightPerServer] = this[kOptions].maxWeightPerServer || 100
+    this[kErrorPenalty] = this[kOptions].errorPenalty || 15
+
+    if (!Array.isArray(upstreams)) {
+      upstreams = [upstreams]
+    }
+
+    if (typeof factory !== 'function') {
+      throw new InvalidArgumentError('factory must be a function.')
+    }
+
+    this[kInterceptors] = opts.interceptors && opts.interceptors.BalancedPool && Array.isArray(opts.interceptors.BalancedPool)
+      ? opts.interceptors.BalancedPool
+      : []
+    this[kFactory] = factory
+
+    for (const upstream of upstreams) {
+      this.addUpstream(upstream)
+    }
+    this._updateBalancedPoolStats()
+  }
+
+  addUpstream (upstream) {
+    const upstreamOrigin = parseOrigin(upstream).origin
+
+    if (this[kClients].find((pool) => (
+      pool[kUrl].origin === upstreamOrigin &&
+      pool.closed !== true &&
+      pool.destroyed !== true
+    ))) {
+      return this
+    }
+    const pool = this[kFactory](upstreamOrigin, Object.assign({}, this[kOptions]))
+
+    this[kAddClient](pool)
+    pool.on('connect', () => {
+      pool[kWeight] = Math.min(this[kMaxWeightPerServer], pool[kWeight] + this[kErrorPenalty])
+    })
+
+    pool.on('connectionError', () => {
+      pool[kWeight] = Math.max(1, pool[kWeight] - this[kErrorPenalty])
+      this._updateBalancedPoolStats()
+    })
+
+    pool.on('disconnect', (...args) => {
+      const err = args[2]
+      if (err && err.code === 'UND_ERR_SOCKET') {
+        // decrease the weight of the pool.
+        pool[kWeight] = Math.max(1, pool[kWeight] - this[kErrorPenalty])
+        this._updateBalancedPoolStats()
+      }
+    })
+
+    for (const client of this[kClients]) {
+      client[kWeight] = this[kMaxWeightPerServer]
+    }
+
+    this._updateBalancedPoolStats()
+
+    return this
+  }
+
+  _updateBalancedPoolStats () {
+    this[kGreatestCommonDivisor] = this[kClients].map(p => p[kWeight]).reduce(getGreatestCommonDivisor, 0)
+  }
+
+  removeUpstream (upstream) {
+    const upstreamOrigin = parseOrigin(upstream).origin
+
+    const pool = this[kClients].find((pool) => (
+      pool[kUrl].origin === upstreamOrigin &&
+      pool.closed !== true &&
+      pool.destroyed !== true
+    ))
+
+    if (pool) {
+      this[kRemoveClient](pool)
+    }
+
+    return this
+  }
+
+  get upstreams () {
+    return this[kClients]
+      .filter(dispatcher => dispatcher.closed !== true && dispatcher.destroyed !== true)
+      .map((p) => p[kUrl].origin)
+  }
+
+  [kGetDispatcher] () {
+    // We validate that the number of pools is greater than 0,
+    // otherwise we would have to wait until an upstream
+    // is added, which might never happen.
+    if (this[kClients].length === 0) {
+      throw new BalancedPoolMissingUpstreamError()
+    }
+
+    const dispatcher = this[kClients].find(dispatcher => (
+      !dispatcher[kNeedDrain] &&
+      dispatcher.closed !== true &&
+      dispatcher.destroyed !== true
+    ))
+
+    if (!dispatcher) {
+      return
+    }
+
+    const allClientsBusy = this[kClients].map(pool => pool[kNeedDrain]).reduce((a, b) => a && b, true)
+
+    if (allClientsBusy) {
+      return
+    }
+
+    let counter = 0
+
+    let maxWeightIndex = this[kClients].findIndex(pool => !pool[kNeedDrain])
+
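+    // Interleaved weighted round-robin: advance the index on every iteration,
+    // lowering the current-weight threshold by the GCD of all weights after
+    // each full pass, and dispatch to the first non-draining pool whose
+    // weight reaches the threshold.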
+    while (counter++ < this[kClients].length) {
+      this[kIndex] = (this[kIndex] + 1) % this[kClients].length
+      const pool = this[kClients][this[kIndex]]
+
+      // find pool index with the largest weight
+      if (pool[kWeight] > this[kClients][maxWeightIndex][kWeight] && !pool[kNeedDrain]) {
+        maxWeightIndex = this[kIndex]
+      }
+
+      // decrease the current weight every `this[kClients].length`.
+      if (this[kIndex] === 0) {
+        // Set the current weight to the next lower weight.
+        this[kCurrentWeight] = this[kCurrentWeight] - this[kGreatestCommonDivisor]
+
+        if (this[kCurrentWeight] <= 0) {
+          this[kCurrentWeight] = this[kMaxWeightPerServer]
+        }
+      }
+      if (pool[kWeight] >= this[kCurrentWeight] && (!pool[kNeedDrain])) {
+        return pool
+      }
+    }
+
+    this[kCurrentWeight] = this[kClients][maxWeightIndex][kWeight]
+    this[kIndex] = maxWeightIndex
+    return this[kClients][maxWeightIndex]
+  }
+}
+
+module.exports = BalancedPool
+
+
+/***/ }),
+
+/***/ 4890:
+/***/ ((module, __unused_webpack_exports, __nccwpck_require__) => {
+
+
+
+const { kConstruct } = __nccwpck_require__(1829)
+const { urlEquals, fieldValues: getFieldValues } = __nccwpck_require__(726)
+const { kEnumerableProperty, isDisturbed } = __nccwpck_require__(3465)
+const { kHeadersList } = __nccwpck_require__(4856)
+const { webidl } = __nccwpck_require__(29)
+const { Response, cloneResponse } = __nccwpck_require__(5683)
+const { Request } = __nccwpck_require__(5767)
+const { kState, kHeaders, kGuard, kRealm } = __nccwpck_require__(8323)
+const { fetching } = __nccwpck_require__(8358)
+const { urlIsHttpHttpsScheme, createDeferredPromise, readAllBytes } = __nccwpck_require__(2696)
+const assert = __nccwpck_require__(2613)
+const { getGlobalDispatcher } = __nccwpck_require__(7882)
+
+/**
+ * @see https://w3c.github.io/ServiceWorker/#dfn-cache-batch-operation
+ * @typedef {Object} CacheBatchOperation
+ * @property {'delete' | 'put'} type
+ * @property {any} request
+ * @property {any} response
+ * @property {import('../../types/cache').CacheQueryOptions} options
+ */
+
+/**
+ * @see https://w3c.github.io/ServiceWorker/#dfn-request-response-list
+ * @typedef {[any, any][]} requestResponseList
+ */
+
+class Cache {
+  /**
+   * @see https://w3c.github.io/ServiceWorker/#dfn-relevant-request-response-list
+   * @type {requestResponseList}
+   */
+  #relevantRequestResponseList
+
+  constructor () {
+    if (arguments[0] !== kConstruct) {
+      webidl.illegalConstructor()
+    }
+
+    this.#relevantRequestResponseList = arguments[1]
+  }
+
+  async match (request, options = {}) {
+    webidl.brandCheck(this, Cache)
+    webidl.argumentLengthCheck(arguments, 1, { header: 'Cache.match' })
+
+    request = webidl.converters.RequestInfo(request)
+    options = webidl.converters.CacheQueryOptions(options)
+
+    const p = await this.matchAll(request, options)
+
+    if (p.length === 0) {
+      return
+    }
+
+    return p[0]
+  }
+
+  async matchAll (request = undefined, options = {}) {
+    webidl.brandCheck(this, Cache)
+
+    if (request !== undefined) request = webidl.converters.RequestInfo(request)
+    options = webidl.converters.CacheQueryOptions(options)
+
+    // 1.
+    let r = null
+
+    // 2.
+    if (request !== undefined) {
+      if (request instanceof Request) {
+        // 2.1.1
+        r = request[kState]
+
+        // 2.1.2
+        if (r.method !== 'GET' && !options.ignoreMethod) {
+          return []
+        }
+      } else if (typeof request === 'string') {
+        // 2.2.1
+        r = new Request(request)[kState]
+      }
+    }
+
+    // 5.
+    // 5.1
+    const responses = []
+
+    // 5.2
+    if (request === undefined) {
+      // 5.2.1
+      for (const requestResponse of this.#relevantRequestResponseList) {
+        responses.push(requestResponse[1])
+      }
+    } else { // 5.3
+      // 5.3.1
+      const requestResponses = this.#queryCache(r, options)
+
+      // 5.3.2
+      for (const requestResponse of requestResponses) {
+        responses.push(requestResponse[1])
+      }
+    }
+
+    // 5.4
+    // We don't implement CORS so we don't need to loop over the responses, yay!
+
+    // 5.5.1
+    const responseList = []
+
+    // 5.5.2
+    for (const response of responses) {
+      // 5.5.2.1
+      const responseObject = new Response(response.body?.source ?? null)
+      const body = responseObject[kState].body
+      responseObject[kState] = response
+      responseObject[kState].body = body
+      responseObject[kHeaders][kHeadersList] = response.headersList
+      responseObject[kHeaders][kGuard] = 'immutable'
+
+      responseList.push(responseObject)
+    }
+
+    // 6.
+    return Object.freeze(responseList)
+  }
+
+  async add (request) {
+    webidl.brandCheck(this, Cache)
+    webidl.argumentLengthCheck(arguments, 1, { header: 'Cache.add' })
+
+    request = webidl.converters.RequestInfo(request)
+
+    // 1.
+    const requests = [request]
+
+    // 2.
+    const responseArrayPromise = this.addAll(requests)
+
+    // 3.
+    return await responseArrayPromise
+  }
+
+  async addAll (requests) {
+    webidl.brandCheck(this, Cache)
+    webidl.argumentLengthCheck(arguments, 1, { header: 'Cache.addAll' })
+
+    requests = webidl.converters['sequence<RequestInfo>'](requests)
+
+    // 1.
+    const responsePromises = []
+
+    // 2.
+    const requestList = []
+
+    // 3.
+    for (const request of requests) {
+      if (typeof request === 'string') {
+        continue
+      }
+
+      // 3.1
+      const r = request[kState]
+
+      // 3.2
+      if (!urlIsHttpHttpsScheme(r.url) || r.method !== 'GET') {
+        throw webidl.errors.exception({
+          header: 'Cache.addAll',
+          message: 'Expected http/s scheme when method is not GET.'
+        })
+      }
+    }
+
+    // 4.
+    /** @type {ReturnType<typeof fetching>[]} */
+    const fetchControllers = []
+
+    // 5.
+    for (const request of requests) {
+      // 5.1
+      const r = new Request(request)[kState]
+
+      // 5.2
+      if (!urlIsHttpHttpsScheme(r.url)) {
+        throw webidl.errors.exception({
+          header: 'Cache.addAll',
+          message: 'Expected http/s scheme.'
+        })
+      }
+
+      // 5.4
+      r.initiator = 'fetch'
+      r.destination = 'subresource'
+
+      // 5.5
+      requestList.push(r)
+
+      // 5.6
+      const responsePromise = createDeferredPromise()
+
+      // 5.7
+      fetchControllers.push(fetching({
+        request: r,
+        dispatcher: getGlobalDispatcher(),
+        processResponse (response) {
+          // 1.
+          if (response.type === 'error' || response.status === 206 || response.status < 200 || response.status > 299) {
+            responsePromise.reject(webidl.errors.exception({
+              header: 'Cache.addAll',
+              message: 'Received an invalid status code or the request failed.'
+            }))
+          } else if (response.headersList.contains('vary')) { // 2.
+            // 2.1
+            const fieldValues = getFieldValues(response.headersList.get('vary'))
+
+            // 2.2
+            for (const fieldValue of fieldValues) {
+              // 2.2.1
+              if (fieldValue === '*') {
+                responsePromise.reject(webidl.errors.exception({
+                  header: 'Cache.addAll',
+                  message: 'invalid vary field value'
+                }))
+
+                for (const controller of fetchControllers) {
+                  controller.abort()
+                }
+
+                return
+              }
+            }
+          }
+        },
+        processResponseEndOfBody (response) {
+          // 1.
+          if (response.aborted) {
+            responsePromise.reject(new DOMException('aborted', 'AbortError'))
+            return
+          }
+
+          // 2.
+          responsePromise.resolve(response)
+        }
+      }))
+
+      // 5.8
+      responsePromises.push(responsePromise.promise)
+    }
+
+    // 6.
+    const p = Promise.all(responsePromises)
+
+    // 7.
+    const responses = await p
+
+    // 7.1
+    const operations = []
+
+    // 7.2
+    let index = 0
+
+    // 7.3
+    for (const response of responses) {
+      // 7.3.1
+      /** @type {CacheBatchOperation} */
+      const operation = {
+        type: 'put', // 7.3.2
+        request: requestList[index], // 7.3.3
+        response // 7.3.4
+      }
+
+      operations.push(operation) // 7.3.5
+
+      index++ // 7.3.6
+    }
+
+    // 7.5
+    const cacheJobPromise = createDeferredPromise()
+
+    // 7.6.1
+    let errorData = null
+
+    // 7.6.2
+    try {
+      this.#batchCacheOperations(operations)
+    } catch (e) {
+      errorData = e
+    }
+
+    // 7.6.3
+    queueMicrotask(() => {
+      // 7.6.3.1
+      if (errorData === null) {
+        cacheJobPromise.resolve(undefined)
+      } else {
+        // 7.6.3.2
+        cacheJobPromise.reject(errorData)
+      }
+    })
+
+    // 7.7
+    return cacheJobPromise.promise
+  }
+
+  async put (request, response) {
+    webidl.brandCheck(this, Cache)
+    webidl.argumentLengthCheck(arguments, 2, { header: 'Cache.put' })
+
+    request = webidl.converters.RequestInfo(request)
+    response = webidl.converters.Response(response)
+
+    // 1.
+    let innerRequest = null
+
+    // 2.
+    if (request instanceof Request) {
+      innerRequest = request[kState]
+    } else { // 3.
+      innerRequest = new Request(request)[kState]
+    }
+
+    // 4.
+    if (!urlIsHttpHttpsScheme(innerRequest.url) || innerRequest.method !== 'GET') {
+      throw webidl.errors.exception({
+        header: 'Cache.put',
+        message: 'Expected an http/s scheme when method is not GET'
+      })
+    }
+
+    // 5.
+    const innerResponse = response[kState]
+
+    // 6.
+    if (innerResponse.status === 206) {
+      throw webidl.errors.exception({
+        header: 'Cache.put',
+        message: 'Got 206 status'
+      })
+    }
+
+    // 7.
+    if (innerResponse.headersList.contains('vary')) {
+      // 7.1.
+      const fieldValues = getFieldValues(innerResponse.headersList.get('vary'))
+
+      // 7.2.
+      for (const fieldValue of fieldValues) {
+        // 7.2.1
+        if (fieldValue === '*') {
+          throw webidl.errors.exception({
+            header: 'Cache.put',
+            message: 'Got * vary field value'
+          })
+        }
+      }
+    }
+
+    // 8.
+    if (innerResponse.body && (isDisturbed(innerResponse.body.stream) || innerResponse.body.stream.locked)) {
+      throw webidl.errors.exception({
+        header: 'Cache.put',
+        message: 'Response body is locked or disturbed'
+      })
+    }
+
+    // 9.
+    const clonedResponse = cloneResponse(innerResponse)
+
+    // 10.
+    const bodyReadPromise = createDeferredPromise()
+
+    // 11.
+    if (innerResponse.body != null) {
+      // 11.1
+      const stream = innerResponse.body.stream
+
+      // 11.2
+      const reader = stream.getReader()
+
+      // 11.3
+      readAllBytes(reader).then(bodyReadPromise.resolve, bodyReadPromise.reject)
+    } else {
+      bodyReadPromise.resolve(undefined)
+    }
+
+    // 12.
+    /** @type {CacheBatchOperation[]} */
+    const operations = []
+
+    // 13.
+    /** @type {CacheBatchOperation} */
+    const operation = {
+      type: 'put', // 14.
+      request: innerRequest, // 15.
+      response: clonedResponse // 16.
+    }
+
+    // 17.
+    operations.push(operation)
+
+    // 19.
+    const bytes = await bodyReadPromise.promise
+
+    if (clonedResponse.body != null) {
+      clonedResponse.body.source = bytes
+    }
+
+    // 19.1
+    const cacheJobPromise = createDeferredPromise()
+
+    // 19.2.1
+    let errorData = null
+
+    // 19.2.2
+    try {
+      this.#batchCacheOperations(operations)
+    } catch (e) {
+      errorData = e
+    }
+
+    // 19.2.3
+    queueMicrotask(() => {
+      // 19.2.3.1
+      if (errorData === null) {
+        cacheJobPromise.resolve()
+      } else { // 19.2.3.2
+        cacheJobPromise.reject(errorData)
+      }
+    })
+
+    return cacheJobPromise.promise
+  }
+
+  async delete (request, options = {}) {
+    webidl.brandCheck(this, Cache)
+    webidl.argumentLengthCheck(arguments, 1, { header: 'Cache.delete' })
+
+    request = webidl.converters.RequestInfo(request)
+    options = webidl.converters.CacheQueryOptions(options)
+
+    /**
+     * @type {Request}
+     */
+    let r = null
+
+    if (request instanceof Request) {
+      r = request[kState]
+
+      if (r.method !== 'GET' && !options.ignoreMethod) {
+        return false
+      }
+    } else {
+      assert(typeof request === 'string')
+
+      r = new Request(request)[kState]
+    }
+
+    /** @type {CacheBatchOperation[]} */
+    const operations = []
+
+    /** @type {CacheBatchOperation} */
+    const operation = {
+      type: 'delete',
+      request: r,
+      options
+    }
+
+    operations.push(operation)
+
+    const cacheJobPromise = createDeferredPromise()
+
+    let errorData = null
+    let requestResponses
+
+    try {
+      requestResponses = this.#batchCacheOperations(operations)
+    } catch (e) {
+      errorData = e
+    }
+
+    queueMicrotask(() => {
+      if (errorData === null) {
+        cacheJobPromise.resolve(!!requestResponses?.length)
+      } else {
+        cacheJobPromise.reject(errorData)
+      }
+    })
+
+    return cacheJobPromise.promise
+  }
+
+  /**
+   * @see https://w3c.github.io/ServiceWorker/#dom-cache-keys
+   * @param {any} request
+   * @param {import('../../types/cache').CacheQueryOptions} options
+   * @returns {readonly Request[]}
+   */
+  async keys (request = undefined, options = {}) {
+    webidl.brandCheck(this, Cache)
+
+    if (request !== undefined) request = webidl.converters.RequestInfo(request)
+    options = webidl.converters.CacheQueryOptions(options)
+
+    // 1.
+    let r = null
+
+    // 2.
+    if (request !== undefined) {
+      // 2.1
+      if (request instanceof Request) {
+        // 2.1.1
+        r = request[kState]
+
+        // 2.1.2
+        if (r.method !== 'GET' && !options.ignoreMethod) {
+          return []
+        }
+      } else if (typeof request === 'string') { // 2.2
+        r = new Request(request)[kState]
+      }
+    }
+
+    // 4.
+    const promise = createDeferredPromise()
+
+    // 5.
+    // 5.1
+    const requests = []
+
+    // 5.2
+    if (request === undefined) {
+      // 5.2.1
+      for (const requestResponse of this.#relevantRequestResponseList) {
+        // 5.2.1.1
+        requests.push(requestResponse[0])
+      }
+    } else { // 5.3
+      // 5.3.1
+      const requestResponses = this.#queryCache(r, options)
+
+      // 5.3.2
+      for (const requestResponse of requestResponses) {
+        // 5.3.2.1
+        requests.push(requestResponse[0])
+      }
+    }
+
+    // 5.4
+    queueMicrotask(() => {
+      // 5.4.1
+      const requestList = []
+
+      // 5.4.2
+      for (const request of requests) {
+        const requestObject = new Request('https://a')
+        requestObject[kState] = request
+        requestObject[kHeaders][kHeadersList] = request.headersList
+        requestObject[kHeaders][kGuard] = 'immutable'
+        requestObject[kRealm] = request.client
+
+        // 5.4.2.1
+        requestList.push(requestObject)
+      }
+
+      // 5.4.3
+      promise.resolve(Object.freeze(requestList))
+    })
+
+    return promise.promise
+  }
+
+  /**
+   * @see https://w3c.github.io/ServiceWorker/#batch-cache-operations-algorithm
+   * @param {CacheBatchOperation[]} operations
+   * @returns {requestResponseList}
+   */
+  #batchCacheOperations (operations) {
+    // 1.
+    const cache = this.#relevantRequestResponseList
+
+    // 2.
+    const backupCache = [...cache]
+
+    // 3.
+    const addedItems = []
+
+    // 4.1
+    const resultList = []
+
+    try {
+      // 4.2
+      for (const operation of operations) {
+        // 4.2.1
+        if (operation.type !== 'delete' && operation.type !== 'put') {
+          throw webidl.errors.exception({
+            header: 'Cache.#batchCacheOperations',
+            message: 'operation type does not match "delete" or "put"'
+          })
+        }
+
+        // 4.2.2
+        if (operation.type === 'delete' && operation.response != null) {
+          throw webidl.errors.exception({
+            header: 'Cache.#batchCacheOperations',
+            message: 'delete operation should not have an associated response'
+          })
+        }
+
+        // 4.2.3
+        if (this.#queryCache(operation.request, operation.options, addedItems).length) {
+          throw new DOMException('???', 'InvalidStateError')
+        }
+
+        // 4.2.4
+        let requestResponses
+
+        // 4.2.5
+        if (operation.type === 'delete') {
+          // 4.2.5.1
+          requestResponses = this.#queryCache(operation.request, operation.options)
+
+          // TODO: the spec is wrong, this is needed to pass WPTs
+          if (requestResponses.length === 0) {
+            return []
+          }
+
+          // 4.2.5.2
+          for (const requestResponse of requestResponses) {
+            const idx = cache.indexOf(requestResponse)
+            assert(idx !== -1)
+
+            // 4.2.5.2.1
+            cache.splice(idx, 1)
+          }
+        } else if (operation.type === 'put') { // 4.2.6
+          // 4.2.6.1
+          if (operation.response == null) {
+            throw webidl.errors.exception({
+              header: 'Cache.#batchCacheOperations',
+              message: 'put operation should have an associated response'
+            })
+          }
+
+          // 4.2.6.2
+          const r = operation.request
+
+          // 4.2.6.3
+          if (!urlIsHttpHttpsScheme(r.url)) {
+            throw webidl.errors.exception({
+              header: 'Cache.#batchCacheOperations',
+              message: 'expected http or https scheme'
+            })
+          }
+
+          // 4.2.6.4
+          if (r.method !== 'GET') {
+            throw webidl.errors.exception({
+              header: 'Cache.#batchCacheOperations',
+              message: 'not get method'
+            })
+          }
+
+          // 4.2.6.5
+          if (operation.options != null) {
+            throw webidl.errors.exception({
+              header: 'Cache.#batchCacheOperations',
+              message: 'options must not be defined'
+            })
+          }
+
+          // 4.2.6.6
+          requestResponses = this.#queryCache(operation.request)
+
+          // 4.2.6.7
+          for (const requestResponse of requestResponses) {
+            const idx = cache.indexOf(requestResponse)
+            assert(idx !== -1)
+
+            // 4.2.6.7.1
+            cache.splice(idx, 1)
+          }
+
+          // 4.2.6.8
+          cache.push([operation.request, operation.response])
+
+          // 4.2.6.10
+          addedItems.push([operation.request, operation.response])
+        }
+
+        // 4.2.7
+        resultList.push([operation.request, operation.response])
+      }
+
+      // 4.3
+      return resultList
+    } catch (e) { // 5.
+      // 5.1
+      this.#relevantRequestResponseList.length = 0
+
+      // 5.2
+      this.#relevantRequestResponseList = backupCache
+
+      // 5.3
+      throw e
+    }
+  }
+
+  /**
+   * @see https://w3c.github.io/ServiceWorker/#query-cache
+   * @param {any} requestQuery
+   * @param {import('../../types/cache').CacheQueryOptions} options
+   * @param {requestResponseList} targetStorage
+   * @returns {requestResponseList}
+   */
+  #queryCache (requestQuery, options, targetStorage) {
+    /** @type {requestResponseList} */
+    const resultList = []
+
+    const storage = targetStorage ?? this.#relevantRequestResponseList
+
+    for (const requestResponse of storage) {
+      const [cachedRequest, cachedResponse] = requestResponse
+      if (this.#requestMatchesCachedItem(requestQuery, cachedRequest, cachedResponse, options)) {
+        resultList.push(requestResponse)
+      }
+    }
+
+    return resultList
+  }
+
+  /**
+   * @see https://w3c.github.io/ServiceWorker/#request-matches-cached-item-algorithm
+   * @param {any} requestQuery
+   * @param {any} request
+   * @param {any | null} response
+   * @param {import('../../types/cache').CacheQueryOptions | undefined} options
+   * @returns {boolean}
+   */
+  #requestMatchesCachedItem (requestQuery, request, response = null, options) {
+    // if (options?.ignoreMethod === false && request.method === 'GET') {
+    //   return false
+    // }
+
+    const queryURL = new URL(requestQuery.url)
+
+    const cachedURL = new URL(request.url)
+
+    if (options?.ignoreSearch) {
+      cachedURL.search = ''
+
+      queryURL.search = ''
+    }
+
+    if (!urlEquals(queryURL, cachedURL, true)) {
+      return false
+    }
+
+    if (
+      response == null ||
+      options?.ignoreVary ||
+      !response.headersList.contains('vary')
+    ) {
+      return true
+    }
+
+    const fieldValues = getFieldValues(response.headersList.get('vary'))
+
+    for (const fieldValue of fieldValues) {
+      if (fieldValue === '*') {
+        return false
+      }
+
+      const requestValue = request.headersList.get(fieldValue)
+      const queryValue = requestQuery.headersList.get(fieldValue)
+
+      // If one has the header and the other doesn't, or one has
+      // a different value than the other, return false
+      if (requestValue !== queryValue) {
+        return false
+      }
+    }
+
+    return true
+  }
+}
+
+Object.defineProperties(Cache.prototype, {
+  [Symbol.toStringTag]: {
+    value: 'Cache',
+    configurable: true
+  },
+  match: kEnumerableProperty,
+  matchAll: kEnumerableProperty,
+  add: kEnumerableProperty,
+  addAll: kEnumerableProperty,
+  put: kEnumerableProperty,
+  delete: kEnumerableProperty,
+  keys: kEnumerableProperty
+})
+
+const cacheQueryOptionConverters = [
+  {
+    key: 'ignoreSearch',
+    converter: webidl.converters.boolean,
+    defaultValue: false
+  },
+  {
+    key: 'ignoreMethod',
+    converter: webidl.converters.boolean,
+    defaultValue: false
+  },
+  {
+    key: 'ignoreVary',
+    converter: webidl.converters.boolean,
+    defaultValue: false
+  }
+]
+
+webidl.converters.CacheQueryOptions = webidl.dictionaryConverter(cacheQueryOptionConverters)
+
+webidl.converters.MultiCacheQueryOptions = webidl.dictionaryConverter([
+  ...cacheQueryOptionConverters,
+  {
+    key: 'cacheName',
+    converter: webidl.converters.DOMString
+  }
+])
+
+webidl.converters.Response = webidl.interfaceConverter(Response)
+
+webidl.converters['sequence<RequestInfo>'] = webidl.sequenceConverter(
+  webidl.converters.RequestInfo
+)
+
+module.exports = {
+  Cache
+}
+
+
+/***/ }),
+
+/***/ 9605:
+/***/ ((module, __unused_webpack_exports, __nccwpck_require__) => {
+
+
+
+const { kConstruct } = __nccwpck_require__(1829)
+const { Cache } = __nccwpck_require__(4890)
+const { webidl } = __nccwpck_require__(29)
+const { kEnumerableProperty } = __nccwpck_require__(3465)
+
+class CacheStorage {
+  /**
+   * @see https://w3c.github.io/ServiceWorker/#dfn-relevant-name-to-cache-map
+   * @type {Map<string, import('./cache').requestResponseList>}
+   */
+  #caches = new Map()
+
+  constructor () {
+    if (arguments[0] !== kConstruct) {
+      webidl.illegalConstructor()
+    }
+  }
+
+  async match (request, options = {}) {
+    webidl.brandCheck(this, CacheStorage)
+    webidl.argumentLengthCheck(arguments, 1, { header: 'CacheStorage.match' })
+
+    request = webidl.converters.RequestInfo(request)
+    options = webidl.converters.MultiCacheQueryOptions(options)
+
+    // 1.
+    if (options.cacheName != null) {
+      // 1.1.1.1
+      if (this.#caches.has(options.cacheName)) {
+        // 1.1.1.1.1
+        const cacheList = this.#caches.get(options.cacheName)
+        const cache = new Cache(kConstruct, cacheList)
+
+        return await cache.match(request, options)
+      }
+    } else { // 2.
+      // 2.2
+      for (const cacheList of this.#caches.values()) {
+        const cache = new Cache(kConstruct, cacheList)
+
+        // 2.2.1.2
+        const response = await cache.match(request, options)
+
+        if (response !== undefined) {
+          return response
+        }
+      }
+    }
+  }
+
+  /**
+   * @see https://w3c.github.io/ServiceWorker/#cache-storage-has
+   * @param {string} cacheName
+   * @returns {Promise<boolean>}
+   */
+  async has (cacheName) {
+    webidl.brandCheck(this, CacheStorage)
+    webidl.argumentLengthCheck(arguments, 1, { header: 'CacheStorage.has' })
+
+    cacheName = webidl.converters.DOMString(cacheName)
+
+    // 2.1.1
+    // 2.2
+    return this.#caches.has(cacheName)
+  }
+
+  /**
+   * @see https://w3c.github.io/ServiceWorker/#dom-cachestorage-open
+   * @param {string} cacheName
+   * @returns {Promise<Cache>}
+   */
+  async open (cacheName) {
+    webidl.brandCheck(this, CacheStorage)
+    webidl.argumentLengthCheck(arguments, 1, { header: 'CacheStorage.open' })
+
+    cacheName = webidl.converters.DOMString(cacheName)
+
+    // 2.1
+    if (this.#caches.has(cacheName)) {
+      // Note: each call returns a new Cache object wrapping the same
+      // underlying list, so await caches.open('v1') !== await caches.open('v1')
+
+      // 2.1.1
+      const cache = this.#caches.get(cacheName)
+
+      // 2.1.1.1
+      return new Cache(kConstruct, cache)
+    }
+
+    // 2.2
+    const cache = []
+
+    // 2.3
+    this.#caches.set(cacheName, cache)
+
+    // 2.4
+    return new Cache(kConstruct, cache)
+  }
+
+  /**
+   * @see https://w3c.github.io/ServiceWorker/#cache-storage-delete
+   * @param {string} cacheName
+   * @returns {Promise<boolean>}
+   */
+  async delete (cacheName) {
+    webidl.brandCheck(this, CacheStorage)
+    webidl.argumentLengthCheck(arguments, 1, { header: 'CacheStorage.delete' })
+
+    cacheName = webidl.converters.DOMString(cacheName)
+
+    return this.#caches.delete(cacheName)
+  }
+
+  /**
+   * @see https://w3c.github.io/ServiceWorker/#cache-storage-keys
+   * @returns {string[]}
+   */
+  async keys () {
+    webidl.brandCheck(this, CacheStorage)
+
+    // 2.1
+    const keys = this.#caches.keys()
+
+    // 2.2
+    return [...keys]
+  }
+}
+
+Object.defineProperties(CacheStorage.prototype, {
+  [Symbol.toStringTag]: {
+    value: 'CacheStorage',
+    configurable: true
+  },
+  match: kEnumerableProperty,
+  has: kEnumerableProperty,
+  open: kEnumerableProperty,
+  delete: kEnumerableProperty,
+  keys: kEnumerableProperty
+})
+
+module.exports = {
+  CacheStorage
+}
+
+
+/***/ }),
+
+/***/ 1829:
+/***/ ((module, __unused_webpack_exports, __nccwpck_require__) => {
+
+
+
+module.exports = {
+  kConstruct: (__nccwpck_require__(4856).kConstruct)
+}
+
+
+/***/ }),
+
+/***/ 726:
+/***/ ((module, __unused_webpack_exports, __nccwpck_require__) => {
+
+
+
+const assert = __nccwpck_require__(2613)
+const { URLSerializer } = __nccwpck_require__(3911)
+const { isValidHeaderName } = __nccwpck_require__(2696)
+
+/**
+ * @see https://url.spec.whatwg.org/#concept-url-equals
+ * @param {URL} A
+ * @param {URL} B
+ * @param {boolean | undefined} excludeFragment
+ * @returns {boolean}
+ */
+function urlEquals (A, B, excludeFragment = false) {
+  const serializedA = URLSerializer(A, excludeFragment)
+
+  const serializedB = URLSerializer(B, excludeFragment)
+
+  return serializedA === serializedB
+}
+
+/**
+ * @see https://github.com/chromium/chromium/blob/694d20d134cb553d8d89e5500b9148012b1ba299/content/browser/cache_storage/cache_storage_cache.cc#L260-L262
+ * @param {string} header
+ */
+function fieldValues (header) {
+  assert(header !== null)
+
+  const values = []
+
+  for (let value of header.split(',')) {
+    value = value.trim()
+
+    if (!value.length) {
+      continue
+    } else if (!isValidHeaderName(value)) {
+      continue
+    }
+
+    values.push(value)
+  }
+
+  return values
+}
+
+module.exports = {
+  urlEquals,
+  fieldValues
+}
+
+
+/***/ }),
+
+/***/ 6646:
+/***/ ((module, __unused_webpack_exports, __nccwpck_require__) => {
+
+// @ts-check
+
+
+
+/* global WebAssembly */
+
+const assert = __nccwpck_require__(2613)
+const net = __nccwpck_require__(9278)
+const http = __nccwpck_require__(8611)
+const { pipeline } = __nccwpck_require__(2203)
+const util = __nccwpck_require__(3465)
+const timers = __nccwpck_require__(2907)
+const Request = __nccwpck_require__(4916)
+const DispatcherBase = __nccwpck_require__(9368)
+const {
+  RequestContentLengthMismatchError,
+  ResponseContentLengthMismatchError,
+  InvalidArgumentError,
+  RequestAbortedError,
+  HeadersTimeoutError,
+  HeadersOverflowError,
+  SocketError,
+  InformationalError,
+  BodyTimeoutError,
+  HTTPParserError,
+  ResponseExceededMaxSizeError,
+  ClientDestroyedError
+} = __nccwpck_require__(3862)
+const buildConnector = __nccwpck_require__(2559)
+const {
+  kUrl,
+  kReset,
+  kServerName,
+  kClient,
+  kBusy,
+  kParser,
+  kConnect,
+  kBlocking,
+  kResuming,
+  kRunning,
+  kPending,
+  kSize,
+  kWriting,
+  kQueue,
+  kConnected,
+  kConnecting,
+  kNeedDrain,
+  kNoRef,
+  kKeepAliveDefaultTimeout,
+  kHostHeader,
+  kPendingIdx,
+  kRunningIdx,
+  kError,
+  kPipelining,
+  kSocket,
+  kKeepAliveTimeoutValue,
+  kMaxHeadersSize,
+  kKeepAliveMaxTimeout,
+  kKeepAliveTimeoutThreshold,
+  kHeadersTimeout,
+  kBodyTimeout,
+  kStrictContentLength,
+  kConnector,
+  kMaxRedirections,
+  kMaxRequests,
+  kCounter,
+  kClose,
+  kDestroy,
+  kDispatch,
+  kInterceptors,
+  kLocalAddress,
+  kMaxResponseSize,
+  kHTTPConnVersion,
+  // HTTP2
+  kHost,
+  kHTTP2Session,
+  kHTTP2SessionState,
+  kHTTP2BuildRequest,
+  kHTTP2CopyHeaders,
+  kHTTP1BuildRequest
+} = __nccwpck_require__(4856)
+
+/** @type {import('http2')} */
+let http2
+try {
+  http2 = __nccwpck_require__(5675)
+} catch {
+  // @ts-ignore
+  http2 = { constants: {} }
+}
+
+const {
+  constants: {
+    HTTP2_HEADER_AUTHORITY,
+    HTTP2_HEADER_METHOD,
+    HTTP2_HEADER_PATH,
+    HTTP2_HEADER_SCHEME,
+    HTTP2_HEADER_CONTENT_LENGTH,
+    HTTP2_HEADER_EXPECT,
+    HTTP2_HEADER_STATUS
+  }
+} = http2
+
+// Experimental
+let h2ExperimentalWarned = false
+
+const FastBuffer = Buffer[Symbol.species]
+
+const kClosedResolve = Symbol('kClosedResolve')
+
+const channels = {}
+
+try {
+  const diagnosticsChannel = __nccwpck_require__(1637)
+  channels.sendHeaders = diagnosticsChannel.channel('undici:client:sendHeaders')
+  channels.beforeConnect = diagnosticsChannel.channel('undici:client:beforeConnect')
+  channels.connectError = diagnosticsChannel.channel('undici:client:connectError')
+  channels.connected = diagnosticsChannel.channel('undici:client:connected')
+} catch {
+  channels.sendHeaders = { hasSubscribers: false }
+  channels.beforeConnect = { hasSubscribers: false }
+  channels.connectError = { hasSubscribers: false }
+  channels.connected = { hasSubscribers: false }
+}
+
+/**
+ * @type {import('../types/client').default}
+ */
+class Client extends DispatcherBase {
+  /**
+   *
+   * @param {string|URL} url
+   * @param {import('../types/client').Client.Options} options
+   */
+  constructor (url, {
+    interceptors,
+    maxHeaderSize,
+    headersTimeout,
+    socketTimeout,
+    requestTimeout,
+    connectTimeout,
+    bodyTimeout,
+    idleTimeout,
+    keepAlive,
+    keepAliveTimeout,
+    maxKeepAliveTimeout,
+    keepAliveMaxTimeout,
+    keepAliveTimeoutThreshold,
+    socketPath,
+    pipelining,
+    tls,
+    strictContentLength,
+    maxCachedSessions,
+    maxRedirections,
+    connect,
+    maxRequestsPerClient,
+    localAddress,
+    maxResponseSize,
+    autoSelectFamily,
+    autoSelectFamilyAttemptTimeout,
+    // h2
+    allowH2,
+    maxConcurrentStreams
+  } = {}) {
+    super()
+
+    if (keepAlive !== undefined) {
+      throw new InvalidArgumentError('unsupported keepAlive, use pipelining=0 instead')
+    }
+
+    if (socketTimeout !== undefined) {
+      throw new InvalidArgumentError('unsupported socketTimeout, use headersTimeout & bodyTimeout instead')
+    }
+
+    if (requestTimeout !== undefined) {
+      throw new InvalidArgumentError('unsupported requestTimeout, use headersTimeout & bodyTimeout instead')
+    }
+
+    if (idleTimeout !== undefined) {
+      throw new InvalidArgumentError('unsupported idleTimeout, use keepAliveTimeout instead')
+    }
+
+    if (maxKeepAliveTimeout !== undefined) {
+      throw new InvalidArgumentError('unsupported maxKeepAliveTimeout, use keepAliveMaxTimeout instead')
+    }
+
+    if (maxHeaderSize != null && !Number.isFinite(maxHeaderSize)) {
+      throw new InvalidArgumentError('invalid maxHeaderSize')
+    }
+
+    if (socketPath != null && typeof socketPath !== 'string') {
+      throw new InvalidArgumentError('invalid socketPath')
+    }
+
+    if (connectTimeout != null && (!Number.isFinite(connectTimeout) || connectTimeout < 0)) {
+      throw new InvalidArgumentError('invalid connectTimeout')
+    }
+
+    if (keepAliveTimeout != null && (!Number.isFinite(keepAliveTimeout) || keepAliveTimeout <= 0)) {
+      throw new InvalidArgumentError('invalid keepAliveTimeout')
+    }
+
+    if (keepAliveMaxTimeout != null && (!Number.isFinite(keepAliveMaxTimeout) || keepAliveMaxTimeout <= 0)) {
+      throw new InvalidArgumentError('invalid keepAliveMaxTimeout')
+    }
+
+    if (keepAliveTimeoutThreshold != null && !Number.isFinite(keepAliveTimeoutThreshold)) {
+      throw new InvalidArgumentError('invalid keepAliveTimeoutThreshold')
+    }
+
+    if (headersTimeout != null && (!Number.isInteger(headersTimeout) || headersTimeout < 0)) {
+      throw new InvalidArgumentError('headersTimeout must be a positive integer or zero')
+    }
+
+    if (bodyTimeout != null && (!Number.isInteger(bodyTimeout) || bodyTimeout < 0)) {
+      throw new InvalidArgumentError('bodyTimeout must be a positive integer or zero')
+    }
+
+    if (connect != null && typeof connect !== 'function' && typeof connect !== 'object') {
+      throw new InvalidArgumentError('connect must be a function or an object')
+    }
+
+    if (maxRedirections != null && (!Number.isInteger(maxRedirections) || maxRedirections < 0)) {
+      throw new InvalidArgumentError('maxRedirections must be a positive number')
+    }
+
+    if (maxRequestsPerClient != null && (!Number.isInteger(maxRequestsPerClient) || maxRequestsPerClient < 0)) {
+      throw new InvalidArgumentError('maxRequestsPerClient must be a positive number')
+    }
+
+    if (localAddress != null && (typeof localAddress !== 'string' || net.isIP(localAddress) === 0)) {
+      throw new InvalidArgumentError('localAddress must be valid string IP address')
+    }
+
+    if (maxResponseSize != null && (!Number.isInteger(maxResponseSize) || maxResponseSize < -1)) {
+      throw new InvalidArgumentError('maxResponseSize must be a positive number')
+    }
+
+    if (
+      autoSelectFamilyAttemptTimeout != null &&
+      (!Number.isInteger(autoSelectFamilyAttemptTimeout) || autoSelectFamilyAttemptTimeout < -1)
+    ) {
+      throw new InvalidArgumentError('autoSelectFamilyAttemptTimeout must be a positive number')
+    }
+
+    // h2
+    if (allowH2 != null && typeof allowH2 !== 'boolean') {
+      throw new InvalidArgumentError('allowH2 must be a valid boolean value')
+    }
+
+    if (maxConcurrentStreams != null && (typeof maxConcurrentStreams !== 'number' || maxConcurrentStreams < 1)) {
+      throw new InvalidArgumentError('maxConcurrentStreams must be a positive integer, greater than 0')
+    }
+
+    if (typeof connect !== 'function') {
+      connect = buildConnector({
+        ...tls,
+        maxCachedSessions,
+        allowH2,
+        socketPath,
+        timeout: connectTimeout,
+        ...(util.nodeHasAutoSelectFamily && autoSelectFamily ? { autoSelectFamily, autoSelectFamilyAttemptTimeout } : undefined),
+        ...connect
+      })
+    }
+
+    this[kInterceptors] = interceptors && interceptors.Client && Array.isArray(interceptors.Client)
+      ? interceptors.Client
+      : [createRedirectInterceptor({ maxRedirections })]
+    this[kUrl] = util.parseOrigin(url)
+    this[kConnector] = connect
+    this[kSocket] = null
+    this[kPipelining] = pipelining != null ? pipelining : 1
+    this[kMaxHeadersSize] = maxHeaderSize || http.maxHeaderSize
+    this[kKeepAliveDefaultTimeout] = keepAliveTimeout == null ? 4e3 : keepAliveTimeout
+    this[kKeepAliveMaxTimeout] = keepAliveMaxTimeout == null ? 600e3 : keepAliveMaxTimeout
+    this[kKeepAliveTimeoutThreshold] = keepAliveTimeoutThreshold == null ? 1e3 : keepAliveTimeoutThreshold
+    this[kKeepAliveTimeoutValue] = this[kKeepAliveDefaultTimeout]
+    this[kServerName] = null
+    this[kLocalAddress] = localAddress != null ? localAddress : null
+    this[kResuming] = 0 // 0 - idle, 1 - scheduled, 2 - resuming
+    this[kNeedDrain] = 0 // 0 - idle, 1 - scheduled, 2 - resuming
+    this[kHostHeader] = `host: ${this[kUrl].hostname}${this[kUrl].port ? `:${this[kUrl].port}` : ''}\r\n`
+    this[kBodyTimeout] = bodyTimeout != null ? bodyTimeout : 300e3
+    this[kHeadersTimeout] = headersTimeout != null ? headersTimeout : 300e3
+    this[kStrictContentLength] = strictContentLength == null ? true : strictContentLength
+    this[kMaxRedirections] = maxRedirections
+    this[kMaxRequests] = maxRequestsPerClient
+    this[kClosedResolve] = null
+    this[kMaxResponseSize] = maxResponseSize > -1 ? maxResponseSize : -1
+    this[kHTTPConnVersion] = 'h1'
+
+    // HTTP/2
+    this[kHTTP2Session] = null
+    this[kHTTP2SessionState] = !allowH2
+      ? null
+      : {
+        // streams: null, // Fixed queue of streams - For future support of `push`
+          openStreams: 0, // Keep track of them to decide whether or not to unref the session
+          maxConcurrentStreams: maxConcurrentStreams != null ? maxConcurrentStreams : 100 // Max peerConcurrentStreams for a Node h2 server
+        }
+    this[kHost] = `${this[kUrl].hostname}${this[kUrl].port ? `:${this[kUrl].port}` : ''}`
+
+    // kQueue is built up of 3 sections separated by
+    // the kRunningIdx and kPendingIdx indices.
+    // |   complete   |   running   |   pending   |
+    //                ^ kRunningIdx ^ kPendingIdx ^ kQueue.length
+    // kRunningIdx points to the first running element.
+    // kPendingIdx points to the first pending element.
+    // This implements a fast queue with an amortized
+    // time of O(1).
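+    // For example, with kQueue = [A, B, C, D, E], kRunningIdx = 2 and
+    // kPendingIdx = 3: A and B are complete, C is running, D and E are pending.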
+
+    this[kQueue] = []
+    this[kRunningIdx] = 0
+    this[kPendingIdx] = 0
+  }
+
+  get pipelining () {
+    return this[kPipelining]
+  }
+
+  set pipelining (value) {
+    this[kPipelining] = value
+    resume(this, true)
+  }
+
+  get [kPending] () {
+    return this[kQueue].length - this[kPendingIdx]
+  }
+
+  get [kRunning] () {
+    return this[kPendingIdx] - this[kRunningIdx]
+  }
+
+  get [kSize] () {
+    return this[kQueue].length - this[kRunningIdx]
+  }
+
+  get [kConnected] () {
+    return !!this[kSocket] && !this[kConnecting] && !this[kSocket].destroyed
+  }
+
+  get [kBusy] () {
+    const socket = this[kSocket]
+    return (
+      (socket && (socket[kReset] || socket[kWriting] || socket[kBlocking])) ||
+      (this[kSize] >= (this[kPipelining] || 1)) ||
+      this[kPending] > 0
+    )
+  }
+
+  /* istanbul ignore: only used for test */
+  [kConnect] (cb) {
+    connect(this)
+    this.once('connect', cb)
+  }
+
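+  // Builds an HTTP/1 or HTTP/2 request for the current connection version,
+  // queues it and schedules a resume; returns false once the client needs to
+  // drain before accepting more requests.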
+  [kDispatch] (opts, handler) {
+    const origin = opts.origin || this[kUrl].origin
+
+    const request = this[kHTTPConnVersion] === 'h2'
+      ? Request[kHTTP2BuildRequest](origin, opts, handler)
+      : Request[kHTTP1BuildRequest](origin, opts, handler)
+
+    this[kQueue].push(request)
+    if (this[kResuming]) {
+      // Do nothing.
+    } else if (util.bodyLength(request.body) == null && util.isIterable(request.body)) {
+      // Wait a tick in case stream/iterator is ended in the same tick.
+      this[kResuming] = 1
+      process.nextTick(resume, this)
+    } else {
+      resume(this, true)
+    }
+
+    if (this[kResuming] && this[kNeedDrain] !== 2 && this[kBusy]) {
+      this[kNeedDrain] = 2
+    }
+
+    return this[kNeedDrain] < 2
+  }
+
+  async [kClose] () {
+    // TODO: for H2 we need to gracefully flush the remaining enqueued
+    // request and close each stream.
+    return new Promise((resolve) => {
+      if (!this[kSize]) {
+        resolve(null)
+      } else {
+        this[kClosedResolve] = resolve
+      }
+    })
+  }
+
+  async [kDestroy] (err) {
+    return new Promise((resolve) => {
+      const requests = this[kQueue].splice(this[kPendingIdx])
+      for (let i = 0; i < requests.length; i++) {
+        const request = requests[i]
+        errorRequest(this, request, err)
+      }
+
+      const callback = () => {
+        if (this[kClosedResolve]) {
+          // TODO (fix): Should we error here with ClientDestroyedError?
+          this[kClosedResolve]()
+          this[kClosedResolve] = null
+        }
+        resolve()
+      }
+
+      if (this[kHTTP2Session] != null) {
+        util.destroy(this[kHTTP2Session], err)
+        this[kHTTP2Session] = null
+        this[kHTTP2SessionState] = null
+      }
+
+      if (!this[kSocket]) {
+        queueMicrotask(callback)
+      } else {
+        util.destroy(this[kSocket].on('close', callback), err)
+      }
+
+      resume(this)
+    })
+  }
+}
+
+function onHttp2SessionError (err) {
+  assert(err.code !== 'ERR_TLS_CERT_ALTNAME_INVALID')
+
+  this[kSocket][kError] = err
+
+  onError(this[kClient], err)
+}
+
+function onHttp2FrameError (type, code, id) {
+  const err = new InformationalError(`HTTP/2: "frameError" received - type ${type}, code ${code}`)
+
+  if (id === 0) {
+    this[kSocket][kError] = err
+    onError(this[kClient], err)
+  }
+}
+
+function onHttp2SessionEnd () {
+  util.destroy(this, new SocketError('other side closed'))
+  util.destroy(this[kSocket], new SocketError('other side closed'))
+}
+
+function onHTTP2GoAway (code) {
+  const client = this[kClient]
+  const err = new InformationalError(`HTTP/2: "GOAWAY" frame received with code ${code}`)
+  client[kSocket] = null
+  client[kHTTP2Session] = null
+
+  if (client.destroyed) {
+    assert(this[kPending] === 0)
+
+    // Fail entire queue.
+    const requests = client[kQueue].splice(client[kRunningIdx])
+    for (let i = 0; i < requests.length; i++) {
+      const request = requests[i]
+      errorRequest(this, request, err)
+    }
+  } else if (client[kRunning] > 0) {
+    // Fail head of pipeline.
+    const request = client[kQueue][client[kRunningIdx]]
+    client[kQueue][client[kRunningIdx]++] = null
+
+    errorRequest(client, request, err)
+  }
+
+  client[kPendingIdx] = client[kRunningIdx]
+
+  assert(client[kRunning] === 0)
+
+  client.emit('disconnect',
+    client[kUrl],
+    [client],
+    err
+  )
+
+  resume(client)
+}
+
+const constants = __nccwpck_require__(307)
+const createRedirectInterceptor = __nccwpck_require__(2130)
+const EMPTY_BUF = Buffer.alloc(0)
+
+async function lazyllhttp () {
+  const llhttpWasmData = process.env.JEST_WORKER_ID ? __nccwpck_require__(9741) : undefined
+
+  let mod
+  try {
+    mod = await WebAssembly.compile(Buffer.from(__nccwpck_require__(5919), 'base64'))
+  } catch (e) {
+    /* istanbul ignore next */
+
+      // We could check whether the error was caused by the simd option not
+      // being enabled, but the occurrence of this other error
+      // * https://github.com/emscripten-core/emscripten/issues/11495
+      // led us to remove that check to avoid breaking Node 12.
+    mod = await WebAssembly.compile(Buffer.from(llhttpWasmData || __nccwpck_require__(9741), 'base64'))
+  }
+
+  return await WebAssembly.instantiate(mod, {
+    env: {
+      /* eslint-disable camelcase */
+
+      wasm_on_url: (p, at, len) => {
+        /* istanbul ignore next */
+        return 0
+      },
+      wasm_on_status: (p, at, len) => {
+        assert.strictEqual(currentParser.ptr, p)
+        const start = at - currentBufferPtr + currentBufferRef.byteOffset
+        return currentParser.onStatus(new FastBuffer(currentBufferRef.buffer, start, len)) || 0
+      },
+      wasm_on_message_begin: (p) => {
+        assert.strictEqual(currentParser.ptr, p)
+        return currentParser.onMessageBegin() || 0
+      },
+      wasm_on_header_field: (p, at, len) => {
+        assert.strictEqual(currentParser.ptr, p)
+        const start = at - currentBufferPtr + currentBufferRef.byteOffset
+        return currentParser.onHeaderField(new FastBuffer(currentBufferRef.buffer, start, len)) || 0
+      },
+      wasm_on_header_value: (p, at, len) => {
+        assert.strictEqual(currentParser.ptr, p)
+        const start = at - currentBufferPtr + currentBufferRef.byteOffset
+        return currentParser.onHeaderValue(new FastBuffer(currentBufferRef.buffer, start, len)) || 0
+      },
+      wasm_on_headers_complete: (p, statusCode, upgrade, shouldKeepAlive) => {
+        assert.strictEqual(currentParser.ptr, p)
+        return currentParser.onHeadersComplete(statusCode, Boolean(upgrade), Boolean(shouldKeepAlive)) || 0
+      },
+      wasm_on_body: (p, at, len) => {
+        assert.strictEqual(currentParser.ptr, p)
+        const start = at - currentBufferPtr + currentBufferRef.byteOffset
+        return currentParser.onBody(new FastBuffer(currentBufferRef.buffer, start, len)) || 0
+      },
+      wasm_on_message_complete: (p) => {
+        assert.strictEqual(currentParser.ptr, p)
+        return currentParser.onMessageComplete() || 0
+      }
+
+      /* eslint-enable camelcase */
+    }
+  })
+}
+
+let llhttpInstance = null
+let llhttpPromise = lazyllhttp()
+llhttpPromise.catch()
+
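+// Module-level state shared with the llhttp wasm callbacks: they reference the
+// parser and source buffer of the in-flight llhttp_execute call.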
+let currentParser = null
+let currentBufferRef = null
+let currentBufferSize = 0
+let currentBufferPtr = null
+
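+// Parser timeout phases: waiting for response headers, receiving the body,
+// and keep-alive idle between requests.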
+const TIMEOUT_HEADERS = 1
+const TIMEOUT_BODY = 2
+const TIMEOUT_IDLE = 3
+
+class Parser {
+  constructor (client, socket, { exports }) {
+    assert(Number.isFinite(client[kMaxHeadersSize]) && client[kMaxHeadersSize] > 0)
+
+    this.llhttp = exports
+    this.ptr = this.llhttp.llhttp_alloc(constants.TYPE.RESPONSE)
+    this.client = client
+    this.socket = socket
+    this.timeout = null
+    this.timeoutValue = null
+    this.timeoutType = null
+    this.statusCode = null
+    this.statusText = ''
+    this.upgrade = false
+    this.headers = []
+    this.headersSize = 0
+    this.headersMaxSize = client[kMaxHeadersSize]
+    this.shouldKeepAlive = false
+    this.paused = false
+    this.resume = this.resume.bind(this)
+
+    this.bytesRead = 0
+
+    this.keepAlive = ''
+    this.contentLength = ''
+    this.connection = ''
+    this.maxResponseSize = client[kMaxResponseSize]
+  }
+
+  setTimeout (value, type) {
+    this.timeoutType = type
+    if (value !== this.timeoutValue) {
+      timers.clearTimeout(this.timeout)
+      if (value) {
+        this.timeout = timers.setTimeout(onParserTimeout, value, this)
+        // istanbul ignore else: only for jest
+        if (this.timeout.unref) {
+          this.timeout.unref()
+        }
+      } else {
+        this.timeout = null
+      }
+      this.timeoutValue = value
+    } else if (this.timeout) {
+      // istanbul ignore else: only for jest
+      if (this.timeout.refresh) {
+        this.timeout.refresh()
+      }
+    }
+  }
+
+  resume () {
+    if (this.socket.destroyed || !this.paused) {
+      return
+    }
+
+    assert(this.ptr != null)
+    assert(currentParser == null)
+
+    this.llhttp.llhttp_resume(this.ptr)
+
+    assert(this.timeoutType === TIMEOUT_BODY)
+    if (this.timeout) {
+      // istanbul ignore else: only for jest
+      if (this.timeout.refresh) {
+        this.timeout.refresh()
+      }
+    }
+
+    this.paused = false
+    this.execute(this.socket.read() || EMPTY_BUF) // Flush parser.
+    this.readMore()
+  }
+
+  readMore () {
+    while (!this.paused && this.ptr) {
+      const chunk = this.socket.read()
+      if (chunk === null) {
+        break
+      }
+      this.execute(chunk)
+    }
+  }
+
+  execute (data) {
+    assert(this.ptr != null)
+    assert(currentParser == null)
+    assert(!this.paused)
+
+    const { socket, llhttp } = this
+
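+    // Grow the shared wasm-side scratch buffer in 4 KiB steps so it can be
+    // reused across parser invocations instead of reallocating for every chunk.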
+    if (data.length > currentBufferSize) {
+      if (currentBufferPtr) {
+        llhttp.free(currentBufferPtr)
+      }
+      currentBufferSize = Math.ceil(data.length / 4096) * 4096
+      currentBufferPtr = llhttp.malloc(currentBufferSize)
+    }
+
+    new Uint8Array(llhttp.memory.buffer, currentBufferPtr, currentBufferSize).set(data)
+
+    // Call `execute` on the wasm parser.
+    // We pass the `llhttp_parser` pointer address, the pointer address of buffer view data,
+    // and finally the length of bytes to parse.
+    // The return value is an error code or `constants.ERROR.OK`.
+    try {
+      let ret
+
+      try {
+        currentBufferRef = data
+        currentParser = this
+        ret = llhttp.llhttp_execute(this.ptr, currentBufferPtr, data.length)
+        /* eslint-disable-next-line no-useless-catch */
+      } catch (err) {
+        /* istanbul ignore next: difficult to make a test case for */
+        throw err
+      } finally {
+        currentParser = null
+        currentBufferRef = null
+      }
+
+      const offset = llhttp.llhttp_get_error_pos(this.ptr) - currentBufferPtr
+
+      if (ret === constants.ERROR.PAUSED_UPGRADE) {
+        this.onUpgrade(data.slice(offset))
+      } else if (ret === constants.ERROR.PAUSED) {
+        this.paused = true
+        socket.unshift(data.slice(offset))
+      } else if (ret !== constants.ERROR.OK) {
+        const ptr = llhttp.llhttp_get_error_reason(this.ptr)
+        let message = ''
+        /* istanbul ignore else: difficult to make a test case for */
+        if (ptr) {
+          const len = new Uint8Array(llhttp.memory.buffer, ptr).indexOf(0)
+          message =
+            'Response does not match the HTTP/1.1 protocol (' +
+            Buffer.from(llhttp.memory.buffer, ptr, len).toString() +
+            ')'
+        }
+        throw new HTTPParserError(message, constants.ERROR[ret], data.slice(offset))
+      }
+    } catch (err) {
+      util.destroy(socket, err)
+    }
+  }
+
+  destroy () {
+    assert(this.ptr != null)
+    assert(currentParser == null)
+
+    this.llhttp.llhttp_free(this.ptr)
+    this.ptr = null
+
+    timers.clearTimeout(this.timeout)
+    this.timeout = null
+    this.timeoutValue = null
+    this.timeoutType = null
+
+    this.paused = false
+  }
+
+  onStatus (buf) {
+    this.statusText = buf.toString()
+  }
+
+  onMessageBegin () {
+    const { socket, client } = this
+
+    /* istanbul ignore next: difficult to make a test case for */
+    if (socket.destroyed) {
+      return -1
+    }
+
+    const request = client[kQueue][client[kRunningIdx]]
+    if (!request) {
+      return -1
+    }
+  }
+
+  onHeaderField (buf) {
+    const len = this.headers.length
+
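+    // this.headers is a flat [field, value, field, value, ...] array. An even
+    // length means the previous pair is complete, so this buffer starts a new
+    // field name; otherwise it continues a field name split across chunks.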
+    if ((len & 1) === 0) {
+      this.headers.push(buf)
+    } else {
+      this.headers[len - 1] = Buffer.concat([this.headers[len - 1], buf])
+    }
+
+    this.trackHeader(buf.length)
+  }
+
+  onHeaderValue (buf) {
+    let len = this.headers.length
+
+    if ((len & 1) === 1) {
+      this.headers.push(buf)
+      len += 1
+    } else {
+      this.headers[len - 1] = Buffer.concat([this.headers[len - 1], buf])
+    }
+
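+    // The cheap length check before the case-insensitive comparison avoids
+    // stringifying most header names.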
+    const key = this.headers[len - 2]
+    if (key.length === 10 && key.toString().toLowerCase() === 'keep-alive') {
+      this.keepAlive += buf.toString()
+    } else if (key.length === 10 && key.toString().toLowerCase() === 'connection') {
+      this.connection += buf.toString()
+    } else if (key.length === 14 && key.toString().toLowerCase() === 'content-length') {
+      this.contentLength += buf.toString()
+    }
+
+    this.trackHeader(buf.length)
+  }
+
+  trackHeader (len) {
+    this.headersSize += len
+    if (this.headersSize >= this.headersMaxSize) {
+      util.destroy(this.socket, new HeadersOverflowError())
+    }
+  }
+
+  onUpgrade (head) {
+    const { upgrade, client, socket, headers, statusCode } = this
+
+    assert(upgrade)
+
+    const request = client[kQueue][client[kRunningIdx]]
+    assert(request)
+
+    assert(!socket.destroyed)
+    assert(socket === client[kSocket])
+    assert(!this.paused)
+    assert(request.upgrade || request.method === 'CONNECT')
+
+    this.statusCode = null
+    this.statusText = ''
+    this.shouldKeepAlive = null
+
+    assert(this.headers.length % 2 === 0)
+    this.headers = []
+    this.headersSize = 0
+
+    socket.unshift(head)
+
+    socket[kParser].destroy()
+    socket[kParser] = null
+
+    socket[kClient] = null
+    socket[kError] = null
+    socket
+      .removeListener('error', onSocketError)
+      .removeListener('readable', onSocketReadable)
+      .removeListener('end', onSocketEnd)
+      .removeListener('close', onSocketClose)
+
+    client[kSocket] = null
+    client[kQueue][client[kRunningIdx]++] = null
+    client.emit('disconnect', client[kUrl], [client], new InformationalError('upgrade'))
+
+    try {
+      request.onUpgrade(statusCode, headers, socket)
+    } catch (err) {
+      util.destroy(socket, err)
+    }
+
+    resume(client)
+  }
+
+  onHeadersComplete (statusCode, upgrade, shouldKeepAlive) {
+    const { client, socket, headers, statusText } = this
+
+    /* istanbul ignore next: difficult to make a test case for */
+    if (socket.destroyed) {
+      return -1
+    }
+
+    const request = client[kQueue][client[kRunningIdx]]
+
+    /* istanbul ignore next: difficult to make a test case for */
+    if (!request) {
+      return -1
+    }
+
+    assert(!this.upgrade)
+    assert(this.statusCode < 200)
+
+    if (statusCode === 100) {
+      util.destroy(socket, new SocketError('bad response', util.getSocketInfo(socket)))
+      return -1
+    }
+
+    /* this can only happen if server is misbehaving */
+    if (upgrade && !request.upgrade) {
+      util.destroy(socket, new SocketError('bad upgrade', util.getSocketInfo(socket)))
+      return -1
+    }
+
+    assert.strictEqual(this.timeoutType, TIMEOUT_HEADERS)
+
+    this.statusCode = statusCode
+    this.shouldKeepAlive = (
+      shouldKeepAlive ||
+      // Override llhttp value which does not allow keepAlive for HEAD.
+      (request.method === 'HEAD' && !socket[kReset] && this.connection.toLowerCase() === 'keep-alive')
+    )
+
+    if (this.statusCode >= 200) {
+      const bodyTimeout = request.bodyTimeout != null
+        ? request.bodyTimeout
+        : client[kBodyTimeout]
+      this.setTimeout(bodyTimeout, TIMEOUT_BODY)
+    } else if (this.timeout) {
+      // istanbul ignore else: only for jest
+      if (this.timeout.refresh) {
+        this.timeout.refresh()
+      }
+    }
+
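+    // llhttp on_headers_complete contract: returning 1 skips the body,
+    // returning 2 skips the body and signals an upgrade (CONNECT / Upgrade).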
+    if (request.method === 'CONNECT') {
+      assert(client[kRunning] === 1)
+      this.upgrade = true
+      return 2
+    }
+
+    if (upgrade) {
+      assert(client[kRunning] === 1)
+      this.upgrade = true
+      return 2
+    }
+
+    assert(this.headers.length % 2 === 0)
+    this.headers = []
+    this.headersSize = 0
+
+    if (this.shouldKeepAlive && client[kPipelining]) {
+      const keepAliveTimeout = this.keepAlive ? util.parseKeepAliveTimeout(this.keepAlive) : null
+
+      if (keepAliveTimeout != null) {
+        const timeout = Math.min(
+          keepAliveTimeout - client[kKeepAliveTimeoutThreshold],
+          client[kKeepAliveMaxTimeout]
+        )
+        if (timeout <= 0) {
+          socket[kReset] = true
+        } else {
+          client[kKeepAliveTimeoutValue] = timeout
+        }
+      } else {
+        client[kKeepAliveTimeoutValue] = client[kKeepAliveDefaultTimeout]
+      }
+    } else {
+      // Stop more requests from being dispatched.
+      socket[kReset] = true
+    }
+
+    const pause = request.onHeaders(statusCode, headers, this.resume, statusText) === false
+
+    if (request.aborted) {
+      return -1
+    }
+
+    if (request.method === 'HEAD') {
+      return 1
+    }
+
+    if (statusCode < 200) {
+      return 1
+    }
+
+    if (socket[kBlocking]) {
+      socket[kBlocking] = false
+      resume(client)
+    }
+
+    return pause ? constants.ERROR.PAUSED : 0
+  }
+
+  onBody (buf) {
+    const { client, socket, statusCode, maxResponseSize } = this
+
+    if (socket.destroyed) {
+      return -1
+    }
+
+    const request = client[kQueue][client[kRunningIdx]]
+    assert(request)
+
+    assert.strictEqual(this.timeoutType, TIMEOUT_BODY)
+    if (this.timeout) {
+      // istanbul ignore else: only for jest
+      if (this.timeout.refresh) {
+        this.timeout.refresh()
+      }
+    }
+
+    assert(statusCode >= 200)
+
+    if (maxResponseSize > -1 && this.bytesRead + buf.length > maxResponseSize) {
+      util.destroy(socket, new ResponseExceededMaxSizeError())
+      return -1
+    }
+
+    this.bytesRead += buf.length
+
+    if (request.onData(buf) === false) {
+      return constants.ERROR.PAUSED
+    }
+  }
+
+  onMessageComplete () {
+    const { client, socket, statusCode, upgrade, headers, contentLength, bytesRead, shouldKeepAlive } = this
+
+    if (socket.destroyed && (!statusCode || shouldKeepAlive)) {
+      return -1
+    }
+
+    if (upgrade) {
+      return
+    }
+
+    const request = client[kQueue][client[kRunningIdx]]
+    assert(request)
+
+    assert(statusCode >= 100)
+
+    this.statusCode = null
+    this.statusText = ''
+    this.bytesRead = 0
+    this.contentLength = ''
+    this.keepAlive = ''
+    this.connection = ''
+
+    assert(this.headers.length % 2 === 0)
+    this.headers = []
+    this.headersSize = 0
+
+    if (statusCode < 200) {
+      return
+    }
+
+    /* istanbul ignore next: should be handled by llhttp? */
+    if (request.method !== 'HEAD' && contentLength && bytesRead !== parseInt(contentLength, 10)) {
+      util.destroy(socket, new ResponseContentLengthMismatchError())
+      return -1
+    }
+
+    request.onComplete(headers)
+
+    client[kQueue][client[kRunningIdx]++] = null
+
+    if (socket[kWriting]) {
+      assert.strictEqual(client[kRunning], 0)
+      // Response completed before request.
+      util.destroy(socket, new InformationalError('reset'))
+      return constants.ERROR.PAUSED
+    } else if (!shouldKeepAlive) {
+      util.destroy(socket, new InformationalError('reset'))
+      return constants.ERROR.PAUSED
+    } else if (socket[kReset] && client[kRunning] === 0) {
+      // Destroy socket once all requests have completed.
+      // The request at the tail of the pipeline is the one
+      // that requested reset and no further requests should
+      // have been queued since then.
+      util.destroy(socket, new InformationalError('reset'))
+      return constants.ERROR.PAUSED
+    } else if (client[kPipelining] === 1) {
+      // We must wait a full event loop cycle before reusing this socket, to make
+      // sure a non-spec-compliant server has not closed the connection even
+      // though it said it wouldn't.
+      setImmediate(resume, client)
+    } else {
+      resume(client)
+    }
+  }
+}
+
+function onParserTimeout (parser) {
+  const { socket, timeoutType, client } = parser
+
+  /* istanbul ignore else */
+  if (timeoutType === TIMEOUT_HEADERS) {
+    if (!socket[kWriting] || socket.writableNeedDrain || client[kRunning] > 1) {
+      assert(!parser.paused, 'cannot be paused while waiting for headers')
+      util.destroy(socket, new HeadersTimeoutError())
+    }
+  } else if (timeoutType === TIMEOUT_BODY) {
+    if (!parser.paused) {
+      util.destroy(socket, new BodyTimeoutError())
+    }
+  } else if (timeoutType === TIMEOUT_IDLE) {
+    assert(client[kRunning] === 0 && client[kKeepAliveTimeoutValue])
+    util.destroy(socket, new InformationalError('socket idle timeout'))
+  }
+}
+
+function onSocketReadable () {
+  const { [kParser]: parser } = this
+  if (parser) {
+    parser.readMore()
+  }
+}
+
+function onSocketError (err) {
+  const { [kClient]: client, [kParser]: parser } = this
+
+  assert(err.code !== 'ERR_TLS_CERT_ALTNAME_INVALID')
+
+  if (client[kHTTPConnVersion] !== 'h2') {
+    // On Mac OS, we get an ECONNRESET even if there is a full body to be forwarded
+    // to the user.
+    if (err.code === 'ECONNRESET' && parser.statusCode && !parser.shouldKeepAlive) {
+      // We treat all incoming data so far as a valid response.
+      parser.onMessageComplete()
+      return
+    }
+  }
+
+  this[kError] = err
+
+  onError(this[kClient], err)
+}
+
+function onError (client, err) {
+  if (
+    client[kRunning] === 0 &&
+    err.code !== 'UND_ERR_INFO' &&
+    err.code !== 'UND_ERR_SOCKET'
+  ) {
+    // The error is not caused by a running request and is not a recoverable
+    // socket error.
+
+    assert(client[kPendingIdx] === client[kRunningIdx])
+
+    const requests = client[kQueue].splice(client[kRunningIdx])
+    for (let i = 0; i < requests.length; i++) {
+      const request = requests[i]
+      errorRequest(client, request, err)
+    }
+    assert(client[kSize] === 0)
+  }
+}
+
+function onSocketEnd () {
+  const { [kParser]: parser, [kClient]: client } = this
+
+  if (client[kHTTPConnVersion] !== 'h2') {
+    if (parser.statusCode && !parser.shouldKeepAlive) {
+      // We treat all incoming data so far as a valid response.
+      parser.onMessageComplete()
+      return
+    }
+  }
+
+  util.destroy(this, new SocketError('other side closed', util.getSocketInfo(this)))
+}
+
+function onSocketClose () {
+  const { [kClient]: client, [kParser]: parser } = this
+
+  if (client[kHTTPConnVersion] === 'h1' && parser) {
+    if (!this[kError] && parser.statusCode && !parser.shouldKeepAlive) {
+      // We treat all incoming data so far as a valid response.
+      parser.onMessageComplete()
+    }
+
+    this[kParser].destroy()
+    this[kParser] = null
+  }
+
+  const err = this[kError] || new SocketError('closed', util.getSocketInfo(this))
+
+  client[kSocket] = null
+
+  if (client.destroyed) {
+    assert(client[kPending] === 0)
+
+    // Fail entire queue.
+    const requests = client[kQueue].splice(client[kRunningIdx])
+    for (let i = 0; i < requests.length; i++) {
+      const request = requests[i]
+      errorRequest(client, request, err)
+    }
+  } else if (client[kRunning] > 0 && err.code !== 'UND_ERR_INFO') {
+    // Fail head of pipeline.
+    const request = client[kQueue][client[kRunningIdx]]
+    client[kQueue][client[kRunningIdx]++] = null
+
+    errorRequest(client, request, err)
+  }
+
+  client[kPendingIdx] = client[kRunningIdx]
+
+  assert(client[kRunning] === 0)
+
+  client.emit('disconnect', client[kUrl], [client], err)
+
+  resume(client)
+}
+
+async function connect (client) {
+  assert(!client[kConnecting])
+  assert(!client[kSocket])
+
+  let { host, hostname, protocol, port } = client[kUrl]
+
+  // Resolve ipv6
+  if (hostname[0] === '[') {
+    const idx = hostname.indexOf(']')
+
+    assert(idx !== -1)
+    const ip = hostname.substring(1, idx)
+
+    assert(net.isIP(ip))
+    hostname = ip
+  }
+
+  client[kConnecting] = true
+
+  if (channels.beforeConnect.hasSubscribers) {
+    channels.beforeConnect.publish({
+      connectParams: {
+        host,
+        hostname,
+        protocol,
+        port,
+        servername: client[kServerName],
+        localAddress: client[kLocalAddress]
+      },
+      connector: client[kConnector]
+    })
+  }
+
+  try {
+    const socket = await new Promise((resolve, reject) => {
+      client[kConnector]({
+        host,
+        hostname,
+        protocol,
+        port,
+        servername: client[kServerName],
+        localAddress: client[kLocalAddress]
+      }, (err, socket) => {
+        if (err) {
+          reject(err)
+        } else {
+          resolve(socket)
+        }
+      })
+    })
+
+    if (client.destroyed) {
+      util.destroy(socket.on('error', () => {}), new ClientDestroyedError())
+      return
+    }
+
+    client[kConnecting] = false
+
+    assert(socket)
+
+    const isH2 = socket.alpnProtocol === 'h2'
+    if (isH2) {
+      if (!h2ExperimentalWarned) {
+        h2ExperimentalWarned = true
+        process.emitWarning('H2 support is experimental, expect it to change at any time.', {
+          code: 'UNDICI-H2'
+        })
+      }
+
+      const session = http2.connect(client[kUrl], {
+        createConnection: () => socket,
+        peerMaxConcurrentStreams: client[kHTTP2SessionState].maxConcurrentStreams
+      })
+
+      client[kHTTPConnVersion] = 'h2'
+      session[kClient] = client
+      session[kSocket] = socket
+      session.on('error', onHttp2SessionError)
+      session.on('frameError', onHttp2FrameError)
+      session.on('end', onHttp2SessionEnd)
+      session.on('goaway', onHTTP2GoAway)
+      session.on('close', onSocketClose)
+      session.unref()
+
+      client[kHTTP2Session] = session
+      socket[kHTTP2Session] = session
+    } else {
+      if (!llhttpInstance) {
+        llhttpInstance = await llhttpPromise
+        llhttpPromise = null
+      }
+
+      socket[kNoRef] = false
+      socket[kWriting] = false
+      socket[kReset] = false
+      socket[kBlocking] = false
+      socket[kParser] = new Parser(client, socket, llhttpInstance)
+    }
+
+    socket[kCounter] = 0
+    socket[kMaxRequests] = client[kMaxRequests]
+    socket[kClient] = client
+    socket[kError] = null
+
+    socket
+      .on('error', onSocketError)
+      .on('readable', onSocketReadable)
+      .on('end', onSocketEnd)
+      .on('close', onSocketClose)
+
+    client[kSocket] = socket
+
+    if (channels.connected.hasSubscribers) {
+      channels.connected.publish({
+        connectParams: {
+          host,
+          hostname,
+          protocol,
+          port,
+          servername: client[kServerName],
+          localAddress: client[kLocalAddress]
+        },
+        connector: client[kConnector],
+        socket
+      })
+    }
+    client.emit('connect', client[kUrl], [client])
+  } catch (err) {
+    if (client.destroyed) {
+      return
+    }
+
+    client[kConnecting] = false
+
+    if (channels.connectError.hasSubscribers) {
+      channels.connectError.publish({
+        connectParams: {
+          host,
+          hostname,
+          protocol,
+          port,
+          servername: client[kServerName],
+          localAddress: client[kLocalAddress]
+        },
+        connector: client[kConnector],
+        error: err
+      })
+    }
+
+    if (err.code === 'ERR_TLS_CERT_ALTNAME_INVALID') {
+      assert(client[kRunning] === 0)
+      while (client[kPending] > 0 && client[kQueue][client[kPendingIdx]].servername === client[kServerName]) {
+        const request = client[kQueue][client[kPendingIdx]++]
+        errorRequest(client, request, err)
+      }
+    } else {
+      onError(client, err)
+    }
+
+    client.emit('connectionError', client[kUrl], [client], err)
+  }
+
+  resume(client)
+}
+
+function emitDrain (client) {
+  client[kNeedDrain] = 0
+  client.emit('drain', client[kUrl], [client])
+}
+
+function resume (client, sync) {
+  if (client[kResuming] === 2) {
+    return
+  }
+
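+  // kResuming === 2 acts as a re-entrancy guard while the dispatch loop in
+  // _resume runs.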
+  client[kResuming] = 2
+
+  _resume(client, sync)
+  client[kResuming] = 0
+
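+  // Compact the queue once enough leading slots have been consumed, shifting
+  // the pending and running indices back accordingly.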
+  if (client[kRunningIdx] > 256) {
+    client[kQueue].splice(0, client[kRunningIdx])
+    client[kPendingIdx] -= client[kRunningIdx]
+    client[kRunningIdx] = 0
+  }
+}
+
+function _resume (client, sync) {
+  while (true) {
+    if (client.destroyed) {
+      assert(client[kPending] === 0)
+      return
+    }
+
+    if (client[kClosedResolve] && !client[kSize]) {
+      client[kClosedResolve]()
+      client[kClosedResolve] = null
+      return
+    }
+
+    const socket = client[kSocket]
+
+    if (socket && !socket.destroyed && socket.alpnProtocol !== 'h2') {
+      if (client[kSize] === 0) {
+        if (!socket[kNoRef] && socket.unref) {
+          socket.unref()
+          socket[kNoRef] = true
+        }
+      } else if (socket[kNoRef] && socket.ref) {
+        socket.ref()
+        socket[kNoRef] = false
+      }
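+    // Signal backpressure: kNeedDrain === 2 marks the client as busy, and the
+    // boolean returned below tells the dispatcher whether to wait for 'drain'.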
+
+      if (client[kSize] === 0) {
+        if (socket[kParser].timeoutType !== TIMEOUT_IDLE) {
+          socket[kParser].setTimeout(client[kKeepAliveTimeoutValue], TIMEOUT_IDLE)
+        }
+      } else if (client[kRunning] > 0 && socket[kParser].statusCode < 200) {
+        if (socket[kParser].timeoutType !== TIMEOUT_HEADERS) {
+          const request = client[kQueue][client[kRunningIdx]]
+          const headersTimeout = request.headersTimeout != null
+            ? request.headersTimeout
+            : client[kHeadersTimeout]
+          socket[kParser].setTimeout(headersTimeout, TIMEOUT_HEADERS)
+        }
+      }
+    }
+
+    if (client[kBusy]) {
+      client[kNeedDrain] = 2
+    } else if (client[kNeedDrain] === 2) {
+      if (sync) {
+        client[kNeedDrain] = 1
+        process.nextTick(emitDrain, client)
+      } else {
+        emitDrain(client)
+      }
+      continue
+    }
+
+    if (client[kPending] === 0) {
+      return
+    }
+
+    if (client[kRunning] >= (client[kPipelining] || 1)) {
+      return
+    }
+
+    const request = client[kQueue][client[kPendingIdx]]
+
+    if (client[kUrl].protocol === 'https:' && client[kServerName] !== request.servername) {
+      if (client[kRunning] > 0) {
+        return
+      }
+
+      client[kServerName] = request.servername
+
+      if (socket && socket.servername !== request.servername) {
+        util.destroy(socket, new InformationalError('servername changed'))
+        return
+      }
+    }
+
+    if (client[kConnecting]) {
+      return
+    }
+
+    if (!socket && !client[kHTTP2Session]) {
+      connect(client)
+      return
+    }
+
+    if (socket.destroyed || socket[kWriting] || socket[kReset] || socket[kBlocking]) {
+      return
+    }
+
+    if (client[kRunning] > 0 && !request.idempotent) {
+      // Non-idempotent request cannot be retried.
+      // Ensure that no other requests are inflight and
+      // could cause failure.
+      return
+    }
+
+    if (client[kRunning] > 0 && (request.upgrade || request.method === 'CONNECT')) {
+      // Don't dispatch an upgrade until all preceding requests have completed.
+      // A misbehaving server might upgrade the connection before all pipelined
+      // requests have completed.
+      return
+    }
+
+    if (client[kRunning] > 0 && util.bodyLength(request.body) !== 0 &&
+      (util.isStream(request.body) || util.isAsyncIterable(request.body))) {
+      // Request with stream or iterator body can error while other requests
+      // are inflight and indirectly error those as well.
+      // Ensure this doesn't happen by waiting for inflight
+      // to complete before dispatching.
+
+      // Request with stream or iterator body cannot be retried.
+      // Ensure that no other requests are inflight and
+      // could cause failure.
+      return
+    }
+
+    if (!request.aborted && write(client, request)) {
+      client[kPendingIdx]++
+    } else {
+      client[kQueue].splice(client[kPendingIdx], 1)
+    }
+  }
+}
+
+// https://www.rfc-editor.org/rfc/rfc7230#section-3.3.2
+function shouldSendContentLength (method) {
+  return method !== 'GET' && method !== 'HEAD' && method !== 'OPTIONS' && method !== 'TRACE' && method !== 'CONNECT'
+}
+
+function write (client, request) {
+  if (client[kHTTPConnVersion] === 'h2') {
+    writeH2(client, client[kHTTP2Session], request)
+    return
+  }
+
+  const { body, method, path, host, upgrade, headers, blocking, reset } = request
+
+  // https://tools.ietf.org/html/rfc7231#section-4.3.1
+  // https://tools.ietf.org/html/rfc7231#section-4.3.2
+  // https://tools.ietf.org/html/rfc7231#section-4.3.5
+
+  // Sending a payload body on a request that does not
+  // expect it can cause undefined behavior on some
+  // servers and corrupt connection state. Do not
+  // re-use the connection for further requests.
+
+  const expectsPayload = (
+    method === 'PUT' ||
+    method === 'POST' ||
+    method === 'PATCH'
+  )
+
+  if (body && typeof body.read === 'function') {
+    // Try to read EOF in order to get length.
+    body.read(0)
+  }
+
+  const bodyLength = util.bodyLength(body)
+
+  let contentLength = bodyLength
+
+  if (contentLength === null) {
+    contentLength = request.contentLength
+  }
+
+  if (contentLength === 0 && !expectsPayload) {
+    // https://tools.ietf.org/html/rfc7230#section-3.3.2
+    // A user agent SHOULD NOT send a Content-Length header field when
+    // the request message does not contain a payload body and the method
+    // semantics do not anticipate such a body.
+
+    contentLength = null
+  }
+
+  // https://github.com/nodejs/undici/issues/2046
+  // A user agent may send a Content-Length header with 0 value, this should be allowed.
+  if (shouldSendContentLength(method) && contentLength > 0 && request.contentLength !== null && request.contentLength !== contentLength) {
+    if (client[kStrictContentLength]) {
+      errorRequest(client, request, new RequestContentLengthMismatchError())
+      return false
+    }
+
+    process.emitWarning(new RequestContentLengthMismatchError())
+  }
+
+  const socket = client[kSocket]
+
+  try {
+    request.onConnect((err) => {
+      if (request.aborted || request.completed) {
+        return
+      }
+
+      errorRequest(client, request, err || new RequestAbortedError())
+
+      util.destroy(socket, new InformationalError('aborted'))
+    })
+  } catch (err) {
+    errorRequest(client, request, err)
+  }
+
+  if (request.aborted) {
+    return false
+  }
+
+  if (method === 'HEAD') {
+    // https://github.com/mcollina/undici/issues/258
+    // Close after a HEAD request to interop with misbehaving servers
+    // that may send a body in the response.
+
+    socket[kReset] = true
+  }
+
+  if (upgrade || method === 'CONNECT') {
+    // On CONNECT or upgrade, block pipeline from dispatching further
+    // requests on this connection.
+
+    socket[kReset] = true
+  }
+
+  if (reset != null) {
+    socket[kReset] = reset
+  }
+
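+  // Stop reusing the socket once it has served the configured maximum number
+  // of requests; it will be reset and a fresh connection established.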
+  if (client[kMaxRequests] && socket[kCounter]++ >= client[kMaxRequests]) {
+    socket[kReset] = true
+  }
+
+  if (blocking) {
+    socket[kBlocking] = true
+  }
+
+  let header = `${method} ${path} HTTP/1.1\r\n`
+
+  if (typeof host === 'string') {
+    header += `host: ${host}\r\n`
+  } else {
+    header += client[kHostHeader]
+  }
+
+  if (upgrade) {
+    header += `connection: upgrade\r\nupgrade: ${upgrade}\r\n`
+  } else if (client[kPipelining] && !socket[kReset]) {
+    header += 'connection: keep-alive\r\n'
+  } else {
+    header += 'connection: close\r\n'
+  }
+
+  if (headers) {
+    header += headers
+  }
+
+  if (channels.sendHeaders.hasSubscribers) {
+    channels.sendHeaders.publish({ request, headers: header, socket })
+  }
+
+  /* istanbul ignore else: assertion */
+  if (!body || bodyLength === 0) {
+    if (contentLength === 0) {
+      socket.write(`${header}content-length: 0\r\n\r\n`, 'latin1')
+    } else {
+      assert(contentLength === null, 'no body must not have content length')
+      socket.write(`${header}\r\n`, 'latin1')
+    }
+    request.onRequestSent()
+  } else if (util.isBuffer(body)) {
+    assert(contentLength === body.byteLength, 'buffer body must have content length')
+
+    socket.cork()
+    socket.write(`${header}content-length: ${contentLength}\r\n\r\n`, 'latin1')
+    socket.write(body)
+    socket.uncork()
+    request.onBodySent(body)
+    request.onRequestSent()
+    if (!expectsPayload) {
+      socket[kReset] = true
+    }
+  } else if (util.isBlobLike(body)) {
+    if (typeof body.stream === 'function') {
+      writeIterable({ body: body.stream(), client, request, socket, contentLength, header, expectsPayload })
+    } else {
+      writeBlob({ body, client, request, socket, contentLength, header, expectsPayload })
+    }
+  } else if (util.isStream(body)) {
+    writeStream({ body, client, request, socket, contentLength, header, expectsPayload })
+  } else if (util.isIterable(body)) {
+    writeIterable({ body, client, request, socket, contentLength, header, expectsPayload })
+  } else {
+    assert(false)
+  }
+
+  return true
+}
+
+function writeH2 (client, session, request) {
+  const { body, method, path, host, upgrade, expectContinue, signal, headers: reqHeaders } = request
+
+  let headers
+  if (typeof reqHeaders === 'string') headers = Request[kHTTP2CopyHeaders](reqHeaders.trim())
+  else headers = reqHeaders
+
+  if (upgrade) {
+    errorRequest(client, request, new Error('Upgrade not supported for H2'))
+    return false
+  }
+
+  try {
+    // TODO(HTTP/2): Should we call onConnect immediately or on stream ready event?
+    request.onConnect((err) => {
+      if (request.aborted || request.completed) {
+        return
+      }
+
+      errorRequest(client, request, err || new RequestAbortedError())
+    })
+  } catch (err) {
+    errorRequest(client, request, err)
+  }
+
+  if (request.aborted) {
+    return false
+  }
+
+  /** @type {import('node:http2').ClientHttp2Stream} */
+  let stream
+  const h2State = client[kHTTP2SessionState]
+
+  headers[HTTP2_HEADER_AUTHORITY] = host || client[kHost]
+  headers[HTTP2_HEADER_METHOD] = method
+
+  if (method === 'CONNECT') {
+    session.ref()
+    // We are already connected; streams are pending and the first request will
+    // create a new stream. We trigger a request to create the stream and wait
+    // until the `ready` event is triggered.
+    // endStream is disabled to allow the user to write to the stream.
+    stream = session.request(headers, { endStream: false, signal })
+
+    if (stream.id && !stream.pending) {
+      request.onUpgrade(null, null, stream)
+      ++h2State.openStreams
+    } else {
+      stream.once('ready', () => {
+        request.onUpgrade(null, null, stream)
+        ++h2State.openStreams
+      })
+    }
+
+    stream.once('close', () => {
+      h2State.openStreams -= 1
+      // TODO(HTTP/2): unref only if current streams count is 0
+      if (h2State.openStreams === 0) session.unref()
+    })
+
+    return true
+  }
+
+  // https://tools.ietf.org/html/rfc7540#section-8.3
+  // :path and :scheme headers must be omitted when sending CONNECT
+
+  headers[HTTP2_HEADER_PATH] = path
+  headers[HTTP2_HEADER_SCHEME] = 'https'
+
+  // https://tools.ietf.org/html/rfc7231#section-4.3.1
+  // https://tools.ietf.org/html/rfc7231#section-4.3.2
+  // https://tools.ietf.org/html/rfc7231#section-4.3.5
+
+  // Sending a payload body on a request that does not
+  // expect it can cause undefined behavior on some
+  // servers and corrupt connection state. Do not
+  // re-use the connection for further requests.
+
+  const expectsPayload = (
+    method === 'PUT' ||
+    method === 'POST' ||
+    method === 'PATCH'
+  )
+
+  if (body && typeof body.read === 'function') {
+    // Try to read EOF in order to get length.
+    body.read(0)
+  }
+
+  let contentLength = util.bodyLength(body)
+
+  if (contentLength == null) {
+    contentLength = request.contentLength
+  }
+
+  if (contentLength === 0 || !expectsPayload) {
+    // https://tools.ietf.org/html/rfc7230#section-3.3.2
+    // A user agent SHOULD NOT send a Content-Length header field when
+    // the request message does not contain a payload body and the method
+    // semantics do not anticipate such a body.
+
+    contentLength = null
+  }
+
+  // https://github.com/nodejs/undici/issues/2046
+  // A user agent may send a Content-Length header with 0 value, this should be allowed.
+  if (shouldSendContentLength(method) && contentLength > 0 && request.contentLength != null && request.contentLength !== contentLength) {
+    if (client[kStrictContentLength]) {
+      errorRequest(client, request, new RequestContentLengthMismatchError())
+      return false
+    }
+
+    process.emitWarning(new RequestContentLengthMismatchError())
+  }
+
+  if (contentLength != null) {
+    assert(body, 'no body must not have content length')
+    headers[HTTP2_HEADER_CONTENT_LENGTH] = `${contentLength}`
+  }
+
+  session.ref()
+
+  const shouldEndStream = method === 'GET' || method === 'HEAD'
+  if (expectContinue) {
+    headers[HTTP2_HEADER_EXPECT] = '100-continue'
+    stream = session.request(headers, { endStream: shouldEndStream, signal })
+
+    stream.once('continue', writeBodyH2)
+  } else {
+    stream = session.request(headers, {
+      endStream: shouldEndStream,
+      signal
+    })
+    writeBodyH2()
+  }
+
+  // Increment the counter as we now have another stream open
+  ++h2State.openStreams
+
+  stream.once('response', headers => {
+    const { [HTTP2_HEADER_STATUS]: statusCode, ...realHeaders } = headers
+
+    if (request.onHeaders(Number(statusCode), realHeaders, stream.resume.bind(stream), '') === false) {
+      stream.pause()
+    }
+  })
+
+  stream.once('end', () => {
+    request.onComplete([])
+  })
+
+  stream.on('data', (chunk) => {
+    if (request.onData(chunk) === false) {
+      stream.pause()
+    }
+  })
+
+  stream.once('close', () => {
+    h2State.openStreams -= 1
+    // TODO(HTTP/2): unref only if current streams count is 0
+    if (h2State.openStreams === 0) {
+      session.unref()
+    }
+  })
+
+  stream.once('error', function (err) {
+    if (client[kHTTP2Session] && !client[kHTTP2Session].destroyed && !this.closed && !this.destroyed) {
+      h2State.streams -= 1
+      util.destroy(stream, err)
+    }
+  })
+
+  stream.once('frameError', (type, code) => {
+    const err = new InformationalError(`HTTP/2: "frameError" received - type ${type}, code ${code}`)
+    errorRequest(client, request, err)
+
+    if (client[kHTTP2Session] && !client[kHTTP2Session].destroyed && !this.closed && !this.destroyed) {
+      h2State.streams -= 1
+      util.destroy(stream, err)
+    }
+  })
+
+  // stream.on('aborted', () => {
+  //   // TODO(HTTP/2): Support aborted
+  // })
+
+  // stream.on('timeout', () => {
+  //   // TODO(HTTP/2): Support timeout
+  // })
+
+  // stream.on('push', headers => {
+  //   // TODO(HTTP/2): Support push
+  // })
+
+  // stream.on('trailers', headers => {
+  //   // TODO(HTTP/2): Support trailers
+  // })
+
+  return true
+
+  function writeBodyH2 () {
+    /* istanbul ignore else: assertion */
+    if (!body) {
+      request.onRequestSent()
+    } else if (util.isBuffer(body)) {
+      assert(contentLength === body.byteLength, 'buffer body must have content length')
+      stream.cork()
+      stream.write(body)
+      stream.uncork()
+      stream.end()
+      request.onBodySent(body)
+      request.onRequestSent()
+    } else if (util.isBlobLike(body)) {
+      if (typeof body.stream === 'function') {
+        writeIterable({
+          client,
+          request,
+          contentLength,
+          h2stream: stream,
+          expectsPayload,
+          body: body.stream(),
+          socket: client[kSocket],
+          header: ''
+        })
+      } else {
+        writeBlob({
+          body,
+          client,
+          request,
+          contentLength,
+          expectsPayload,
+          h2stream: stream,
+          header: '',
+          socket: client[kSocket]
+        })
+      }
+    } else if (util.isStream(body)) {
+      writeStream({
+        body,
+        client,
+        request,
+        contentLength,
+        expectsPayload,
+        socket: client[kSocket],
+        h2stream: stream,
+        header: ''
+      })
+    } else if (util.isIterable(body)) {
+      writeIterable({
+        body,
+        client,
+        request,
+        contentLength,
+        expectsPayload,
+        header: '',
+        h2stream: stream,
+        socket: client[kSocket]
+      })
+    } else {
+      assert(false)
+    }
+  }
+}
+
+function writeStream ({ h2stream, body, client, request, socket, contentLength, header, expectsPayload }) {
+  assert(contentLength !== 0 || client[kRunning] === 0, 'stream body cannot be pipelined')
+
+  if (client[kHTTPConnVersion] === 'h2') {
+    // For HTTP/2, it is enough to pipe the stream
+    const pipe = pipeline(
+      body,
+      h2stream,
+      (err) => {
+        if (err) {
+          util.destroy(body, err)
+          util.destroy(h2stream, err)
+        } else {
+          request.onRequestSent()
+        }
+      }
+    )
+
+    pipe.on('data', onPipeData)
+    pipe.once('end', () => {
+      pipe.removeListener('data', onPipeData)
+      util.destroy(pipe)
+    })
+
+    function onPipeData (chunk) {
+      request.onBodySent(chunk)
+    }
+
+    return
+  }
+
+  let finished = false
+
+  const writer = new AsyncWriter({ socket, request, contentLength, client, expectsPayload, header })
+
+  const onData = function (chunk) {
+    if (finished) {
+      return
+    }
+
+    try {
+      if (!writer.write(chunk) && this.pause) {
+        this.pause()
+      }
+    } catch (err) {
+      util.destroy(this, err)
+    }
+  }
+  const onDrain = function () {
+    if (finished) {
+      return
+    }
+
+    if (body.resume) {
+      body.resume()
+    }
+  }
+  const onAbort = function () {
+    if (finished) {
+      return
+    }
+    const err = new RequestAbortedError()
+    queueMicrotask(() => onFinished(err))
+  }
+  const onFinished = function (err) {
+    if (finished) {
+      return
+    }
+
+    finished = true
+
+    assert(socket.destroyed || (socket[kWriting] && client[kRunning] <= 1))
+
+    socket
+      .off('drain', onDrain)
+      .off('error', onFinished)
+
+    body
+      .removeListener('data', onData)
+      .removeListener('end', onFinished)
+      .removeListener('error', onFinished)
+      .removeListener('close', onAbort)
+
+    if (!err) {
+      try {
+        writer.end()
+      } catch (er) {
+        err = er
+      }
+    }
+
+    writer.destroy(err)
+
+    if (err && (err.code !== 'UND_ERR_INFO' || err.message !== 'reset')) {
+      util.destroy(body, err)
+    } else {
+      util.destroy(body)
+    }
+  }
+
+  body
+    .on('data', onData)
+    .on('end', onFinished)
+    .on('error', onFinished)
+    .on('close', onAbort)
+
+  if (body.resume) {
+    body.resume()
+  }
+
+  socket
+    .on('drain', onDrain)
+    .on('error', onFinished)
+}
+
+async function writeBlob ({ h2stream, body, client, request, socket, contentLength, header, expectsPayload }) {
+  assert(contentLength === body.size, 'blob body must have content length')
+
+  const isH2 = client[kHTTPConnVersion] === 'h2'
+  try {
+    if (contentLength != null && contentLength !== body.size) {
+      throw new RequestContentLengthMismatchError()
+    }
+
+    const buffer = Buffer.from(await body.arrayBuffer())
+
+    if (isH2) {
+      h2stream.cork()
+      h2stream.write(buffer)
+      h2stream.uncork()
+    } else {
+      socket.cork()
+      socket.write(`${header}content-length: ${contentLength}\r\n\r\n`, 'latin1')
+      socket.write(buffer)
+      socket.uncork()
+    }
+
+    request.onBodySent(buffer)
+    request.onRequestSent()
+
+    if (!expectsPayload) {
+      socket[kReset] = true
+    }
+
+    resume(client)
+  } catch (err) {
+    util.destroy(isH2 ? h2stream : socket, err)
+  }
+}
+
+async function writeIterable ({ h2stream, body, client, request, socket, contentLength, header, expectsPayload }) {
+  assert(contentLength !== 0 || client[kRunning] === 0, 'iterator body cannot be pipelined')
+
+  let callback = null
+  function onDrain () {
+    if (callback) {
+      const cb = callback
+      callback = null
+      cb()
+    }
+  }
+
+  const waitForDrain = () => new Promise((resolve, reject) => {
+    assert(callback === null)
+
+    if (socket[kError]) {
+      reject(socket[kError])
+    } else {
+      callback = resolve
+    }
+  })
+
+  if (client[kHTTPConnVersion] === 'h2') {
+    h2stream
+      .on('close', onDrain)
+      .on('drain', onDrain)
+
+    try {
+      // It's up to the user to somehow abort the async iterable.
+      for await (const chunk of body) {
+        if (socket[kError]) {
+          throw socket[kError]
+        }
+
+        const res = h2stream.write(chunk)
+        request.onBodySent(chunk)
+        if (!res) {
+          await waitForDrain()
+        }
+      }
+    } catch (err) {
+      h2stream.destroy(err)
+    } finally {
+      request.onRequestSent()
+      h2stream.end()
+      h2stream
+        .off('close', onDrain)
+        .off('drain', onDrain)
+    }
+
+    return
+  }
+
+  socket
+    .on('close', onDrain)
+    .on('drain', onDrain)
+
+  const writer = new AsyncWriter({ socket, request, contentLength, client, expectsPayload, header })
+  try {
+    // It's up to the user to somehow abort the async iterable.
+    for await (const chunk of body) {
+      if (socket[kError]) {
+        throw socket[kError]
+      }
+
+      if (!writer.write(chunk)) {
+        await waitForDrain()
+      }
+    }
+
+    writer.end()
+  } catch (err) {
+    writer.destroy(err)
+  } finally {
+    socket
+      .off('close', onDrain)
+      .off('drain', onDrain)
+  }
+}
+
+class AsyncWriter {
+  constructor ({ socket, request, contentLength, client, expectsPayload, header }) {
+    this.socket = socket
+    this.request = request
+    this.contentLength = contentLength
+    this.client = client
+    this.bytesWritten = 0
+    this.expectsPayload = expectsPayload
+    this.header = header
+
+    socket[kWriting] = true
+  }
+
+  write (chunk) {
+    const { socket, request, contentLength, client, bytesWritten, expectsPayload, header } = this
+
+    if (socket[kError]) {
+      throw socket[kError]
+    }
+
+    if (socket.destroyed) {
+      return false
+    }
+
+    const len = Buffer.byteLength(chunk)
+    if (!len) {
+      return true
+    }
+
+    // We should defer writing chunks.
+    if (contentLength !== null && bytesWritten + len > contentLength) {
+      if (client[kStrictContentLength]) {
+        throw new RequestContentLengthMismatchError()
+      }
+
+      process.emitWarning(new RequestContentLengthMismatchError())
+    }
+
+    socket.cork()
+
+    if (bytesWritten === 0) {
+      if (!expectsPayload) {
+        socket[kReset] = true
+      }
+
+      if (contentLength === null) {
+        socket.write(`${header}transfer-encoding: chunked\r\n`, 'latin1')
+      } else {
+        socket.write(`${header}content-length: ${contentLength}\r\n\r\n`, 'latin1')
+      }
+    }
+
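+    // Chunked transfer-encoding: the leading CRLF terminates the header block
+    // (for the first chunk) or the previous chunk before the hex size line.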
+    if (contentLength === null) {
+      socket.write(`\r\n${len.toString(16)}\r\n`, 'latin1')
+    }
+
+    this.bytesWritten += len
+
+    const ret = socket.write(chunk)
+
+    socket.uncork()
+
+    request.onBodySent(chunk)
+
+    if (!ret) {
+      if (socket[kParser].timeout && socket[kParser].timeoutType === TIMEOUT_HEADERS) {
+        // istanbul ignore else: only for jest
+        if (socket[kParser].timeout.refresh) {
+          socket[kParser].timeout.refresh()
+        }
+      }
+    }
+
+    return ret
+  }
+
+  end () {
+    const { socket, contentLength, client, bytesWritten, expectsPayload, header, request } = this
+    request.onRequestSent()
+
+    socket[kWriting] = false
+
+    if (socket[kError]) {
+      throw socket[kError]
+    }
+
+    if (socket.destroyed) {
+      return
+    }
+
+    if (bytesWritten === 0) {
+      if (expectsPayload) {
+        // https://tools.ietf.org/html/rfc7230#section-3.3.2
+        // A user agent SHOULD send a Content-Length in a request message when
+        // no Transfer-Encoding is sent and the request method defines a meaning
+        // for an enclosed payload body.
+
+        socket.write(`${header}content-length: 0\r\n\r\n`, 'latin1')
+      } else {
+        socket.write(`${header}\r\n`, 'latin1')
+      }
+    } else if (contentLength === null) {
+      socket.write('\r\n0\r\n\r\n', 'latin1')
+    }
+
+    if (contentLength !== null && bytesWritten !== contentLength) {
+      if (client[kStrictContentLength]) {
+        throw new RequestContentLengthMismatchError()
+      } else {
+        process.emitWarning(new RequestContentLengthMismatchError())
+      }
+    }
+
+    if (socket[kParser].timeout && socket[kParser].timeoutType === TIMEOUT_HEADERS) {
+      // istanbul ignore else: only for jest
+      if (socket[kParser].timeout.refresh) {
+        socket[kParser].timeout.refresh()
+      }
+    }
+
+    resume(client)
+  }
+
+  destroy (err) {
+    const { socket, client } = this
+
+    socket[kWriting] = false
+
+    if (err) {
+      assert(client[kRunning] <= 1, 'pipeline should only contain this request')
+      util.destroy(socket, err)
+    }
+  }
+}
+
+function errorRequest (client, request, err) {
+  try {
+    request.onError(err)
+    assert(request.aborted)
+  } catch (err) {
+    client.emit('error', err)
+  }
+}
+
+module.exports = Client
+
+
+/***/ }),
+
+/***/ 547:
+/***/ ((module, __unused_webpack_exports, __nccwpck_require__) => {
+
+
+
+/* istanbul ignore file: only for Node 12 */
+
+const { kConnected, kSize } = __nccwpck_require__(4856)
+
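+// Minimal WeakRef / FinalizationRegistry stand-ins, used when the natives are
+// unavailable or unreliable (see the coverage workaround in the export below).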
+class CompatWeakRef {
+  constructor (value) {
+    this.value = value
+  }
+
+  deref () {
+    return this.value[kConnected] === 0 && this.value[kSize] === 0
+      ? undefined
+      : this.value
+  }
+}
+
+class CompatFinalizer {
+  constructor (finalizer) {
+    this.finalizer = finalizer
+  }
+
+  register (dispatcher, key) {
+    if (dispatcher.on) {
+      dispatcher.on('disconnect', () => {
+        if (dispatcher[kConnected] === 0 && dispatcher[kSize] === 0) {
+          this.finalizer(key)
+        }
+      })
+    }
+  }
+}
+
+module.exports = function () {
+  // FIXME: remove workaround when the Node bug is fixed
+  // https://github.com/nodejs/node/issues/49344#issuecomment-1741776308
+  if (process.env.NODE_V8_COVERAGE) {
+    return {
+      WeakRef: CompatWeakRef,
+      FinalizationRegistry: CompatFinalizer
+    }
+  }
+  return {
+    WeakRef: global.WeakRef || CompatWeakRef,
+    FinalizationRegistry: global.FinalizationRegistry || CompatFinalizer
+  }
+}
+
+
+/***/ }),
+
+/***/ 516:
+/***/ ((module) => {
+
+
+
+// https://wicg.github.io/cookie-store/#cookie-maximum-attribute-value-size
+const maxAttributeValueSize = 1024
+
+// https://wicg.github.io/cookie-store/#cookie-maximum-name-value-pair-size
+const maxNameValuePairSize = 4096
+
+module.exports = {
+  maxAttributeValueSize,
+  maxNameValuePairSize
+}
+
+
+/***/ }),
+
+/***/ 8829:
+/***/ ((module, __unused_webpack_exports, __nccwpck_require__) => {
+
+
+
+const { parseSetCookie } = __nccwpck_require__(7042)
+const { stringify, getHeadersList } = __nccwpck_require__(3325)
+const { webidl } = __nccwpck_require__(29)
+const { Headers } = __nccwpck_require__(2908)
+
+/**
+ * @typedef {Object} Cookie
+ * @property {string} name
+ * @property {string} value
+ * @property {Date|number|undefined} expires
+ * @property {number|undefined} maxAge
+ * @property {string|undefined} domain
+ * @property {string|undefined} path
+ * @property {boolean|undefined} secure
+ * @property {boolean|undefined} httpOnly
+ * @property {'Strict'|'Lax'|'None'} sameSite
+ * @property {string[]} unparsed
+ */
+
+/**
+ * @param {Headers} headers
+ * @returns {Record<string, string>}
+ */
+function getCookies (headers) {
+  webidl.argumentLengthCheck(arguments, 1, { header: 'getCookies' })
+
+  webidl.brandCheck(headers, Headers, { strict: false })
+
+  const cookie = headers.get('cookie')
+  const out = {}
+
+  if (!cookie) {
+    return out
+  }
+
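+  // A cookie value may itself contain '=', so re-join everything after the
+  // first '=' in each pair.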
+  for (const piece of cookie.split(';')) {
+    const [name, ...value] = piece.split('=')
+
+    out[name.trim()] = value.join('=')
+  }
+
+  return out
+}
+
+/**
+ * @param {Headers} headers
+ * @param {string} name
+ * @param {{ path?: string, domain?: string }|undefined} attributes
+ * @returns {void}
+ */
+function deleteCookie (headers, name, attributes) {
+  webidl.argumentLengthCheck(arguments, 2, { header: 'deleteCookie' })
+
+  webidl.brandCheck(headers, Headers, { strict: false })
+
+  name = webidl.converters.DOMString(name)
+  attributes = webidl.converters.DeleteCookieAttributes(attributes)
+
+  // Matches behavior of
+  // https://github.com/denoland/deno_std/blob/63827b16330b82489a04614027c33b7904e08be5/http/cookie.ts#L278
+  setCookie(headers, {
+    name,
+    value: '',
+    expires: new Date(0),
+    ...attributes
+  })
+}
+
+/**
+ * @param {Headers} headers
+ * @returns {Cookie[]}
+ */
+function getSetCookies (headers) {
+  webidl.argumentLengthCheck(arguments, 1, { header: 'getSetCookies' })
+
+  webidl.brandCheck(headers, Headers, { strict: false })
+
+  const cookies = getHeadersList(headers).cookies
+
+  if (!cookies) {
+    return []
+  }
+
+  // In older versions of undici, cookies is a list of name:value strings.
+  return cookies.map((pair) => parseSetCookie(Array.isArray(pair) ? pair[1] : pair))
+}
+
+/**
+ * @param {Headers} headers
+ * @param {Cookie} cookie
+ * @returns {void}
+ */
+function setCookie (headers, cookie) {
+  webidl.argumentLengthCheck(arguments, 2, { header: 'setCookie' })
+
+  webidl.brandCheck(headers, Headers, { strict: false })
+
+  cookie = webidl.converters.Cookie(cookie)
+
+  const str = stringify(cookie)
+
+  if (str) {
+    headers.append('Set-Cookie', stringify(cookie))
+  }
+}
+
+webidl.converters.DeleteCookieAttributes = webidl.dictionaryConverter([
+  {
+    converter: webidl.nullableConverter(webidl.converters.DOMString),
+    key: 'path',
+    defaultValue: null
+  },
+  {
+    converter: webidl.nullableConverter(webidl.converters.DOMString),
+    key: 'domain',
+    defaultValue: null
+  }
+])
+
+webidl.converters.Cookie = webidl.dictionaryConverter([
+  {
+    converter: webidl.converters.DOMString,
+    key: 'name'
+  },
+  {
+    converter: webidl.converters.DOMString,
+    key: 'value'
+  },
+  {
+    converter: webidl.nullableConverter((value) => {
+      if (typeof value === 'number') {
+        return webidl.converters['unsigned long long'](value)
+      }
+
+      return new Date(value)
+    }),
+    key: 'expires',
+    defaultValue: null
+  },
+  {
+    converter: webidl.nullableConverter(webidl.converters['long long']),
+    key: 'maxAge',
+    defaultValue: null
+  },
+  {
+    converter: webidl.nullableConverter(webidl.converters.DOMString),
+    key: 'domain',
+    defaultValue: null
+  },
+  {
+    converter: webidl.nullableConverter(webidl.converters.DOMString),
+    key: 'path',
+    defaultValue: null
+  },
+  {
+    converter: webidl.nullableConverter(webidl.converters.boolean),
+    key: 'secure',
+    defaultValue: null
+  },
+  {
+    converter: webidl.nullableConverter(webidl.converters.boolean),
+    key: 'httpOnly',
+    defaultValue: null
+  },
+  {
+    converter: webidl.converters.USVString,
+    key: 'sameSite',
+    allowedValues: ['Strict', 'Lax', 'None']
+  },
+  {
+    converter: webidl.sequenceConverter(webidl.converters.DOMString),
+    key: 'unparsed',
+    defaultValue: []
+  }
+])
+
+module.exports = {
+  getCookies,
+  deleteCookie,
+  getSetCookies,
+  setCookie
+}
+
+
+/***/ }),
+
+/***/ 7042:
+/***/ ((module, __unused_webpack_exports, __nccwpck_require__) => {
+
+
+
+const { maxNameValuePairSize, maxAttributeValueSize } = __nccwpck_require__(516)
+const { isCTLExcludingHtab } = __nccwpck_require__(3325)
+const { collectASequenceOfCodePointsFast } = __nccwpck_require__(3911)
+const assert = __nccwpck_require__(2613)
+
+/**
+ * @description Parses the field-value attributes of a set-cookie header string.
+ * @see https://datatracker.ietf.org/doc/html/draft-ietf-httpbis-rfc6265bis#section-5.4
+ * @param {string} header
+ * @returns the parsed cookie, or null if the header is invalid
+ */
+function parseSetCookie (header) {
+  // 1. If the set-cookie-string contains a %x00-08 / %x0A-1F / %x7F
+  //    character (CTL characters excluding HTAB): Abort these steps and
+  //    ignore the set-cookie-string entirely.
+  if (isCTLExcludingHtab(header)) {
+    return null
+  }
+
+  let nameValuePair = ''
+  let unparsedAttributes = ''
+  let name = ''
+  let value = ''
+
+  // 2. If the set-cookie-string contains a %x3B (";") character:
+  if (header.includes(';')) {
+    // 1. The name-value-pair string consists of the characters up to,
+    //    but not including, the first %x3B (";"), and the unparsed-
+    //    attributes consist of the remainder of the set-cookie-string
+    //    (including the %x3B (";") in question).
+    const position = { position: 0 }
+
+    nameValuePair = collectASequenceOfCodePointsFast(';', header, position)
+    unparsedAttributes = header.slice(position.position)
+  } else {
+    // Otherwise:
+
+    // 1. The name-value-pair string consists of all the characters
+    //    contained in the set-cookie-string, and the unparsed-
+    //    attributes is the empty string.
+    nameValuePair = header
+  }
+
+  // 3. If the name-value-pair string lacks a %x3D ("=") character, then
+  //    the name string is empty, and the value string is the value of
+  //    name-value-pair.
+  if (!nameValuePair.includes('=')) {
+    value = nameValuePair
+  } else {
+    //    Otherwise, the name string consists of the characters up to, but
+    //    not including, the first %x3D ("=") character, and the (possibly
+    //    empty) value string consists of the characters after the first
+    //    %x3D ("=") character.
+    const position = { position: 0 }
+    name = collectASequenceOfCodePointsFast(
+      '=',
+      nameValuePair,
+      position
+    )
+    value = nameValuePair.slice(position.position + 1)
+  }
+
+  // 4. Remove any leading or trailing WSP characters from the name
+  //    string and the value string.
+  name = name.trim()
+  value = value.trim()
+
+  // 5. If the sum of the lengths of the name string and the value string
+  //    is more than 4096 octets, abort these steps and ignore the set-
+  //    cookie-string entirely.
+  if (name.length + value.length > maxNameValuePairSize) {
+    return null
+  }
+
+  // 6. The cookie-name is the name string, and the cookie-value is the
+  //    value string.
+  return {
+    name, value, ...parseUnparsedAttributes(unparsedAttributes)
+  }
+}
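+
+// Illustrative example only (not executed): parsing a typical Set-Cookie
+// header yields the name/value pair plus the attributes collected by
+// parseUnparsedAttributes below.
+//
+//   parseSetCookie('id=a3fWa; Max-Age=2592000; Path=/; Secure; SameSite=Lax')
+//   // => { name: 'id', value: 'a3fWa', maxAge: 2592000, path: '/',
+//   //      secure: true, sameSite: 'Lax' }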
+
+/**
+ * Parses the remaining attributes of a set-cookie header
+ * @see https://datatracker.ietf.org/doc/html/draft-ietf-httpbis-rfc6265bis#section-5.4
+ * @param {string} unparsedAttributes
+ * @param {Object.<string, unknown>} [cookieAttributeList={}]
+ */
+function parseUnparsedAttributes (unparsedAttributes, cookieAttributeList = {}) {
+  // 1. If the unparsed-attributes string is empty, skip the rest of
+  //    these steps.
+  if (unparsedAttributes.length === 0) {
+    return cookieAttributeList
+  }
+
+  // 2. Discard the first character of the unparsed-attributes (which
+  //    will be a %x3B (";") character).
+  assert(unparsedAttributes[0] === ';')
+  unparsedAttributes = unparsedAttributes.slice(1)
+
+  let cookieAv = ''
+
+  // 3. If the remaining unparsed-attributes contains a %x3B (";")
+  //    character:
+  if (unparsedAttributes.includes(';')) {
+    // 1. Consume the characters of the unparsed-attributes up to, but
+    //    not including, the first %x3B (";") character.
+    cookieAv = collectASequenceOfCodePointsFast(
+      ';',
+      unparsedAttributes,
+      { position: 0 }
+    )
+    unparsedAttributes = unparsedAttributes.slice(cookieAv.length)
+  } else {
+    // Otherwise:
+
+    // 1. Consume the remainder of the unparsed-attributes.
+    cookieAv = unparsedAttributes
+    unparsedAttributes = ''
+  }
+
+  // Let the cookie-av string be the characters consumed in this step.
+
+  let attributeName = ''
+  let attributeValue = ''
+
+  // 4. If the cookie-av string contains a %x3D ("=") character:
+  if (cookieAv.includes('=')) {
+    // 1. The (possibly empty) attribute-name string consists of the
+    //    characters up to, but not including, the first %x3D ("=")
+    //    character, and the (possibly empty) attribute-value string
+    //    consists of the characters after the first %x3D ("=")
+    //    character.
+    const position = { position: 0 }
+
+    attributeName = collectASequenceOfCodePointsFast(
+      '=',
+      cookieAv,
+      position
+    )
+    attributeValue = cookieAv.slice(position.position + 1)
+  } else {
+    // Otherwise:
+
+    // 1. The attribute-name string consists of the entire cookie-av
+    //    string, and the attribute-value string is empty.
+    attributeName = cookieAv
+  }
+
+  // 5. Remove any leading or trailing WSP characters from the attribute-
+  //    name string and the attribute-value string.
+  attributeName = attributeName.trim()
+  attributeValue = attributeValue.trim()
+
+  // 6. If the attribute-value is longer than 1024 octets, ignore the
+  //    cookie-av string and return to Step 1 of this algorithm.
+  if (attributeValue.length > maxAttributeValueSize) {
+    return parseUnparsedAttributes(unparsedAttributes, cookieAttributeList)
+  }
+
+  // 7. Process the attribute-name and attribute-value according to the
+  //    requirements in the following subsections.  (Notice that
+  //    attributes with unrecognized attribute-names are ignored.)
+  const attributeNameLowercase = attributeName.toLowerCase()
+
+  // https://datatracker.ietf.org/doc/html/draft-ietf-httpbis-rfc6265bis#section-5.4.1
+  // If the attribute-name case-insensitively matches the string
+  // "Expires", the user agent MUST process the cookie-av as follows.
+  if (attributeNameLowercase === 'expires') {
+    // 1. Let the expiry-time be the result of parsing the attribute-value
+    //    as cookie-date (see Section 5.1.1).
+    const expiryTime = new Date(attributeValue)
+
+    // 2. If the attribute-value failed to parse as a cookie date, ignore
+    //    the cookie-av.
+
+    cookieAttributeList.expires = expiryTime
+  } else if (attributeNameLowercase === 'max-age') {
+    // https://datatracker.ietf.org/doc/html/draft-ietf-httpbis-rfc6265bis#section-5.4.2
+    // If the attribute-name case-insensitively matches the string "Max-
+    // Age", the user agent MUST process the cookie-av as follows.
+
+    // 1. If the first character of the attribute-value is not a DIGIT or a
+    //    "-" character, ignore the cookie-av.
+    const charCode = attributeValue.charCodeAt(0)
+
+    if ((charCode < 48 || charCode > 57) && attributeValue[0] !== '-') {
+      return parseUnparsedAttributes(unparsedAttributes, cookieAttributeList)
+    }
+
+    // 2. If the remainder of attribute-value contains a non-DIGIT
+    //    character, ignore the cookie-av.
+    if (!/^\d+$/.test(attributeValue)) {
+      return parseUnparsedAttributes(unparsedAttributes, cookieAttributeList)
+    }
+
+    // 3. Let delta-seconds be the attribute-value converted to an integer.
+    const deltaSeconds = Number(attributeValue)
+
+    // 4. Let cookie-age-limit be the maximum age of the cookie (which
+    //    SHOULD be 400 days or less, see Section 4.1.2.2).
+
+    // 5. Set delta-seconds to the smaller of its present value and cookie-
+    //    age-limit.
+    // deltaSeconds = Math.min(deltaSeconds * 1000, maxExpiresMs)
+
+    // 6. If delta-seconds is less than or equal to zero (0), let expiry-
+    //    time be the earliest representable date and time.  Otherwise, let
+    //    the expiry-time be the current date and time plus delta-seconds
+    //    seconds.
+    // const expiryTime = deltaSeconds <= 0 ? Date.now() : Date.now() + deltaSeconds
+
+    // 7. Append an attribute to the cookie-attribute-list with an
+    //    attribute-name of Max-Age and an attribute-value of expiry-time.
+    cookieAttributeList.maxAge = deltaSeconds
+  } else if (attributeNameLowercase === 'domain') {
+    // https://datatracker.ietf.org/doc/html/draft-ietf-httpbis-rfc6265bis#section-5.4.3
+    // If the attribute-name case-insensitively matches the string "Domain",
+    // the user agent MUST process the cookie-av as follows.
+
+    // 1. Let cookie-domain be the attribute-value.
+    let cookieDomain = attributeValue
+
+    // 2. If cookie-domain starts with %x2E ("."), let cookie-domain be
+    //    cookie-domain without its leading %x2E (".").
+    if (cookieDomain[0] === '.') {
+      cookieDomain = cookieDomain.slice(1)
+    }
+
+    // 3. Convert the cookie-domain to lower case.
+    cookieDomain = cookieDomain.toLowerCase()
+
+    // 4. Append an attribute to the cookie-attribute-list with an
+    //    attribute-name of Domain and an attribute-value of cookie-domain.
+    cookieAttributeList.domain = cookieDomain
+  } else if (attributeNameLowercase === 'path') {
+    // https://datatracker.ietf.org/doc/html/draft-ietf-httpbis-rfc6265bis#section-5.4.4
+    // If the attribute-name case-insensitively matches the string "Path",
+    // the user agent MUST process the cookie-av as follows.
+
+    // 1. If the attribute-value is empty or if the first character of the
+    //    attribute-value is not %x2F ("/"):
+    let cookiePath = ''
+    if (attributeValue.length === 0 || attributeValue[0] !== '/') {
+      // 1. Let cookie-path be the default-path.
+      cookiePath = '/'
+    } else {
+      // Otherwise:
+
+      // 1. Let cookie-path be the attribute-value.
+      cookiePath = attributeValue
+    }
+
+    // 2. Append an attribute to the cookie-attribute-list with an
+    //    attribute-name of Path and an attribute-value of cookie-path.
+    cookieAttributeList.path = cookiePath
+  } else if (attributeNameLowercase === 'secure') {
+    // https://datatracker.ietf.org/doc/html/draft-ietf-httpbis-rfc6265bis#section-5.4.5
+    // If the attribute-name case-insensitively matches the string "Secure",
+    // the user agent MUST append an attribute to the cookie-attribute-list
+    // with an attribute-name of Secure and an empty attribute-value.
+
+    cookieAttributeList.secure = true
+  } else if (attributeNameLowercase === 'httponly') {
+    // https://datatracker.ietf.org/doc/html/draft-ietf-httpbis-rfc6265bis#section-5.4.6
+    // If the attribute-name case-insensitively matches the string
+    // "HttpOnly", the user agent MUST append an attribute to the cookie-
+    // attribute-list with an attribute-name of HttpOnly and an empty
+    // attribute-value.
+
+    cookieAttributeList.httpOnly = true
+  } else if (attributeNameLowercase === 'samesite') {
+    // https://datatracker.ietf.org/doc/html/draft-ietf-httpbis-rfc6265bis#section-5.4.7
+    // If the attribute-name case-insensitively matches the string
+    // "SameSite", the user agent MUST process the cookie-av as follows:
+
+    // 1. Let enforcement be "Default".
+    let enforcement = 'Default'
+
+    const attributeValueLowercase = attributeValue.toLowerCase()
+    // 2. If cookie-av's attribute-value is a case-insensitive match for
+    //    "None", set enforcement to "None".
+    if (attributeValueLowercase.includes('none')) {
+      enforcement = 'None'
+    }
+
+    // 3. If cookie-av's attribute-value is a case-insensitive match for
+    //    "Strict", set enforcement to "Strict".
+    if (attributeValueLowercase.includes('strict')) {
+      enforcement = 'Strict'
+    }
+
+    // 4. If cookie-av's attribute-value is a case-insensitive match for
+    //    "Lax", set enforcement to "Lax".
+    if (attributeValueLowercase.includes('lax')) {
+      enforcement = 'Lax'
+    }
+
+    // 5. Append an attribute to the cookie-attribute-list with an
+    //    attribute-name of "SameSite" and an attribute-value of
+    //    enforcement.
+    cookieAttributeList.sameSite = enforcement
+  } else {
+    cookieAttributeList.unparsed ??= []
+
+    cookieAttributeList.unparsed.push(`${attributeName}=${attributeValue}`)
+  }
+
+  // 8. Return to Step 1 of this algorithm.
+  return parseUnparsedAttributes(unparsedAttributes, cookieAttributeList)
+}
+
+module.exports = {
+  parseSetCookie,
+  parseUnparsedAttributes
+}
+
+
+/***/ }),
+
+/***/ 3325:
+/***/ ((module, __unused_webpack_exports, __nccwpck_require__) => {
+
+
+
+const assert = __nccwpck_require__(2613)
+const { kHeadersList } = __nccwpck_require__(4856)
+
+function isCTLExcludingHtab (value) {
+  if (value.length === 0) {
+    return false
+  }
+
+  for (const char of value) {
+    const code = char.charCodeAt(0)
+
+    // CTL characters are %x00-08 / %x0A-1F / %x7F (HTAB, %x09, is excluded)
+    if (
+      (code >= 0x00 && code <= 0x08) ||
+      (code >= 0x0A && code <= 0x1F) ||
+      code === 0x7F
+    ) {
+      return true
+    }
+  }
+
+  return false
+}
+
+/**
+ CHAR           = <any US-ASCII character (octets 0 - 127)>
+ token          = 1*<any CHAR except CTLs or separators>
+ separators     = "(" | ")" | "<" | ">" | "@"
+                | "," | ";" | ":" | "\" | <">
+                | "/" | "[" | "]" | "?" | "="
+                | "{" | "}" | SP | HT
+ * @param {string} name
+ */
+function validateCookieName (name) {
+  for (const char of name) {
+    const code = char.charCodeAt(0)
+
+    if (
+      (code <= 0x20 || code > 0x7F) ||
+      char === '(' ||
+      char === ')' ||
+      char === '>' ||
+      char === '<' ||
+      char === '@' ||
+      char === ',' ||
+      char === ';' ||
+      char === ':' ||
+      char === '\\' ||
+      char === '"' ||
+      char === '/' ||
+      char === '[' ||
+      char === ']' ||
+      char === '?' ||
+      char === '=' ||
+      char === '{' ||
+      char === '}'
+    ) {
+      throw new Error('Invalid cookie name')
+    }
+  }
+}
+
+/**
+ cookie-value      = *cookie-octet / ( DQUOTE *cookie-octet DQUOTE )
+ cookie-octet      = %x21 / %x23-2B / %x2D-3A / %x3C-5B / %x5D-7E
+                       ; US-ASCII characters excluding CTLs,
+                       ; whitespace DQUOTE, comma, semicolon,
+                       ; and backslash
+ * @param {string} value
+ */
+function validateCookieValue (value) {
+  for (const char of value) {
+    const code = char.charCodeAt(0)
+
+    if (
+      code < 0x21 || // exclude CTLs (0-31)
+      code === 0x22 ||
+      code === 0x2C ||
+      code === 0x3B ||
+      code === 0x5C ||
+      code > 0x7E // non-ascii
+    ) {
+      throw new Error('Invalid cookie value')
+    }
+  }
+}
+
+/**
+ * path-value        = <any CHAR except CTLs or ";">
+ * @param {string} path
+ */
+function validateCookiePath (path) {
+  for (const char of path) {
+    const code = char.charCodeAt(0)
+
+    if (code < 0x21 || char === ';') {
+      throw new Error('Invalid cookie path')
+    }
+  }
+}
+
+/**
+ * I have no idea why these values aren't allowed, to be honest,
+ * but Deno tests these. - Khafra
+ * @param {string} domain
+ */
+function validateCookieDomain (domain) {
+  if (
+    domain.startsWith('-') ||
+    domain.endsWith('.') ||
+    domain.endsWith('-')
+  ) {
+    throw new Error('Invalid cookie domain')
+  }
+}
+
+/**
+ * @see https://www.rfc-editor.org/rfc/rfc7231#section-7.1.1.1
+ * @param {number|Date} date
+  IMF-fixdate  = day-name "," SP date1 SP time-of-day SP GMT
+  ; fixed length/zone/capitalization subset of the format
+  ; see Section 3.3 of [RFC5322]
+
+  day-name     = %x4D.6F.6E ; "Mon", case-sensitive
+              / %x54.75.65 ; "Tue", case-sensitive
+              / %x57.65.64 ; "Wed", case-sensitive
+              / %x54.68.75 ; "Thu", case-sensitive
+              / %x46.72.69 ; "Fri", case-sensitive
+              / %x53.61.74 ; "Sat", case-sensitive
+              / %x53.75.6E ; "Sun", case-sensitive
+  date1        = day SP month SP year
+                  ; e.g., 02 Jun 1982
+
+  day          = 2DIGIT
+  month        = %x4A.61.6E ; "Jan", case-sensitive
+              / %x46.65.62 ; "Feb", case-sensitive
+              / %x4D.61.72 ; "Mar", case-sensitive
+              / %x41.70.72 ; "Apr", case-sensitive
+              / %x4D.61.79 ; "May", case-sensitive
+              / %x4A.75.6E ; "Jun", case-sensitive
+              / %x4A.75.6C ; "Jul", case-sensitive
+              / %x41.75.67 ; "Aug", case-sensitive
+              / %x53.65.70 ; "Sep", case-sensitive
+              / %x4F.63.74 ; "Oct", case-sensitive
+              / %x4E.6F.76 ; "Nov", case-sensitive
+              / %x44.65.63 ; "Dec", case-sensitive
+  year         = 4DIGIT
+
+  GMT          = %x47.4D.54 ; "GMT", case-sensitive
+
+  time-of-day  = hour ":" minute ":" second
+              ; 00:00:00 - 23:59:60 (leap second)
+
+  hour         = 2DIGIT
+  minute       = 2DIGIT
+  second       = 2DIGIT
+ */
+function toIMFDate (date) {
+  if (typeof date === 'number') {
+    date = new Date(date)
+  }
+
+  const days = [
+    'Sun', 'Mon', 'Tue', 'Wed',
+    'Thu', 'Fri', 'Sat'
+  ]
+
+  const months = [
+    'Jan', 'Feb', 'Mar', 'Apr', 'May', 'Jun',
+    'Jul', 'Aug', 'Sep', 'Oct', 'Nov', 'Dec'
+  ]
+
+  const dayName = days[date.getUTCDay()]
+  const day = date.getUTCDate().toString().padStart(2, '0')
+  const month = months[date.getUTCMonth()]
+  const year = date.getUTCFullYear()
+  const hour = date.getUTCHours().toString().padStart(2, '0')
+  const minute = date.getUTCMinutes().toString().padStart(2, '0')
+  const second = date.getUTCSeconds().toString().padStart(2, '0')
+
+  return `${dayName}, ${day} ${month} ${year} ${hour}:${minute}:${second} GMT`
+}
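+
+// Illustrative example only (not executed): toIMFDate formats a Date (or
+// epoch milliseconds) as an IMF-fixdate string in UTC.
+//
+//   toIMFDate(new Date(Date.UTC(1994, 10, 6, 8, 49, 37)))
+//   // => 'Sun, 06 Nov 1994 08:49:37 GMT'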
+
+/**
+ max-age-av        = "Max-Age=" non-zero-digit *DIGIT
+                       ; In practice, both expires-av and max-age-av
+                       ; are limited to dates representable by the
+                       ; user agent.
+ * @param {number} maxAge
+ */
+function validateCookieMaxAge (maxAge) {
+  if (maxAge < 0) {
+    throw new Error('Invalid cookie max-age')
+  }
+}
+
+/**
+ * @see https://www.rfc-editor.org/rfc/rfc6265#section-4.1.1
+ * @param {import('./index').Cookie} cookie
+ */
+function stringify (cookie) {
+  if (cookie.name.length === 0) {
+    return null
+  }
+
+  validateCookieName(cookie.name)
+  validateCookieValue(cookie.value)
+
+  const out = [`${cookie.name}=${cookie.value}`]
+
+  // https://datatracker.ietf.org/doc/html/draft-ietf-httpbis-cookie-prefixes-00#section-3.1
+  // https://datatracker.ietf.org/doc/html/draft-ietf-httpbis-cookie-prefixes-00#section-3.2
+  if (cookie.name.startsWith('__Secure-')) {
+    cookie.secure = true
+  }
+
+  if (cookie.name.startsWith('__Host-')) {
+    cookie.secure = true
+    cookie.domain = null
+    cookie.path = '/'
+  }
+
+  if (cookie.secure) {
+    out.push('Secure')
+  }
+
+  if (cookie.httpOnly) {
+    out.push('HttpOnly')
+  }
+
+  if (typeof cookie.maxAge === 'number') {
+    validateCookieMaxAge(cookie.maxAge)
+    out.push(`Max-Age=${cookie.maxAge}`)
+  }
+
+  if (cookie.domain) {
+    validateCookieDomain(cookie.domain)
+    out.push(`Domain=${cookie.domain}`)
+  }
+
+  if (cookie.path) {
+    validateCookiePath(cookie.path)
+    out.push(`Path=${cookie.path}`)
+  }
+
+  if (cookie.expires && cookie.expires.toString() !== 'Invalid Date') {
+    out.push(`Expires=${toIMFDate(cookie.expires)}`)
+  }
+
+  if (cookie.sameSite) {
+    out.push(`SameSite=${cookie.sameSite}`)
+  }
+
+  for (const part of cookie.unparsed) {
+    if (!part.includes('=')) {
+      throw new Error('Invalid unparsed')
+    }
+
+    const [key, ...value] = part.split('=')
+
+    out.push(`${key.trim()}=${value.join('=')}`)
+  }
+
+  return out.join('; ')
+}
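+
+// Illustrative example only (not executed): stringify serializes a cookie
+// object into a Set-Cookie header value, validating each field as it goes.
+//
+//   stringify({ name: 'token', value: 'abc', path: '/', httpOnly: true, unparsed: [] })
+//   // => 'token=abc; HttpOnly; Path=/'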
+
+let kHeadersListNode
+
+function getHeadersList (headers) {
+  if (headers[kHeadersList]) {
+    return headers[kHeadersList]
+  }
+
+  if (!kHeadersListNode) {
+    kHeadersListNode = Object.getOwnPropertySymbols(headers).find(
+      (symbol) => symbol.description === 'headers list'
+    )
+
+    assert(kHeadersListNode, 'Headers cannot be parsed')
+  }
+
+  const headersList = headers[kHeadersListNode]
+  assert(headersList)
+
+  return headersList
+}
+
+module.exports = {
+  isCTLExcludingHtab,
+  stringify,
+  getHeadersList
+}
+
+
+/***/ }),
+
+/***/ 2559:
+/***/ ((module, __unused_webpack_exports, __nccwpck_require__) => {
+
+
+
+const net = __nccwpck_require__(9278)
+const assert = __nccwpck_require__(2613)
+const util = __nccwpck_require__(3465)
+const { InvalidArgumentError, ConnectTimeoutError } = __nccwpck_require__(3862)
+
+let tls // include tls conditionally since it is not always available
+
+// TODO: session re-use does not wait for the first
+// connection to resolve the session and might therefore
+// resolve the same servername multiple times even when
+// re-use is enabled.
+
+let SessionCache
+// FIXME: remove workaround when the Node bug is fixed
+// https://github.com/nodejs/node/issues/49344#issuecomment-1741776308
+if (global.FinalizationRegistry && !process.env.NODE_V8_COVERAGE) {
+  SessionCache = class WeakSessionCache {
+    constructor (maxCachedSessions) {
+      this._maxCachedSessions = maxCachedSessions
+      this._sessionCache = new Map()
+      this._sessionRegistry = new global.FinalizationRegistry((key) => {
+        if (this._sessionCache.size < this._maxCachedSessions) {
+          return
+        }
+
+        const ref = this._sessionCache.get(key)
+        if (ref !== undefined && ref.deref() === undefined) {
+          this._sessionCache.delete(key)
+        }
+      })
+    }
+
+    get (sessionKey) {
+      const ref = this._sessionCache.get(sessionKey)
+      return ref ? ref.deref() : null
+    }
+
+    set (sessionKey, session) {
+      if (this._maxCachedSessions === 0) {
+        return
+      }
+
+      this._sessionCache.set(sessionKey, new WeakRef(session))
+      this._sessionRegistry.register(session, sessionKey)
+    }
+  }
+} else {
+  SessionCache = class SimpleSessionCache {
+    constructor (maxCachedSessions) {
+      this._maxCachedSessions = maxCachedSessions
+      this._sessionCache = new Map()
+    }
+
+    get (sessionKey) {
+      return this._sessionCache.get(sessionKey)
+    }
+
+    set (sessionKey, session) {
+      if (this._maxCachedSessions === 0) {
+        return
+      }
+
+      if (this._sessionCache.size >= this._maxCachedSessions) {
+        // remove the oldest session
+        const { value: oldestKey } = this._sessionCache.keys().next()
+        this._sessionCache.delete(oldestKey)
+      }
+
+      this._sessionCache.set(sessionKey, session)
+    }
+  }
+}
+
+function buildConnector ({ allowH2, maxCachedSessions, socketPath, timeout, ...opts }) {
+  if (maxCachedSessions != null && (!Number.isInteger(maxCachedSessions) || maxCachedSessions < 0)) {
+    throw new InvalidArgumentError('maxCachedSessions must be a positive integer or zero')
+  }
+
+  const options = { path: socketPath, ...opts }
+  const sessionCache = new SessionCache(maxCachedSessions == null ? 100 : maxCachedSessions)
+  timeout = timeout == null ? 10e3 : timeout
+  allowH2 = allowH2 != null ? allowH2 : false
+  return function connect ({ hostname, host, protocol, port, servername, localAddress, httpSocket }, callback) {
+    let socket
+    if (protocol === 'https:') {
+      if (!tls) {
+        tls = __nccwpck_require__(4756)
+      }
+      servername = servername || options.servername || util.getServerName(host) || null
+
+      const sessionKey = servername || hostname
+      const session = sessionCache.get(sessionKey) || null
+
+      assert(sessionKey)
+
+      socket = tls.connect({
+        highWaterMark: 16384, // TLS in node can't have bigger HWM anyway...
+        ...options,
+        servername,
+        session,
+        localAddress,
+        // TODO(HTTP/2): Add support for h2c
+        ALPNProtocols: allowH2 ? ['http/1.1', 'h2'] : ['http/1.1'],
+        socket: httpSocket, // upgrade socket connection
+        port: port || 443,
+        host: hostname
+      })
+
+      socket
+        .on('session', function (session) {
+          // TODO (fix): Can a session become invalid once established? Don't think so?
+          sessionCache.set(sessionKey, session)
+        })
+    } else {
+      assert(!httpSocket, 'httpSocket can only be sent on TLS update')
+      socket = net.connect({
+        highWaterMark: 64 * 1024, // Same as nodejs fs streams.
+        ...options,
+        localAddress,
+        port: port || 80,
+        host: hostname
+      })
+    }
+
+    // Set TCP keep alive options on the socket here instead of in connect() for the case of assigning the socket
+    if (options.keepAlive == null || options.keepAlive) {
+      const keepAliveInitialDelay = options.keepAliveInitialDelay === undefined ? 60e3 : options.keepAliveInitialDelay
+      socket.setKeepAlive(true, keepAliveInitialDelay)
+    }
+
+    const cancelTimeout = setupTimeout(() => onConnectTimeout(socket), timeout)
+
+    socket
+      .setNoDelay(true)
+      .once(protocol === 'https:' ? 'secureConnect' : 'connect', function () {
+        cancelTimeout()
+
+        if (callback) {
+          const cb = callback
+          callback = null
+          cb(null, this)
+        }
+      })
+      .on('error', function (err) {
+        cancelTimeout()
+
+        if (callback) {
+          const cb = callback
+          callback = null
+          cb(err)
+        }
+      })
+
+    return socket
+  }
+}
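+
+// Illustrative usage only (not executed): buildConnector returns a connect
+// function that undici's Client calls with the target address details; the
+// option names shown here are the ones destructured above.
+//
+//   const connect = buildConnector({ timeout: 10e3, maxCachedSessions: 100 })
+//   connect({ protocol: 'https:', hostname: 'example.com', port: 443 }, (err, socket) => {
+//     if (!err) socket.end()
+//   })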
+
+function setupTimeout (onConnectTimeout, timeout) {
+  if (!timeout) {
+    return () => {}
+  }
+
+  let s1 = null
+  let s2 = null
+  const timeoutId = setTimeout(() => {
+    // setImmediate is added to make sure that we prioritise socket error events over timeouts
+    s1 = setImmediate(() => {
+      if (process.platform === 'win32') {
+        // Windows needs an extra setImmediate probably due to implementation differences in the socket logic
+        s2 = setImmediate(() => onConnectTimeout())
+      } else {
+        onConnectTimeout()
+      }
+    })
+  }, timeout)
+  return () => {
+    clearTimeout(timeoutId)
+    clearImmediate(s1)
+    clearImmediate(s2)
+  }
+}
+
+function onConnectTimeout (socket) {
+  util.destroy(socket, new ConnectTimeoutError())
+}
+
+module.exports = buildConnector
+
+
+/***/ }),
+
+/***/ 8088:
+/***/ ((module) => {
+
+
+
+/** @type {Record<string, string | undefined>} */
+const headerNameLowerCasedRecord = {}
+
+// https://developer.mozilla.org/docs/Web/HTTP/Headers
+const wellknownHeaderNames = [
+  'Accept',
+  'Accept-Encoding',
+  'Accept-Language',
+  'Accept-Ranges',
+  'Access-Control-Allow-Credentials',
+  'Access-Control-Allow-Headers',
+  'Access-Control-Allow-Methods',
+  'Access-Control-Allow-Origin',
+  'Access-Control-Expose-Headers',
+  'Access-Control-Max-Age',
+  'Access-Control-Request-Headers',
+  'Access-Control-Request-Method',
+  'Age',
+  'Allow',
+  'Alt-Svc',
+  'Alt-Used',
+  'Authorization',
+  'Cache-Control',
+  'Clear-Site-Data',
+  'Connection',
+  'Content-Disposition',
+  'Content-Encoding',
+  'Content-Language',
+  'Content-Length',
+  'Content-Location',
+  'Content-Range',
+  'Content-Security-Policy',
+  'Content-Security-Policy-Report-Only',
+  'Content-Type',
+  'Cookie',
+  'Cross-Origin-Embedder-Policy',
+  'Cross-Origin-Opener-Policy',
+  'Cross-Origin-Resource-Policy',
+  'Date',
+  'Device-Memory',
+  'Downlink',
+  'ECT',
+  'ETag',
+  'Expect',
+  'Expect-CT',
+  'Expires',
+  'Forwarded',
+  'From',
+  'Host',
+  'If-Match',
+  'If-Modified-Since',
+  'If-None-Match',
+  'If-Range',
+  'If-Unmodified-Since',
+  'Keep-Alive',
+  'Last-Modified',
+  'Link',
+  'Location',
+  'Max-Forwards',
+  'Origin',
+  'Permissions-Policy',
+  'Pragma',
+  'Proxy-Authenticate',
+  'Proxy-Authorization',
+  'RTT',
+  'Range',
+  'Referer',
+  'Referrer-Policy',
+  'Refresh',
+  'Retry-After',
+  'Sec-WebSocket-Accept',
+  'Sec-WebSocket-Extensions',
+  'Sec-WebSocket-Key',
+  'Sec-WebSocket-Protocol',
+  'Sec-WebSocket-Version',
+  'Server',
+  'Server-Timing',
+  'Service-Worker-Allowed',
+  'Service-Worker-Navigation-Preload',
+  'Set-Cookie',
+  'SourceMap',
+  'Strict-Transport-Security',
+  'Supports-Loading-Mode',
+  'TE',
+  'Timing-Allow-Origin',
+  'Trailer',
+  'Transfer-Encoding',
+  'Upgrade',
+  'Upgrade-Insecure-Requests',
+  'User-Agent',
+  'Vary',
+  'Via',
+  'WWW-Authenticate',
+  'X-Content-Type-Options',
+  'X-DNS-Prefetch-Control',
+  'X-Frame-Options',
+  'X-Permitted-Cross-Domain-Policies',
+  'X-Powered-By',
+  'X-Requested-With',
+  'X-XSS-Protection'
+]
+
+for (let i = 0; i < wellknownHeaderNames.length; ++i) {
+  const key = wellknownHeaderNames[i]
+  const lowerCasedKey = key.toLowerCase()
+  headerNameLowerCasedRecord[key] = headerNameLowerCasedRecord[lowerCasedKey] =
+    lowerCasedKey
+}
+
+// Note: object prototypes should not be able to be referenced. e.g. `Object#hasOwnProperty`.
+Object.setPrototypeOf(headerNameLowerCasedRecord, null)
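+
+// Illustrative example only (not executed): the record maps both the
+// canonical and the lowercase spelling of each well-known header name to
+// its lowercase form, so hot paths can avoid calling toLowerCase().
+//
+//   headerNameLowerCasedRecord['Content-Type'] // => 'content-type'
+//   headerNameLowerCasedRecord['content-type'] // => 'content-type'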
+
+module.exports = {
+  wellknownHeaderNames,
+  headerNameLowerCasedRecord
+}
+
+
+/***/ }),
+
+/***/ 3862:
+/***/ ((module) => {
+
+
+
+class UndiciError extends Error {
+  constructor (message) {
+    super(message)
+    this.name = 'UndiciError'
+    this.code = 'UND_ERR'
+  }
+}
+
+class ConnectTimeoutError extends UndiciError {
+  constructor (message) {
+    super(message)
+    Error.captureStackTrace(this, ConnectTimeoutError)
+    this.name = 'ConnectTimeoutError'
+    this.message = message || 'Connect Timeout Error'
+    this.code = 'UND_ERR_CONNECT_TIMEOUT'
+  }
+}
+
+class HeadersTimeoutError extends UndiciError {
+  constructor (message) {
+    super(message)
+    Error.captureStackTrace(this, HeadersTimeoutError)
+    this.name = 'HeadersTimeoutError'
+    this.message = message || 'Headers Timeout Error'
+    this.code = 'UND_ERR_HEADERS_TIMEOUT'
+  }
+}
+
+class HeadersOverflowError extends UndiciError {
+  constructor (message) {
+    super(message)
+    Error.captureStackTrace(this, HeadersOverflowError)
+    this.name = 'HeadersOverflowError'
+    this.message = message || 'Headers Overflow Error'
+    this.code = 'UND_ERR_HEADERS_OVERFLOW'
+  }
+}
+
+class BodyTimeoutError extends UndiciError {
+  constructor (message) {
+    super(message)
+    Error.captureStackTrace(this, BodyTimeoutError)
+    this.name = 'BodyTimeoutError'
+    this.message = message || 'Body Timeout Error'
+    this.code = 'UND_ERR_BODY_TIMEOUT'
+  }
+}
+
+class ResponseStatusCodeError extends UndiciError {
+  constructor (message, statusCode, headers, body) {
+    super(message)
+    Error.captureStackTrace(this, ResponseStatusCodeError)
+    this.name = 'ResponseStatusCodeError'
+    this.message = message || 'Response Status Code Error'
+    this.code = 'UND_ERR_RESPONSE_STATUS_CODE'
+    this.body = body
+    this.status = statusCode
+    this.statusCode = statusCode
+    this.headers = headers
+  }
+}
+
+class InvalidArgumentError extends UndiciError {
+  constructor (message) {
+    super(message)
+    Error.captureStackTrace(this, InvalidArgumentError)
+    this.name = 'InvalidArgumentError'
+    this.message = message || 'Invalid Argument Error'
+    this.code = 'UND_ERR_INVALID_ARG'
+  }
+}
+
+class InvalidReturnValueError extends UndiciError {
+  constructor (message) {
+    super(message)
+    Error.captureStackTrace(this, InvalidReturnValueError)
+    this.name = 'InvalidReturnValueError'
+    this.message = message || 'Invalid Return Value Error'
+    this.code = 'UND_ERR_INVALID_RETURN_VALUE'
+  }
+}
+
+class RequestAbortedError extends UndiciError {
+  constructor (message) {
+    super(message)
+    Error.captureStackTrace(this, RequestAbortedError)
+    this.name = 'AbortError'
+    this.message = message || 'Request aborted'
+    this.code = 'UND_ERR_ABORTED'
+  }
+}
+
+class InformationalError extends UndiciError {
+  constructor (message) {
+    super(message)
+    Error.captureStackTrace(this, InformationalError)
+    this.name = 'InformationalError'
+    this.message = message || 'Request information'
+    this.code = 'UND_ERR_INFO'
+  }
+}
+
+class RequestContentLengthMismatchError extends UndiciError {
+  constructor (message) {
+    super(message)
+    Error.captureStackTrace(this, RequestContentLengthMismatchError)
+    this.name = 'RequestContentLengthMismatchError'
+    this.message = message || 'Request body length does not match content-length header'
+    this.code = 'UND_ERR_REQ_CONTENT_LENGTH_MISMATCH'
+  }
+}
+
+class ResponseContentLengthMismatchError extends UndiciError {
+  constructor (message) {
+    super(message)
+    Error.captureStackTrace(this, ResponseContentLengthMismatchError)
+    this.name = 'ResponseContentLengthMismatchError'
+    this.message = message || 'Response body length does not match content-length header'
+    this.code = 'UND_ERR_RES_CONTENT_LENGTH_MISMATCH'
+  }
+}
+
+class ClientDestroyedError extends UndiciError {
+  constructor (message) {
+    super(message)
+    Error.captureStackTrace(this, ClientDestroyedError)
+    this.name = 'ClientDestroyedError'
+    this.message = message || 'The client is destroyed'
+    this.code = 'UND_ERR_DESTROYED'
+  }
+}
+
+class ClientClosedError extends UndiciError {
+  constructor (message) {
+    super(message)
+    Error.captureStackTrace(this, ClientClosedError)
+    this.name = 'ClientClosedError'
+    this.message = message || 'The client is closed'
+    this.code = 'UND_ERR_CLOSED'
+  }
+}
+
+class SocketError extends UndiciError {
+  constructor (message, socket) {
+    super(message)
+    Error.captureStackTrace(this, SocketError)
+    this.name = 'SocketError'
+    this.message = message || 'Socket error'
+    this.code = 'UND_ERR_SOCKET'
+    this.socket = socket
+  }
+}
+
+class NotSupportedError extends UndiciError {
+  constructor (message) {
+    super(message)
+    Error.captureStackTrace(this, NotSupportedError)
+    this.name = 'NotSupportedError'
+    this.message = message || 'Not supported error'
+    this.code = 'UND_ERR_NOT_SUPPORTED'
+  }
+}
+
+class BalancedPoolMissingUpstreamError extends UndiciError {
+  constructor (message) {
+    super(message)
+    Error.captureStackTrace(this, BalancedPoolMissingUpstreamError)
+    this.name = 'MissingUpstreamError'
+    this.message = message || 'No upstream has been added to the BalancedPool'
+    this.code = 'UND_ERR_BPL_MISSING_UPSTREAM'
+  }
+}
+
+class HTTPParserError extends Error {
+  constructor (message, code, data) {
+    super(message)
+    Error.captureStackTrace(this, HTTPParserError)
+    this.name = 'HTTPParserError'
+    this.code = code ? `HPE_${code}` : undefined
+    this.data = data ? data.toString() : undefined
+  }
+}
+
+class ResponseExceededMaxSizeError extends UndiciError {
+  constructor (message) {
+    super(message)
+    Error.captureStackTrace(this, ResponseExceededMaxSizeError)
+    this.name = 'ResponseExceededMaxSizeError'
+    this.message = message || 'Response content exceeded max size'
+    this.code = 'UND_ERR_RES_EXCEEDED_MAX_SIZE'
+  }
+}
+
+class RequestRetryError extends UndiciError {
+  constructor (message, code, { headers, data }) {
+    super(message)
+    Error.captureStackTrace(this, RequestRetryError)
+    this.name = 'RequestRetryError'
+    this.message = message || 'Request retry error'
+    this.code = 'UND_ERR_REQ_RETRY'
+    this.statusCode = code
+    this.data = data
+    this.headers = headers
+  }
+}
+
+module.exports = {
+  HTTPParserError,
+  UndiciError,
+  HeadersTimeoutError,
+  HeadersOverflowError,
+  BodyTimeoutError,
+  RequestContentLengthMismatchError,
+  ConnectTimeoutError,
+  ResponseStatusCodeError,
+  InvalidArgumentError,
+  InvalidReturnValueError,
+  RequestAbortedError,
+  ClientDestroyedError,
+  ClientClosedError,
+  InformationalError,
+  SocketError,
+  NotSupportedError,
+  ResponseContentLengthMismatchError,
+  BalancedPoolMissingUpstreamError,
+  ResponseExceededMaxSizeError,
+  RequestRetryError
+}
+
+
+/***/ }),
+
+/***/ 4916:
+/***/ ((module, __unused_webpack_exports, __nccwpck_require__) => {
+
+
+
+const {
+  InvalidArgumentError,
+  NotSupportedError
+} = __nccwpck_require__(3862)
+const assert = __nccwpck_require__(2613)
+const { kHTTP2BuildRequest, kHTTP2CopyHeaders, kHTTP1BuildRequest } = __nccwpck_require__(4856)
+const util = __nccwpck_require__(3465)
+
+// tokenRegExp and headerCharRegex have been lifted from
+// https://github.com/nodejs/node/blob/main/lib/_http_common.js
+
+/**
+ * Verifies that the given val is a valid HTTP token
+ * per the rules defined in RFC 7230
+ * See https://tools.ietf.org/html/rfc7230#section-3.2.6
+ */
+const tokenRegExp = /^[\^_`a-zA-Z\-0-9!#$%&'*+.|~]+$/
+
+/**
+ * Matches if val contains an invalid field-vchar
+ *  field-value    = *( field-content / obs-fold )
+ *  field-content  = field-vchar [ 1*( SP / HTAB ) field-vchar ]
+ *  field-vchar    = VCHAR / obs-text
+ */
+const headerCharRegex = /[^\t\x20-\x7e\x80-\xff]/
+
+// Verifies that a given path is valid and does not contain control chars \x00 to \x20
+const invalidPathRegex = /[^\u0021-\u00ff]/
+
+const kHandler = Symbol('handler')
+
+const channels = {}
+
+let extractBody
+
+try {
+  const diagnosticsChannel = __nccwpck_require__(1637)
+  channels.create = diagnosticsChannel.channel('undici:request:create')
+  channels.bodySent = diagnosticsChannel.channel('undici:request:bodySent')
+  channels.headers = diagnosticsChannel.channel('undici:request:headers')
+  channels.trailers = diagnosticsChannel.channel('undici:request:trailers')
+  channels.error = diagnosticsChannel.channel('undici:request:error')
+} catch {
+  channels.create = { hasSubscribers: false }
+  channels.bodySent = { hasSubscribers: false }
+  channels.headers = { hasSubscribers: false }
+  channels.trailers = { hasSubscribers: false }
+  channels.error = { hasSubscribers: false }
+}
+
+class Request {
+  constructor (origin, {
+    path,
+    method,
+    body,
+    headers,
+    query,
+    idempotent,
+    blocking,
+    upgrade,
+    headersTimeout,
+    bodyTimeout,
+    reset,
+    throwOnError,
+    expectContinue
+  }, handler) {
+    if (typeof path !== 'string') {
+      throw new InvalidArgumentError('path must be a string')
+    } else if (
+      path[0] !== '/' &&
+      !(path.startsWith('http://') || path.startsWith('https://')) &&
+      method !== 'CONNECT'
+    ) {
+      throw new InvalidArgumentError('path must be an absolute URL or start with a slash')
+    } else if (invalidPathRegex.exec(path) !== null) {
+      throw new InvalidArgumentError('invalid request path')
+    }
+
+    if (typeof method !== 'string') {
+      throw new InvalidArgumentError('method must be a string')
+    } else if (tokenRegExp.exec(method) === null) {
+      throw new InvalidArgumentError('invalid request method')
+    }
+
+    if (upgrade && typeof upgrade !== 'string') {
+      throw new InvalidArgumentError('upgrade must be a string')
+    }
+
+    if (headersTimeout != null && (!Number.isFinite(headersTimeout) || headersTimeout < 0)) {
+      throw new InvalidArgumentError('invalid headersTimeout')
+    }
+
+    if (bodyTimeout != null && (!Number.isFinite(bodyTimeout) || bodyTimeout < 0)) {
+      throw new InvalidArgumentError('invalid bodyTimeout')
+    }
+
+    if (reset != null && typeof reset !== 'boolean') {
+      throw new InvalidArgumentError('invalid reset')
+    }
+
+    if (expectContinue != null && typeof expectContinue !== 'boolean') {
+      throw new InvalidArgumentError('invalid expectContinue')
+    }
+
+    this.headersTimeout = headersTimeout
+
+    this.bodyTimeout = bodyTimeout
+
+    this.throwOnError = throwOnError === true
+
+    this.method = method
+
+    this.abort = null
+
+    if (body == null) {
+      this.body = null
+    } else if (util.isStream(body)) {
+      this.body = body
+
+      const rState = this.body._readableState
+      if (!rState || !rState.autoDestroy) {
+        this.endHandler = function autoDestroy () {
+          util.destroy(this)
+        }
+        this.body.on('end', this.endHandler)
+      }
+
+      this.errorHandler = err => {
+        if (this.abort) {
+          this.abort(err)
+        } else {
+          this.error = err
+        }
+      }
+      this.body.on('error', this.errorHandler)
+    } else if (util.isBuffer(body)) {
+      this.body = body.byteLength ? body : null
+    } else if (ArrayBuffer.isView(body)) {
+      this.body = body.buffer.byteLength ? Buffer.from(body.buffer, body.byteOffset, body.byteLength) : null
+    } else if (body instanceof ArrayBuffer) {
+      this.body = body.byteLength ? Buffer.from(body) : null
+    } else if (typeof body === 'string') {
+      this.body = body.length ? Buffer.from(body) : null
+    } else if (util.isFormDataLike(body) || util.isIterable(body) || util.isBlobLike(body)) {
+      this.body = body
+    } else {
+      throw new InvalidArgumentError('body must be a string, a Buffer, a Readable stream, an iterable, or an async iterable')
+    }
+
+    this.completed = false
+
+    this.aborted = false
+
+    this.upgrade = upgrade || null
+
+    this.path = query ? util.buildURL(path, query) : path
+
+    this.origin = origin
+
+    this.idempotent = idempotent == null
+      ? method === 'HEAD' || method === 'GET'
+      : idempotent
+
+    this.blocking = blocking == null ? false : blocking
+
+    this.reset = reset == null ? null : reset
+
+    this.host = null
+
+    this.contentLength = null
+
+    this.contentType = null
+
+    this.headers = ''
+
+    // Only for H2
+    this.expectContinue = expectContinue != null ? expectContinue : false
+
+    if (Array.isArray(headers)) {
+      if (headers.length % 2 !== 0) {
+        throw new InvalidArgumentError('headers array must be even')
+      }
+      for (let i = 0; i < headers.length; i += 2) {
+        processHeader(this, headers[i], headers[i + 1])
+      }
+    } else if (headers && typeof headers === 'object') {
+      const keys = Object.keys(headers)
+      for (let i = 0; i < keys.length; i++) {
+        const key = keys[i]
+        processHeader(this, key, headers[key])
+      }
+    } else if (headers != null) {
+      throw new InvalidArgumentError('headers must be an object or an array')
+    }
+
+    if (util.isFormDataLike(this.body)) {
+      if (util.nodeMajor < 16 || (util.nodeMajor === 16 && util.nodeMinor < 8)) {
+        throw new InvalidArgumentError('Form-Data bodies are only supported in node v16.8 and newer.')
+      }
+
+      if (!extractBody) {
+        extractBody = (__nccwpck_require__(1380).extractBody)
+      }
+
+      const [bodyStream, contentType] = extractBody(body)
+      if (this.contentType == null) {
+        this.contentType = contentType
+        this.headers += `content-type: ${contentType}\r\n`
+      }
+      this.body = bodyStream.stream
+      this.contentLength = bodyStream.length
+    } else if (util.isBlobLike(body) && this.contentType == null && body.type) {
+      this.contentType = body.type
+      this.headers += `content-type: ${body.type}\r\n`
+    }
+
+    util.validateHandler(handler, method, upgrade)
+
+    this.servername = util.getServerName(this.host)
+
+    this[kHandler] = handler
+
+    if (channels.create.hasSubscribers) {
+      channels.create.publish({ request: this })
+    }
+  }
+
+  onBodySent (chunk) {
+    if (this[kHandler].onBodySent) {
+      try {
+        return this[kHandler].onBodySent(chunk)
+      } catch (err) {
+        this.abort(err)
+      }
+    }
+  }
+
+  onRequestSent () {
+    if (channels.bodySent.hasSubscribers) {
+      channels.bodySent.publish({ request: this })
+    }
+
+    if (this[kHandler].onRequestSent) {
+      try {
+        return this[kHandler].onRequestSent()
+      } catch (err) {
+        this.abort(err)
+      }
+    }
+  }
+
+  onConnect (abort) {
+    assert(!this.aborted)
+    assert(!this.completed)
+
+    if (this.error) {
+      abort(this.error)
+    } else {
+      this.abort = abort
+      return this[kHandler].onConnect(abort)
+    }
+  }
+
+  onHeaders (statusCode, headers, resume, statusText) {
+    assert(!this.aborted)
+    assert(!this.completed)
+
+    if (channels.headers.hasSubscribers) {
+      channels.headers.publish({ request: this, response: { statusCode, headers, statusText } })
+    }
+
+    try {
+      return this[kHandler].onHeaders(statusCode, headers, resume, statusText)
+    } catch (err) {
+      this.abort(err)
+    }
+  }
+
+  onData (chunk) {
+    assert(!this.aborted)
+    assert(!this.completed)
+
+    try {
+      return this[kHandler].onData(chunk)
+    } catch (err) {
+      this.abort(err)
+      return false
+    }
+  }
+
+  onUpgrade (statusCode, headers, socket) {
+    assert(!this.aborted)
+    assert(!this.completed)
+
+    return this[kHandler].onUpgrade(statusCode, headers, socket)
+  }
+
+  onComplete (trailers) {
+    this.onFinally()
+
+    assert(!this.aborted)
+
+    this.completed = true
+    if (channels.trailers.hasSubscribers) {
+      channels.trailers.publish({ request: this, trailers })
+    }
+
+    try {
+      return this[kHandler].onComplete(trailers)
+    } catch (err) {
+      // TODO (fix): This might be a bad idea?
+      this.onError(err)
+    }
+  }
+
+  onError (error) {
+    this.onFinally()
+
+    if (channels.error.hasSubscribers) {
+      channels.error.publish({ request: this, error })
+    }
+
+    if (this.aborted) {
+      return
+    }
+    this.aborted = true
+
+    return this[kHandler].onError(error)
+  }
+
+  onFinally () {
+    if (this.errorHandler) {
+      this.body.off('error', this.errorHandler)
+      this.errorHandler = null
+    }
+
+    if (this.endHandler) {
+      this.body.off('end', this.endHandler)
+      this.endHandler = null
+    }
+  }
+
+  // TODO: adjust to support H2
+  addHeader (key, value) {
+    processHeader(this, key, value)
+    return this
+  }
+
+  static [kHTTP1BuildRequest] (origin, opts, handler) {
+    // TODO: Migrate header parsing here, to make Requests
+    // HTTP agnostic
+    return new Request(origin, opts, handler)
+  }
+
+  static [kHTTP2BuildRequest] (origin, opts, handler) {
+    const headers = opts.headers
+    opts = { ...opts, headers: null }
+
+    const request = new Request(origin, opts, handler)
+
+    request.headers = {}
+
+    if (Array.isArray(headers)) {
+      if (headers.length % 2 !== 0) {
+        throw new InvalidArgumentError('headers array must be even')
+      }
+      for (let i = 0; i < headers.length; i += 2) {
+        processHeader(request, headers[i], headers[i + 1], true)
+      }
+    } else if (headers && typeof headers === 'object') {
+      const keys = Object.keys(headers)
+      for (let i = 0; i < keys.length; i++) {
+        const key = keys[i]
+        processHeader(request, key, headers[key], true)
+      }
+    } else if (headers != null) {
+      throw new InvalidArgumentError('headers must be an object or an array')
+    }
+
+    return request
+  }
+
+  static [kHTTP2CopyHeaders] (raw) {
+    const rawHeaders = raw.split('\r\n')
+    const headers = {}
+
+    for (const header of rawHeaders) {
+      const [key, value] = header.split(': ')
+
+      if (value == null || value.length === 0) continue
+
+      if (headers[key]) headers[key] += `,${value}`
+      else headers[key] = value
+    }
+
+    return headers
+  }
+}
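+
+// Illustrative example only (not executed): the kHTTP2CopyHeaders helper
+// turns the raw "key: value\r\n" header string built for HTTP/1 into the
+// object form the HTTP/2 path expects, joining repeated keys with commas.
+//
+//   Request[kHTTP2CopyHeaders]('accept: text/html\r\naccept: text/plain\r\n')
+//   // => { accept: 'text/html,text/plain' }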
+
+function processHeaderValue (key, val, skipAppend) {
+  if (val && typeof val === 'object') {
+    throw new InvalidArgumentError(`invalid ${key} header`)
+  }
+
+  val = val != null ? `${val}` : ''
+
+  if (headerCharRegex.exec(val) !== null) {
+    throw new InvalidArgumentError(`invalid ${key} header`)
+  }
+
+  return skipAppend ? val : `${key}: ${val}\r\n`
+}
+
+function processHeader (request, key, val, skipAppend = false) {
+  if (val && (typeof val === 'object' && !Array.isArray(val))) {
+    throw new InvalidArgumentError(`invalid ${key} header`)
+  } else if (val === undefined) {
+    return
+  }
+
+  if (
+    request.host === null &&
+    key.length === 4 &&
+    key.toLowerCase() === 'host'
+  ) {
+    if (headerCharRegex.exec(val) !== null) {
+      throw new InvalidArgumentError(`invalid ${key} header`)
+    }
+    // Consumed by Client
+    request.host = val
+  } else if (
+    request.contentLength === null &&
+    key.length === 14 &&
+    key.toLowerCase() === 'content-length'
+  ) {
+    request.contentLength = parseInt(val, 10)
+    if (!Number.isFinite(request.contentLength)) {
+      throw new InvalidArgumentError('invalid content-length header')
+    }
+  } else if (
+    request.contentType === null &&
+    key.length === 12 &&
+    key.toLowerCase() === 'content-type'
+  ) {
+    request.contentType = val
+    if (skipAppend) request.headers[key] = processHeaderValue(key, val, skipAppend)
+    else request.headers += processHeaderValue(key, val)
+  } else if (
+    key.length === 17 &&
+    key.toLowerCase() === 'transfer-encoding'
+  ) {
+    throw new InvalidArgumentError('invalid transfer-encoding header')
+  } else if (
+    key.length === 10 &&
+    key.toLowerCase() === 'connection'
+  ) {
+    const value = typeof val === 'string' ? val.toLowerCase() : null
+    if (value !== 'close' && value !== 'keep-alive') {
+      throw new InvalidArgumentError('invalid connection header')
+    } else if (value === 'close') {
+      request.reset = true
+    }
+  } else if (
+    key.length === 10 &&
+    key.toLowerCase() === 'keep-alive'
+  ) {
+    throw new InvalidArgumentError('invalid keep-alive header')
+  } else if (
+    key.length === 7 &&
+    key.toLowerCase() === 'upgrade'
+  ) {
+    throw new InvalidArgumentError('invalid upgrade header')
+  } else if (
+    key.length === 6 &&
+    key.toLowerCase() === 'expect'
+  ) {
+    throw new NotSupportedError('expect header not supported')
+  } else if (tokenRegExp.exec(key) === null) {
+    throw new InvalidArgumentError('invalid header key')
+  } else {
+    if (Array.isArray(val)) {
+      for (let i = 0; i < val.length; i++) {
+        if (skipAppend) {
+          if (request.headers[key]) request.headers[key] += `,${processHeaderValue(key, val[i], skipAppend)}`
+          else request.headers[key] = processHeaderValue(key, val[i], skipAppend)
+        } else {
+          request.headers += processHeaderValue(key, val[i])
+        }
+      }
+    } else {
+      if (skipAppend) request.headers[key] = processHeaderValue(key, val, skipAppend)
+      else request.headers += processHeaderValue(key, val)
+    }
+  }
+}
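+
+// Illustrative example only (not executed): special-cased headers update
+// fields on the request instead of (or in addition to) the raw header
+// string; the plain object below only mimics the fields processHeader touches.
+//
+//   const req = { host: null, contentLength: null, contentType: null, headers: '', reset: null }
+//   processHeader(req, 'Content-Length', '42')
+//   // req.contentLength === 42, req.headers === ''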
+
+module.exports = Request
+
+
+/***/ }),
+
+/***/ 4856:
+/***/ ((module) => {
+
+module.exports = {
+  kClose: Symbol('close'),
+  kDestroy: Symbol('destroy'),
+  kDispatch: Symbol('dispatch'),
+  kUrl: Symbol('url'),
+  kWriting: Symbol('writing'),
+  kResuming: Symbol('resuming'),
+  kQueue: Symbol('queue'),
+  kConnect: Symbol('connect'),
+  kConnecting: Symbol('connecting'),
+  kHeadersList: Symbol('headers list'),
+  kKeepAliveDefaultTimeout: Symbol('default keep alive timeout'),
+  kKeepAliveMaxTimeout: Symbol('max keep alive timeout'),
+  kKeepAliveTimeoutThreshold: Symbol('keep alive timeout threshold'),
+  kKeepAliveTimeoutValue: Symbol('keep alive timeout'),
+  kKeepAlive: Symbol('keep alive'),
+  kHeadersTimeout: Symbol('headers timeout'),
+  kBodyTimeout: Symbol('body timeout'),
+  kServerName: Symbol('server name'),
+  kLocalAddress: Symbol('local address'),
+  kHost: Symbol('host'),
+  kNoRef: Symbol('no ref'),
+  kBodyUsed: Symbol('used'),
+  kRunning: Symbol('running'),
+  kBlocking: Symbol('blocking'),
+  kPending: Symbol('pending'),
+  kSize: Symbol('size'),
+  kBusy: Symbol('busy'),
+  kQueued: Symbol('queued'),
+  kFree: Symbol('free'),
+  kConnected: Symbol('connected'),
+  kClosed: Symbol('closed'),
+  kNeedDrain: Symbol('need drain'),
+  kReset: Symbol('reset'),
+  kDestroyed: Symbol.for('nodejs.stream.destroyed'),
+  kMaxHeadersSize: Symbol('max headers size'),
+  kRunningIdx: Symbol('running index'),
+  kPendingIdx: Symbol('pending index'),
+  kError: Symbol('error'),
+  kClients: Symbol('clients'),
+  kClient: Symbol('client'),
+  kParser: Symbol('parser'),
+  kOnDestroyed: Symbol('destroy callbacks'),
+  kPipelining: Symbol('pipelining'),
+  kSocket: Symbol('socket'),
+  kHostHeader: Symbol('host header'),
+  kConnector: Symbol('connector'),
+  kStrictContentLength: Symbol('strict content length'),
+  kMaxRedirections: Symbol('maxRedirections'),
+  kMaxRequests: Symbol('maxRequestsPerClient'),
+  kProxy: Symbol('proxy agent options'),
+  kCounter: Symbol('socket request counter'),
+  kInterceptors: Symbol('dispatch interceptors'),
+  kMaxResponseSize: Symbol('max response size'),
+  kHTTP2Session: Symbol('http2Session'),
+  kHTTP2SessionState: Symbol('http2Session state'),
+  kHTTP2BuildRequest: Symbol('http2 build request'),
+  kHTTP1BuildRequest: Symbol('http1 build request'),
+  kHTTP2CopyHeaders: Symbol('http2 copy headers'),
+  kHTTPConnVersion: Symbol('http connection version'),
+  kRetryHandlerDefaultRetry: Symbol('retry agent default retry'),
+  kConstruct: Symbol('constructable')
+}
+
+
+/***/ }),
+
+/***/ 3465:
+/***/ ((module, __unused_webpack_exports, __nccwpck_require__) => {
+
+
+
+const assert = __nccwpck_require__(2613)
+const { kDestroyed, kBodyUsed } = __nccwpck_require__(4856)
+const { IncomingMessage } = __nccwpck_require__(8611)
+const stream = __nccwpck_require__(2203)
+const net = __nccwpck_require__(9278)
+const { InvalidArgumentError } = __nccwpck_require__(3862)
+const { Blob } = __nccwpck_require__(181)
+const nodeUtil = __nccwpck_require__(9023)
+const { stringify } = __nccwpck_require__(3480)
+const { headerNameLowerCasedRecord } = __nccwpck_require__(8088)
+
+const [nodeMajor, nodeMinor] = process.versions.node.split('.').map(v => Number(v))
+
+function nop () {}
+
+function isStream (obj) {
+  return obj && typeof obj === 'object' && typeof obj.pipe === 'function' && typeof obj.on === 'function'
+}
+
+// based on https://github.com/node-fetch/fetch-blob/blob/8ab587d34080de94140b54f07168451e7d0b655e/index.js#L229-L241 (MIT License)
+function isBlobLike (object) {
+  return (Blob && object instanceof Blob) || (
+    object &&
+    typeof object === 'object' &&
+    (typeof object.stream === 'function' ||
+      typeof object.arrayBuffer === 'function') &&
+    /^(Blob|File)$/.test(object[Symbol.toStringTag])
+  )
+}
+
+function buildURL (url, queryParams) {
+  if (url.includes('?') || url.includes('#')) {
+    throw new Error('Query params cannot be passed when url already contains "?" or "#".')
+  }
+
+  const stringified = stringify(queryParams)
+
+  if (stringified) {
+    url += '?' + stringified
+  }
+
+  return url
+}
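+
+// Illustrative example only (not executed): buildURL appends query
+// parameters via querystring.stringify and rejects urls that already
+// carry a query or fragment.
+//
+//   buildURL('/search', { q: 'undici', page: 2 }) // => '/search?q=undici&page=2'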
+
+function parseURL (url) {
+  if (typeof url === 'string') {
+    url = new URL(url)
+
+    if (!/^https?:/.test(url.origin || url.protocol)) {
+      throw new InvalidArgumentError('Invalid URL protocol: the URL must start with `http:` or `https:`.')
+    }
+
+    return url
+  }
+
+  if (!url || typeof url !== 'object') {
+    throw new InvalidArgumentError('Invalid URL: The URL argument must be a non-null object.')
+  }
+
+  if (!/^https?:/.test(url.origin || url.protocol)) {
+    throw new InvalidArgumentError('Invalid URL protocol: the URL must start with `http:` or `https:`.')
+  }
+
+  if (!(url instanceof URL)) {
+    if (url.port != null && url.port !== '' && !Number.isFinite(parseInt(url.port))) {
+      throw new InvalidArgumentError('Invalid URL: port must be a valid integer or a string representation of an integer.')
+    }
+
+    if (url.path != null && typeof url.path !== 'string') {
+      throw new InvalidArgumentError('Invalid URL path: the path must be a string or null/undefined.')
+    }
+
+    if (url.pathname != null && typeof url.pathname !== 'string') {
+      throw new InvalidArgumentError('Invalid URL pathname: the pathname must be a string or null/undefined.')
+    }
+
+    if (url.hostname != null && typeof url.hostname !== 'string') {
+      throw new InvalidArgumentError('Invalid URL hostname: the hostname must be a string or null/undefined.')
+    }
+
+    if (url.origin != null && typeof url.origin !== 'string') {
+      throw new InvalidArgumentError('Invalid URL origin: the origin must be a string or null/undefined.')
+    }
+
+    const port = url.port != null
+      ? url.port
+      : (url.protocol === 'https:' ? 443 : 80)
+    let origin = url.origin != null
+      ? url.origin
+      : `${url.protocol}//${url.hostname}:${port}`
+    let path = url.path != null
+      ? url.path
+      : `${url.pathname || ''}${url.search || ''}`
+
+    if (origin.endsWith('/')) {
+      origin = origin.substring(0, origin.length - 1)
+    }
+
+    if (path && !path.startsWith('/')) {
+      path = `/${path}`
+    }
+    // new URL(path, origin) is unsafe when `path` contains an absolute URL
+    // From https://developer.mozilla.org/en-US/docs/Web/API/URL/URL:
+    // If first parameter is a relative URL, second param is required, and will be used as the base URL.
+    // If first parameter is an absolute URL, a given second param will be ignored.
+    url = new URL(origin + path)
+  }
+
+  return url
+}
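+
+// Illustrative example only (not executed): parseURL normalizes both string
+// and object inputs into a WHATWG URL, defaulting the port from the protocol.
+//
+//   parseURL('https://example.com/base').origin // => 'https://example.com'
+//   parseURL({ protocol: 'http:', hostname: 'example.com', pathname: '/x' }).href
+//   // => 'http://example.com/x'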
+
+function parseOrigin (url) {
+  url = parseURL(url)
+
+  if (url.pathname !== '/' || url.search || url.hash) {
+    throw new InvalidArgumentError('invalid url')
+  }
+
+  return url
+}
+
+function getHostname (host) {
+  if (host[0] === '[') {
+    const idx = host.indexOf(']')
+
+    assert(idx !== -1)
+    return host.substring(1, idx)
+  }
+
+  const idx = host.indexOf(':')
+  if (idx === -1) return host
+
+  return host.substring(0, idx)
+}
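+// e.g. getHostname('[::1]:8080') returns '::1' (the bracketed IPv6 literal),
+// getHostname('example.com:443') returns 'example.com', and a host without a
+// port is returned unchanged.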
+
+// IP addresses are not valid server names per RFC6066
+// > Currently, the only server names supported are DNS hostnames
+function getServerName (host) {
+  if (!host) {
+    return null
+  }
+
+  assert.strictEqual(typeof host, 'string')
+
+  const servername = getHostname(host)
+  if (net.isIP(servername)) {
+    return ''
+  }
+
+  return servername
+}
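+// e.g. getServerName('example.com:443') returns 'example.com' for use as the
+// TLS SNI servername, while getServerName('127.0.0.1:443') returns '' so that
+// no servername is sent for IP literals, per the RFC 6066 note above.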
+
+function deepClone (obj) {
+  return JSON.parse(JSON.stringify(obj))
+}
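+// Note: the JSON round-trip above only preserves JSON-serializable data;
+// functions, undefined values and symbols are dropped and Dates become strings.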
+
+function isAsyncIterable (obj) {
+  return !!(obj != null && typeof obj[Symbol.asyncIterator] === 'function')
+}
+
+function isIterable (obj) {
+  return !!(obj != null && (typeof obj[Symbol.iterator] === 'function' || typeof obj[Symbol.asyncIterator] === 'function'))
+}
+
+function bodyLength (body) {
+  if (body == null) {
+    return 0
+  } else if (isStream(body)) {
+    const state = body._readableState
+    return state && state.objectMode === false && state.ended === true && Number.isFinite(state.length)
+      ? state.length
+      : null
+  } else if (isBlobLike(body)) {
+    return body.size != null ? body.size : null
+  } else if (isBuffer(body)) {
+    return body.byteLength
+  }
+
+  return null
+}
+
+function isDestroyed (stream) {
+  return !stream || !!(stream.destroyed || stream[kDestroyed])
+}
+
+function isReadableAborted (stream) {
+  const state = stream && stream._readableState
+  return isDestroyed(stream) && state && !state.endEmitted
+}
+
+function destroy (stream, err) {
+  if (stream == null || !isStream(stream) || isDestroyed(stream)) {
+    return
+  }
+
+  if (typeof stream.destroy === 'function') {
+    if (Object.getPrototypeOf(stream).constructor === IncomingMessage) {
+      // See: https://github.com/nodejs/node/pull/38505/files
+      stream.socket = null
+    }
+
+    stream.destroy(err)
+  } else if (err) {
+    process.nextTick((stream, err) => {
+      stream.emit('error', err)
+    }, stream, err)
+  }
+
+  if (stream.destroyed !== true) {
+    stream[kDestroyed] = true
+  }
+}
+
+const KEEPALIVE_TIMEOUT_EXPR = /timeout=(\d+)/
+function parseKeepAliveTimeout (val) {
+  const m = val.toString().match(KEEPALIVE_TIMEOUT_EXPR)
+  return m ? parseInt(m[1], 10) * 1000 : null
+}
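+// e.g. parseKeepAliveTimeout('timeout=5, max=1000') returns 5000 (milliseconds);
+// a Keep-Alive header without a timeout parameter yields null.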
+
+/**
+ * Retrieves a header name and returns its lowercase value.
+ * @param {string | Buffer} value Header name
+ * @returns {string}
+ */
+function headerNameToString (value) {
+  return headerNameLowerCasedRecord[value] || value.toLowerCase()
+}
+
+function parseHeaders (headers, obj = {}) {
+  // For H2 support
+  if (!Array.isArray(headers)) return headers
+
+  for (let i = 0; i < headers.length; i += 2) {
+    const key = headers[i].toString().toLowerCase()
+    let val = obj[key]
+
+    if (!val) {
+      if (Array.isArray(headers[i + 1])) {
+        obj[key] = headers[i + 1].map(x => x.toString('utf8'))
+      } else {
+        obj[key] = headers[i + 1].toString('utf8')
+      }
+    } else {
+      if (!Array.isArray(val)) {
+        val = [val]
+        obj[key] = val
+      }
+      val.push(headers[i + 1].toString('utf8'))
+    }
+  }
+
+  // See https://github.com/nodejs/node/pull/46528
+  if ('content-length' in obj && 'content-disposition' in obj) {
+    obj['content-disposition'] = Buffer.from(obj['content-disposition']).toString('latin1')
+  }
+
+  return obj
+}
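+// Illustrative input/output: the flat raw-header array
+// ['Content-Type', 'text/plain', 'Set-Cookie', 'a=1', 'Set-Cookie', 'b=2']
+// becomes { 'content-type': 'text/plain', 'set-cookie': ['a=1', 'b=2'] }:
+// names are lower-cased and repeated headers are collected into arrays.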
+
+function parseRawHeaders (headers) {
+  const ret = []
+  let hasContentLength = false
+  let contentDispositionIdx = -1
+
+  for (let n = 0; n < headers.length; n += 2) {
+    const key = headers[n + 0].toString()
+    const val = headers[n + 1].toString('utf8')
+
+    if (key.length === 14 && (key === 'content-length' || key.toLowerCase() === 'content-length')) {
+      ret.push(key, val)
+      hasContentLength = true
+    } else if (key.length === 19 && (key === 'content-disposition' || key.toLowerCase() === 'content-disposition')) {
+      contentDispositionIdx = ret.push(key, val) - 1
+    } else {
+      ret.push(key, val)
+    }
+  }
+
+  // See https://github.com/nodejs/node/pull/46528
+  if (hasContentLength && contentDispositionIdx !== -1) {
+    ret[contentDispositionIdx] = Buffer.from(ret[contentDispositionIdx]).toString('latin1')
+  }
+
+  return ret
+}
+
+function isBuffer (buffer) {
+  // See https://github.com/mcollina/undici/pull/319
+  return buffer instanceof Uint8Array || Buffer.isBuffer(buffer)
+}
+
+function validateHandler (handler, method, upgrade) {
+  if (!handler || typeof handler !== 'object') {
+    throw new InvalidArgumentError('handler must be an object')
+  }
+
+  if (typeof handler.onConnect !== 'function') {
+    throw new InvalidArgumentError('invalid onConnect method')
+  }
+
+  if (typeof handler.onError !== 'function') {
+    throw new InvalidArgumentError('invalid onError method')
+  }
+
+  if (typeof handler.onBodySent !== 'function' && handler.onBodySent !== undefined) {
+    throw new InvalidArgumentError('invalid onBodySent method')
+  }
+
+  if (upgrade || method === 'CONNECT') {
+    if (typeof handler.onUpgrade !== 'function') {
+      throw new InvalidArgumentError('invalid onUpgrade method')
+    }
+  } else {
+    if (typeof handler.onHeaders !== 'function') {
+      throw new InvalidArgumentError('invalid onHeaders method')
+    }
+
+    if (typeof handler.onData !== 'function') {
+      throw new InvalidArgumentError('invalid onData method')
+    }
+
+    if (typeof handler.onComplete !== 'function') {
+      throw new InvalidArgumentError('invalid onComplete method')
+    }
+  }
+}
+
+// A body is disturbed if it has been read from and it cannot
+// be re-used without losing state or data.
+function isDisturbed (body) {
+  return !!(body && (
+    stream.isDisturbed
+      ? stream.isDisturbed(body) || body[kBodyUsed] // TODO (fix): Why is body[kBodyUsed] needed?
+      : body[kBodyUsed] ||
+        body.readableDidRead ||
+        (body._readableState && body._readableState.dataEmitted) ||
+        isReadableAborted(body)
+  ))
+}
+
+function isErrored (body) {
+  return !!(body && (
+    stream.isErrored
+      ? stream.isErrored(body)
+      : /state: 'errored'/.test(nodeUtil.inspect(body)
+      )))
+}
+
+function isReadable (body) {
+  return !!(body && (
+    stream.isReadable
+      ? stream.isReadable(body)
+      : /state: 'readable'/.test(nodeUtil.inspect(body)
+      )))
+}
+
+function getSocketInfo (socket) {
+  return {
+    localAddress: socket.localAddress,
+    localPort: socket.localPort,
+    remoteAddress: socket.remoteAddress,
+    remotePort: socket.remotePort,
+    remoteFamily: socket.remoteFamily,
+    timeout: socket.timeout,
+    bytesWritten: socket.bytesWritten,
+    bytesRead: socket.bytesRead
+  }
+}
+
+async function * convertIterableToBuffer (iterable) {
+  for await (const chunk of iterable) {
+    yield Buffer.isBuffer(chunk) ? chunk : Buffer.from(chunk)
+  }
+}
+
+let ReadableStream
+function ReadableStreamFrom (iterable) {
+  if (!ReadableStream) {
+    ReadableStream = (__nccwpck_require__(3774).ReadableStream)
+  }
+
+  if (ReadableStream.from) {
+    return ReadableStream.from(convertIterableToBuffer(iterable))
+  }
+
+  let iterator
+  return new ReadableStream(
+    {
+      async start () {
+        iterator = iterable[Symbol.asyncIterator]()
+      },
+      async pull (controller) {
+        const { done, value } = await iterator.next()
+        if (done) {
+          queueMicrotask(() => {
+            controller.close()
+          })
+        } else {
+          const buf = Buffer.isBuffer(value) ? value : Buffer.from(value)
+          controller.enqueue(new Uint8Array(buf))
+        }
+        return controller.desiredSize > 0
+      },
+      async cancel (reason) {
+        await iterator.return()
+      }
+    },
+    0
+  )
+}
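+// Descriptive note: this adapts any (async) iterable of Buffer/Uint8Array-like
+// chunks into a web-standard ReadableStream of Uint8Arrays, preferring the
+// native ReadableStream.from when available and otherwise using the manual
+// pull-based fallback above.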
+
+// The object should be a FormData instance (or FormData-like) and expose
+// all the required methods.
+function isFormDataLike (object) {
+  return (
+    object &&
+    typeof object === 'object' &&
+    typeof object.append === 'function' &&
+    typeof object.delete === 'function' &&
+    typeof object.get === 'function' &&
+    typeof object.getAll === 'function' &&
+    typeof object.has === 'function' &&
+    typeof object.set === 'function' &&
+    object[Symbol.toStringTag] === 'FormData'
+  )
+}
+
+function throwIfAborted (signal) {
+  if (!signal) { return }
+  if (typeof signal.throwIfAborted === 'function') {
+    signal.throwIfAborted()
+  } else {
+    if (signal.aborted) {
+      // DOMException not available < v17.0.0
+      const err = new Error('The operation was aborted')
+      err.name = 'AbortError'
+      throw err
+    }
+  }
+}
+
+function addAbortListener (signal, listener) {
+  if ('addEventListener' in signal) {
+    signal.addEventListener('abort', listener, { once: true })
+    return () => signal.removeEventListener('abort', listener)
+  }
+  signal.addListener('abort', listener)
+  return () => signal.removeListener('abort', listener)
+}
+
+const hasToWellFormed = !!String.prototype.toWellFormed
+
+/**
+ * @param {string} val
+ */
+function toUSVString (val) {
+  if (hasToWellFormed) {
+    return `${val}`.toWellFormed()
+  } else if (nodeUtil.toUSVString) {
+    return nodeUtil.toUSVString(val)
+  }
+
+  return `${val}`
+}
+
+// Parsed according to RFC 9110
+// https://www.rfc-editor.org/rfc/rfc9110#field.content-range
+function parseRangeHeader (range) {
+  if (range == null || range === '') return { start: 0, end: null, size: null }
+
+  const m = range ? range.match(/^bytes (\d+)-(\d+)\/(\d+)?$/) : null
+  return m
+    ? {
+        start: parseInt(m[1]),
+        end: m[2] ? parseInt(m[2]) : null,
+        size: m[3] ? parseInt(m[3]) : null
+      }
+    : null
+}
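+// e.g. parseRangeHeader('bytes 0-99/1000') returns { start: 0, end: 99, size: 1000 },
+// a null or empty header returns { start: 0, end: null, size: null }, and any
+// value that does not match the Content-Range grammar returns null.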
+
+const kEnumerableProperty = Object.create(null)
+kEnumerableProperty.enumerable = true
+
+module.exports = {
+  kEnumerableProperty,
+  nop,
+  isDisturbed,
+  isErrored,
+  isReadable,
+  toUSVString,
+  isReadableAborted,
+  isBlobLike,
+  parseOrigin,
+  parseURL,
+  getServerName,
+  isStream,
+  isIterable,
+  isAsyncIterable,
+  isDestroyed,
+  headerNameToString,
+  parseRawHeaders,
+  parseHeaders,
+  parseKeepAliveTimeout,
+  destroy,
+  bodyLength,
+  deepClone,
+  ReadableStreamFrom,
+  isBuffer,
+  validateHandler,
+  getSocketInfo,
+  isFormDataLike,
+  buildURL,
+  throwIfAborted,
+  addAbortListener,
+  parseRangeHeader,
+  nodeMajor,
+  nodeMinor,
+  nodeHasAutoSelectFamily: nodeMajor > 18 || (nodeMajor === 18 && nodeMinor >= 13),
+  safeHTTPMethods: ['GET', 'HEAD', 'OPTIONS', 'TRACE']
+}
+
+
+/***/ }),
+
+/***/ 9368:
+/***/ ((module, __unused_webpack_exports, __nccwpck_require__) => {
+
+
+
+const Dispatcher = __nccwpck_require__(9724)
+const {
+  ClientDestroyedError,
+  ClientClosedError,
+  InvalidArgumentError
+} = __nccwpck_require__(3862)
+const { kDestroy, kClose, kDispatch, kInterceptors } = __nccwpck_require__(4856)
+
+const kDestroyed = Symbol('destroyed')
+const kClosed = Symbol('closed')
+const kOnDestroyed = Symbol('onDestroyed')
+const kOnClosed = Symbol('onClosed')
+const kInterceptedDispatch = Symbol('Intercepted Dispatch')
+
+class DispatcherBase extends Dispatcher {
+  constructor () {
+    super()
+
+    this[kDestroyed] = false
+    this[kOnDestroyed] = null
+    this[kClosed] = false
+    this[kOnClosed] = []
+  }
+
+  get destroyed () {
+    return this[kDestroyed]
+  }
+
+  get closed () {
+    return this[kClosed]
+  }
+
+  get interceptors () {
+    return this[kInterceptors]
+  }
+
+  set interceptors (newInterceptors) {
+    if (newInterceptors) {
+      for (let i = newInterceptors.length - 1; i >= 0; i--) {
+        const interceptor = newInterceptors[i]
+        if (typeof interceptor !== 'function') {
+          throw new InvalidArgumentError('interceptor must be a function')
+        }
+      }
+    }
+
+    this[kInterceptors] = newInterceptors
+  }
+
+  close (callback) {
+    if (callback === undefined) {
+      return new Promise((resolve, reject) => {
+        this.close((err, data) => {
+          return err ? reject(err) : resolve(data)
+        })
+      })
+    }
+
+    if (typeof callback !== 'function') {
+      throw new InvalidArgumentError('invalid callback')
+    }
+
+    if (this[kDestroyed]) {
+      queueMicrotask(() => callback(new ClientDestroyedError(), null))
+      return
+    }
+
+    if (this[kClosed]) {
+      if (this[kOnClosed]) {
+        this[kOnClosed].push(callback)
+      } else {
+        queueMicrotask(() => callback(null, null))
+      }
+      return
+    }
+
+    this[kClosed] = true
+    this[kOnClosed].push(callback)
+
+    const onClosed = () => {
+      const callbacks = this[kOnClosed]
+      this[kOnClosed] = null
+      for (let i = 0; i < callbacks.length; i++) {
+        callbacks[i](null, null)
+      }
+    }
+
+    // Should not error.
+    this[kClose]()
+      .then(() => this.destroy())
+      .then(() => {
+        queueMicrotask(onClosed)
+      })
+  }
+
+  destroy (err, callback) {
+    if (typeof err === 'function') {
+      callback = err
+      err = null
+    }
+
+    if (callback === undefined) {
+      return new Promise((resolve, reject) => {
+        this.destroy(err, (err, data) => {
+          return err ? /* istanbul ignore next: should never error */ reject(err) : resolve(data)
+        })
+      })
+    }
+
+    if (typeof callback !== 'function') {
+      throw new InvalidArgumentError('invalid callback')
+    }
+
+    if (this[kDestroyed]) {
+      if (this[kOnDestroyed]) {
+        this[kOnDestroyed].push(callback)
+      } else {
+        queueMicrotask(() => callback(null, null))
+      }
+      return
+    }
+
+    if (!err) {
+      err = new ClientDestroyedError()
+    }
+
+    this[kDestroyed] = true
+    this[kOnDestroyed] = this[kOnDestroyed] || []
+    this[kOnDestroyed].push(callback)
+
+    const onDestroyed = () => {
+      const callbacks = this[kOnDestroyed]
+      this[kOnDestroyed] = null
+      for (let i = 0; i < callbacks.length; i++) {
+        callbacks[i](null, null)
+      }
+    }
+
+    // Should not error.
+    this[kDestroy](err).then(() => {
+      queueMicrotask(onDestroyed)
+    })
+  }
+
+  [kInterceptedDispatch] (opts, handler) {
+    if (!this[kInterceptors] || this[kInterceptors].length === 0) {
+      this[kInterceptedDispatch] = this[kDispatch]
+      return this[kDispatch](opts, handler)
+    }
+
+    let dispatch = this[kDispatch].bind(this)
+    for (let i = this[kInterceptors].length - 1; i >= 0; i--) {
+      dispatch = this[kInterceptors][i](dispatch)
+    }
+    this[kInterceptedDispatch] = dispatch
+    return dispatch(opts, handler)
+  }
+
+  dispatch (opts, handler) {
+    if (!handler || typeof handler !== 'object') {
+      throw new InvalidArgumentError('handler must be an object')
+    }
+
+    try {
+      if (!opts || typeof opts !== 'object') {
+        throw new InvalidArgumentError('opts must be an object.')
+      }
+
+      if (this[kDestroyed] || this[kOnDestroyed]) {
+        throw new ClientDestroyedError()
+      }
+
+      if (this[kClosed]) {
+        throw new ClientClosedError()
+      }
+
+      return this[kInterceptedDispatch](opts, handler)
+    } catch (err) {
+      if (typeof handler.onError !== 'function') {
+        throw new InvalidArgumentError('invalid onError method')
+      }
+
+      handler.onError(err)
+
+      return false
+    }
+  }
+}
+
+module.exports = DispatcherBase
+
+
+/***/ }),
+
+/***/ 9724:
+/***/ ((module, __unused_webpack_exports, __nccwpck_require__) => {
+
+
+
+const EventEmitter = __nccwpck_require__(4434)
+
+class Dispatcher extends EventEmitter {
+  dispatch () {
+    throw new Error('not implemented')
+  }
+
+  close () {
+    throw new Error('not implemented')
+  }
+
+  destroy () {
+    throw new Error('not implemented')
+  }
+}
+
+module.exports = Dispatcher
+
+
+/***/ }),
+
+/***/ 1380:
+/***/ ((module, __unused_webpack_exports, __nccwpck_require__) => {
+
+
+
+const Busboy = __nccwpck_require__(9766)
+const util = __nccwpck_require__(3465)
+const {
+  ReadableStreamFrom,
+  isBlobLike,
+  isReadableStreamLike,
+  readableStreamClose,
+  createDeferredPromise,
+  fullyReadBody
+} = __nccwpck_require__(2696)
+const { FormData } = __nccwpck_require__(2894)
+const { kState } = __nccwpck_require__(8323)
+const { webidl } = __nccwpck_require__(29)
+const { DOMException, structuredClone } = __nccwpck_require__(4135)
+const { Blob, File: NativeFile } = __nccwpck_require__(181)
+const { kBodyUsed } = __nccwpck_require__(4856)
+const assert = __nccwpck_require__(2613)
+const { isErrored } = __nccwpck_require__(3465)
+const { isUint8Array, isArrayBuffer } = __nccwpck_require__(8253)
+const { File: UndiciFile } = __nccwpck_require__(66)
+const { parseMIMEType, serializeAMimeType } = __nccwpck_require__(3911)
+
+let ReadableStream = globalThis.ReadableStream
+
+/** @type {globalThis['File']} */
+const File = NativeFile ?? UndiciFile
+const textEncoder = new TextEncoder()
+const textDecoder = new TextDecoder()
+
+// https://fetch.spec.whatwg.org/#concept-bodyinit-extract
+function extractBody (object, keepalive = false) {
+  if (!ReadableStream) {
+    ReadableStream = (__nccwpck_require__(3774).ReadableStream)
+  }
+
+  // 1. Let stream be null.
+  let stream = null
+
+  // 2. If object is a ReadableStream object, then set stream to object.
+  if (object instanceof ReadableStream) {
+    stream = object
+  } else if (isBlobLike(object)) {
+    // 3. Otherwise, if object is a Blob object, set stream to the
+    //    result of running object’s get stream.
+    stream = object.stream()
+  } else {
+    // 4. Otherwise, set stream to a new ReadableStream object, and set
+    //    up stream.
+    stream = new ReadableStream({
+      async pull (controller) {
+        controller.enqueue(
+          typeof source === 'string' ? textEncoder.encode(source) : source
+        )
+        queueMicrotask(() => readableStreamClose(controller))
+      },
+      start () {},
+      type: undefined
+    })
+  }
+
+  // 5. Assert: stream is a ReadableStream object.
+  assert(isReadableStreamLike(stream))
+
+  // 6. Let action be null.
+  let action = null
+
+  // 7. Let source be null.
+  let source = null
+
+  // 8. Let length be null.
+  let length = null
+
+  // 9. Let type be null.
+  let type = null
+
+  // 10. Switch on object:
+  if (typeof object === 'string') {
+    // Set source to the UTF-8 encoding of object.
+    // Note: setting source to a Uint8Array here breaks some mocking assumptions.
+    source = object
+
+    // Set type to `text/plain;charset=UTF-8`.
+    type = 'text/plain;charset=UTF-8'
+  } else if (object instanceof URLSearchParams) {
+    // URLSearchParams
+
+    // the spec says to run the application/x-www-form-urlencoded serializer on body's list
+    // this is implemented in Node.js as part of the URLSearchParams instance's toString method
+    // See: https://github.com/nodejs/node/blob/e46c680bf2b211bbd52cf959ca17ee98c7f657f5/lib/internal/url.js#L490
+    // and https://github.com/nodejs/node/blob/e46c680bf2b211bbd52cf959ca17ee98c7f657f5/lib/internal/url.js#L1100
+
+    // Set source to the result of running the application/x-www-form-urlencoded serializer with object’s list.
+    source = object.toString()
+
+    // Set type to `application/x-www-form-urlencoded;charset=UTF-8`.
+    type = 'application/x-www-form-urlencoded;charset=UTF-8'
+  } else if (isArrayBuffer(object)) {
+    // BufferSource/ArrayBuffer
+
+    // Set source to a copy of the bytes held by object.
+    source = new Uint8Array(object.slice())
+  } else if (ArrayBuffer.isView(object)) {
+    // BufferSource/ArrayBufferView
+
+    // Set source to a copy of the bytes held by object.
+    source = new Uint8Array(object.buffer.slice(object.byteOffset, object.byteOffset + object.byteLength))
+  } else if (util.isFormDataLike(object)) {
+    const boundary = `----formdata-undici-0${`${Math.floor(Math.random() * 1e11)}`.padStart(11, '0')}`
+    const prefix = `--${boundary}\r\nContent-Disposition: form-data`
+
+    /*! formdata-polyfill. MIT License. Jimmy Wärting <https://jimmy.warting.se/opensource> */
+    const escape = (str) =>
+      str.replace(/\n/g, '%0A').replace(/\r/g, '%0D').replace(/"/g, '%22')
+    const normalizeLinefeeds = (value) => value.replace(/\r?\n|\r/g, '\r\n')
+
+    // Set action to this step: run the multipart/form-data
+    // encoding algorithm, with object’s entry list and UTF-8.
+    // - This ensures that the body is immutable and can't be changed afterwards
+    // - That the content-length is calculated in advance.
+    // - And that all parts are pre-encoded and ready to be sent.
+
+    const blobParts = []
+    const rn = new Uint8Array([13, 10]) // '\r\n'
+    length = 0
+    let hasUnknownSizeValue = false
+
+    for (const [name, value] of object) {
+      if (typeof value === 'string') {
+        const chunk = textEncoder.encode(prefix +
+          `; name="${escape(normalizeLinefeeds(name))}"` +
+          `\r\n\r\n${normalizeLinefeeds(value)}\r\n`)
+        blobParts.push(chunk)
+        length += chunk.byteLength
+      } else {
+        const chunk = textEncoder.encode(`${prefix}; name="${escape(normalizeLinefeeds(name))}"` +
+          (value.name ? `; filename="${escape(value.name)}"` : '') + '\r\n' +
+          `Content-Type: ${
+            value.type || 'application/octet-stream'
+          }\r\n\r\n`)
+        blobParts.push(chunk, value, rn)
+        if (typeof value.size === 'number') {
+          length += chunk.byteLength + value.size + rn.byteLength
+        } else {
+          hasUnknownSizeValue = true
+        }
+      }
+    }
+
+    const chunk = textEncoder.encode(`--${boundary}--`)
+    blobParts.push(chunk)
+    length += chunk.byteLength
+    if (hasUnknownSizeValue) {
+      length = null
+    }
+
+    // Set source to object.
+    source = object
+
+    action = async function * () {
+      for (const part of blobParts) {
+        if (part.stream) {
+          yield * part.stream()
+        } else {
+          yield part
+        }
+      }
+    }
+
+    // Set type to `multipart/form-data; boundary=`,
+    // followed by the multipart/form-data boundary string generated
+    // by the multipart/form-data encoding algorithm.
+    type = 'multipart/form-data; boundary=' + boundary
+  } else if (isBlobLike(object)) {
+    // Blob
+
+    // Set source to object.
+    source = object
+
+    // Set length to object’s size.
+    length = object.size
+
+    // If object’s type attribute is not the empty byte sequence, set
+    // type to its value.
+    if (object.type) {
+      type = object.type
+    }
+  } else if (typeof object[Symbol.asyncIterator] === 'function') {
+    // If keepalive is true, then throw a TypeError.
+    if (keepalive) {
+      throw new TypeError('keepalive')
+    }
+
+    // If object is disturbed or locked, then throw a TypeError.
+    if (util.isDisturbed(object) || object.locked) {
+      throw new TypeError(
+        'Response body object should not be disturbed or locked'
+      )
+    }
+
+    stream =
+      object instanceof ReadableStream ? object : ReadableStreamFrom(object)
+  }
+
+  // 11. If source is a byte sequence, then set action to a
+  // step that returns source and length to source’s length.
+  if (typeof source === 'string' || util.isBuffer(source)) {
+    length = Buffer.byteLength(source)
+  }
+
+  // 12. If action is non-null, then run these steps in parallel:
+  if (action != null) {
+    // Run action.
+    let iterator
+    stream = new ReadableStream({
+      async start () {
+        iterator = action(object)[Symbol.asyncIterator]()
+      },
+      async pull (controller) {
+        const { value, done } = await iterator.next()
+        if (done) {
+          // When running action is done, close stream.
+          queueMicrotask(() => {
+            controller.close()
+          })
+        } else {
+          // Whenever one or more bytes are available and stream is not errored,
+          // enqueue a Uint8Array wrapping an ArrayBuffer containing the available
+          // bytes into stream.
+          if (!isErrored(stream)) {
+            controller.enqueue(new Uint8Array(value))
+          }
+        }
+        return controller.desiredSize > 0
+      },
+      async cancel (reason) {
+        await iterator.return()
+      },
+      type: undefined
+    })
+  }
+
+  // 13. Let body be a body whose stream is stream, source is source,
+  // and length is length.
+  const body = { stream, source, length }
+
+  // 14. Return (body, type).
+  return [body, type]
+}
+
+// https://fetch.spec.whatwg.org/#bodyinit-safely-extract
+function safelyExtractBody (object, keepalive = false) {
+  if (!ReadableStream) {
+    // istanbul ignore next
+    ReadableStream = (__nccwpck_require__(3774).ReadableStream)
+  }
+
+  // To safely extract a body and a `Content-Type` value from
+  // a byte sequence or BodyInit object object, run these steps:
+
+  // 1. If object is a ReadableStream object, then:
+  if (object instanceof ReadableStream) {
+    // Assert: object is neither disturbed nor locked.
+    // istanbul ignore next
+    assert(!util.isDisturbed(object), 'The body has already been consumed.')
+    // istanbul ignore next
+    assert(!object.locked, 'The stream is locked.')
+  }
+
+  // 2. Return the results of extracting object.
+  return extractBody(object, keepalive)
+}
+
+function cloneBody (body) {
+  // To clone a body body, run these steps:
+
+  // https://fetch.spec.whatwg.org/#concept-body-clone
+
+  // 1. Let « out1, out2 » be the result of teeing body’s stream.
+  const [out1, out2] = body.stream.tee()
+  const out2Clone = structuredClone(out2, { transfer: [out2] })
+  // This, for whatever reason, unrefs out2Clone, which allows
+  // the process to exit by itself.
+  const [, finalClone] = out2Clone.tee()
+
+  // 2. Set body’s stream to out1.
+  body.stream = out1
+
+  // 3. Return a body whose stream is out2 and other members are copied from body.
+  return {
+    stream: finalClone,
+    length: body.length,
+    source: body.source
+  }
+}
+
+async function * consumeBody (body) {
+  if (body) {
+    if (isUint8Array(body)) {
+      yield body
+    } else {
+      const stream = body.stream
+
+      if (util.isDisturbed(stream)) {
+        throw new TypeError('The body has already been consumed.')
+      }
+
+      if (stream.locked) {
+        throw new TypeError('The stream is locked.')
+      }
+
+      // Compat.
+      stream[kBodyUsed] = true
+
+      yield * stream
+    }
+  }
+}
+
+function throwIfAborted (state) {
+  if (state.aborted) {
+    throw new DOMException('The operation was aborted.', 'AbortError')
+  }
+}
+
+function bodyMixinMethods (instance) {
+  const methods = {
+    blob () {
+      // The blob() method steps are to return the result of
+      // running consume body with this and the following step
+      // given a byte sequence bytes: return a Blob whose
+      // contents are bytes and whose type attribute is this’s
+      // MIME type.
+      return specConsumeBody(this, (bytes) => {
+        let mimeType = bodyMimeType(this)
+
+        if (mimeType === 'failure') {
+          mimeType = ''
+        } else if (mimeType) {
+          mimeType = serializeAMimeType(mimeType)
+        }
+
+        // Return a Blob whose contents are bytes and type attribute
+        // is mimeType.
+        return new Blob([bytes], { type: mimeType })
+      }, instance)
+    },
+
+    arrayBuffer () {
+      // The arrayBuffer() method steps are to return the result
+      // of running consume body with this and the following step
+      // given a byte sequence bytes: return a new ArrayBuffer
+      // whose contents are bytes.
+      return specConsumeBody(this, (bytes) => {
+        return new Uint8Array(bytes).buffer
+      }, instance)
+    },
+
+    text () {
+      // The text() method steps are to return the result of running
+      // consume body with this and UTF-8 decode.
+      return specConsumeBody(this, utf8DecodeBytes, instance)
+    },
+
+    json () {
+      // The json() method steps are to return the result of running
+      // consume body with this and parse JSON from bytes.
+      return specConsumeBody(this, parseJSONFromBytes, instance)
+    },
+
+    async formData () {
+      webidl.brandCheck(this, instance)
+
+      throwIfAborted(this[kState])
+
+      const contentType = this.headers.get('Content-Type')
+
+      // If mimeType’s essence is "multipart/form-data", then:
+      if (/multipart\/form-data/.test(contentType)) {
+        const headers = {}
+        for (const [key, value] of this.headers) headers[key.toLowerCase()] = value
+
+        const responseFormData = new FormData()
+
+        let busboy
+
+        try {
+          busboy = new Busboy({
+            headers,
+            preservePath: true
+          })
+        } catch (err) {
+          throw new DOMException(`${err}`, 'AbortError')
+        }
+
+        busboy.on('field', (name, value) => {
+          responseFormData.append(name, value)
+        })
+        busboy.on('file', (name, value, filename, encoding, mimeType) => {
+          const chunks = []
+
+          if (encoding === 'base64' || encoding.toLowerCase() === 'base64') {
+            let base64chunk = ''
+
+            value.on('data', (chunk) => {
+              base64chunk += chunk.toString().replace(/[\r\n]/gm, '')
+
+              const end = base64chunk.length - base64chunk.length % 4
+              chunks.push(Buffer.from(base64chunk.slice(0, end), 'base64'))
+
+              base64chunk = base64chunk.slice(end)
+            })
+            value.on('end', () => {
+              chunks.push(Buffer.from(base64chunk, 'base64'))
+              responseFormData.append(name, new File(chunks, filename, { type: mimeType }))
+            })
+          } else {
+            value.on('data', (chunk) => {
+              chunks.push(chunk)
+            })
+            value.on('end', () => {
+              responseFormData.append(name, new File(chunks, filename, { type: mimeType }))
+            })
+          }
+        })
+
+        const busboyResolve = new Promise((resolve, reject) => {
+          busboy.on('finish', resolve)
+          busboy.on('error', (err) => reject(new TypeError(err)))
+        })
+
+        if (this.body !== null) for await (const chunk of consumeBody(this[kState].body)) busboy.write(chunk)
+        busboy.end()
+        await busboyResolve
+
+        return responseFormData
+      } else if (/application\/x-www-form-urlencoded/.test(contentType)) {
+        // Otherwise, if mimeType’s essence is "application/x-www-form-urlencoded", then:
+
+        // 1. Let entries be the result of parsing bytes.
+        let entries
+        try {
+          let text = ''
+          // application/x-www-form-urlencoded parser will keep the BOM.
+          // https://url.spec.whatwg.org/#concept-urlencoded-parser
+          // Note that streaming decoder is stateful and cannot be reused
+          const streamingDecoder = new TextDecoder('utf-8', { ignoreBOM: true })
+
+          for await (const chunk of consumeBody(this[kState].body)) {
+            if (!isUint8Array(chunk)) {
+              throw new TypeError('Expected Uint8Array chunk')
+            }
+            text += streamingDecoder.decode(chunk, { stream: true })
+          }
+          text += streamingDecoder.decode()
+          entries = new URLSearchParams(text)
+        } catch (err) {
+          // istanbul ignore next: Unclear when new URLSearchParams can fail on a string.
+          // 2. If entries is failure, then throw a TypeError.
+          throw Object.assign(new TypeError(), { cause: err })
+        }
+
+        // 3. Return a new FormData object whose entries are entries.
+        const formData = new FormData()
+        for (const [name, value] of entries) {
+          formData.append(name, value)
+        }
+        return formData
+      } else {
+        // Wait a tick before checking if the request has been aborted.
+        // Otherwise, a TypeError can be thrown when an AbortError should be thrown instead.
+        await Promise.resolve()
+
+        throwIfAborted(this[kState])
+
+        // Otherwise, throw a TypeError.
+        throw webidl.errors.exception({
+          header: `${instance.name}.formData`,
+          message: 'Could not parse content as FormData.'
+        })
+      }
+    }
+  }
+
+  return methods
+}
+
+function mixinBody (prototype) {
+  Object.assign(prototype.prototype, bodyMixinMethods(prototype))
+}
+
+/**
+ * @see https://fetch.spec.whatwg.org/#concept-body-consume-body
+ * @param {Response|Request} object
+ * @param {(value: unknown) => unknown} convertBytesToJSValue
+ * @param {Response|Request} instance
+ */
+async function specConsumeBody (object, convertBytesToJSValue, instance) {
+  webidl.brandCheck(object, instance)
+
+  throwIfAborted(object[kState])
+
+  // 1. If object is unusable, then return a promise rejected
+  //    with a TypeError.
+  if (bodyUnusable(object[kState].body)) {
+    throw new TypeError('Body is unusable')
+  }
+
+  // 2. Let promise be a new promise.
+  const promise = createDeferredPromise()
+
+  // 3. Let errorSteps given error be to reject promise with error.
+  const errorSteps = (error) => promise.reject(error)
+
+  // 4. Let successSteps given a byte sequence data be to resolve
+  //    promise with the result of running convertBytesToJSValue
+  //    with data. If that threw an exception, then run errorSteps
+  //    with that exception.
+  const successSteps = (data) => {
+    try {
+      promise.resolve(convertBytesToJSValue(data))
+    } catch (e) {
+      errorSteps(e)
+    }
+  }
+
+  // 5. If object’s body is null, then run successSteps with an
+  //    empty byte sequence.
+  if (object[kState].body == null) {
+    successSteps(new Uint8Array())
+    return promise.promise
+  }
+
+  // 6. Otherwise, fully read object’s body given successSteps,
+  //    errorSteps, and object’s relevant global object.
+  await fullyReadBody(object[kState].body, successSteps, errorSteps)
+
+  // 7. Return promise.
+  return promise.promise
+}
+
+// https://fetch.spec.whatwg.org/#body-unusable
+function bodyUnusable (body) {
+  // An object including the Body interface mixin is
+  // said to be unusable if its body is non-null and
+  // its body’s stream is disturbed or locked.
+  return body != null && (body.stream.locked || util.isDisturbed(body.stream))
+}
+
+/**
+ * @see https://encoding.spec.whatwg.org/#utf-8-decode
+ * @param {Buffer} buffer
+ */
+function utf8DecodeBytes (buffer) {
+  if (buffer.length === 0) {
+    return ''
+  }
+
+  // 1. Let buffer be the result of peeking three bytes from
+  //    ioQueue, converted to a byte sequence.
+
+  // 2. If buffer is 0xEF 0xBB 0xBF, then read three
+  //    bytes from ioQueue. (Do nothing with those bytes.)
+  if (buffer[0] === 0xEF && buffer[1] === 0xBB && buffer[2] === 0xBF) {
+    buffer = buffer.subarray(3)
+  }
+
+  // 3. Process a queue with an instance of UTF-8’s
+  //    decoder, ioQueue, output, and "replacement".
+  const output = textDecoder.decode(buffer)
+
+  // 4. Return output.
+  return output
+}
+
+/**
+ * @see https://infra.spec.whatwg.org/#parse-json-bytes-to-a-javascript-value
+ * @param {Uint8Array} bytes
+ */
+function parseJSONFromBytes (bytes) {
+  return JSON.parse(utf8DecodeBytes(bytes))
+}
+
+/**
+ * @see https://fetch.spec.whatwg.org/#concept-body-mime-type
+ * @param {import('./response').Response|import('./request').Request} object
+ */
+function bodyMimeType (object) {
+  const { headersList } = object[kState]
+  const contentType = headersList.get('content-type')
+
+  if (contentType === null) {
+    return 'failure'
+  }
+
+  return parseMIMEType(contentType)
+}
+
+module.exports = {
+  extractBody,
+  safelyExtractBody,
+  cloneBody,
+  mixinBody
+}
+
+
+/***/ }),
+
+/***/ 4135:
+/***/ ((module, __unused_webpack_exports, __nccwpck_require__) => {
+
+
+
+const { MessageChannel, receiveMessageOnPort } = __nccwpck_require__(8167)
+
+const corsSafeListedMethods = ['GET', 'HEAD', 'POST']
+const corsSafeListedMethodsSet = new Set(corsSafeListedMethods)
+
+const nullBodyStatus = [101, 204, 205, 304]
+
+const redirectStatus = [301, 302, 303, 307, 308]
+const redirectStatusSet = new Set(redirectStatus)
+
+// https://fetch.spec.whatwg.org/#block-bad-port
+const badPorts = [
+  '1', '7', '9', '11', '13', '15', '17', '19', '20', '21', '22', '23', '25', '37', '42', '43', '53', '69', '77', '79',
+  '87', '95', '101', '102', '103', '104', '109', '110', '111', '113', '115', '117', '119', '123', '135', '137',
+  '139', '143', '161', '179', '389', '427', '465', '512', '513', '514', '515', '526', '530', '531', '532',
+  '540', '548', '554', '556', '563', '587', '601', '636', '989', '990', '993', '995', '1719', '1720', '1723',
+  '2049', '3659', '4045', '5060', '5061', '6000', '6566', '6665', '6666', '6667', '6668', '6669', '6697',
+  '10080'
+]
+
+const badPortsSet = new Set(badPorts)
+
+// https://w3c.github.io/webappsec-referrer-policy/#referrer-policies
+const referrerPolicy = [
+  '',
+  'no-referrer',
+  'no-referrer-when-downgrade',
+  'same-origin',
+  'origin',
+  'strict-origin',
+  'origin-when-cross-origin',
+  'strict-origin-when-cross-origin',
+  'unsafe-url'
+]
+const referrerPolicySet = new Set(referrerPolicy)
+
+const requestRedirect = ['follow', 'manual', 'error']
+
+const safeMethods = ['GET', 'HEAD', 'OPTIONS', 'TRACE']
+const safeMethodsSet = new Set(safeMethods)
+
+const requestMode = ['navigate', 'same-origin', 'no-cors', 'cors']
+
+const requestCredentials = ['omit', 'same-origin', 'include']
+
+const requestCache = [
+  'default',
+  'no-store',
+  'reload',
+  'no-cache',
+  'force-cache',
+  'only-if-cached'
+]
+
+// https://fetch.spec.whatwg.org/#request-body-header-name
+const requestBodyHeader = [
+  'content-encoding',
+  'content-language',
+  'content-location',
+  'content-type',
+  // See https://github.com/nodejs/undici/issues/2021
+  // 'Content-Length' is a forbidden header name, which is typically
+  // removed in the Headers implementation. However, undici doesn't
+  // filter out headers, so we add it here.
+  'content-length'
+]
+
+// https://fetch.spec.whatwg.org/#enumdef-requestduplex
+const requestDuplex = [
+  'half'
+]
+
+// http://fetch.spec.whatwg.org/#forbidden-method
+const forbiddenMethods = ['CONNECT', 'TRACE', 'TRACK']
+const forbiddenMethodsSet = new Set(forbiddenMethods)
+
+const subresource = [
+  'audio',
+  'audioworklet',
+  'font',
+  'image',
+  'manifest',
+  'paintworklet',
+  'script',
+  'style',
+  'track',
+  'video',
+  'xslt',
+  ''
+]
+const subresourceSet = new Set(subresource)
+
+/** @type {globalThis['DOMException']} */
+const DOMException = globalThis.DOMException ?? (() => {
+  // DOMException was only made a global in Node v17.0.0,
+  // but fetch supports >= v16.8.
+  try {
+    atob('~')
+  } catch (err) {
+    return Object.getPrototypeOf(err).constructor
+  }
+})()
+
+let channel
+
+/** @type {globalThis['structuredClone']} */
+const structuredClone =
+  globalThis.structuredClone ??
+  // https://github.com/nodejs/node/blob/b27ae24dcc4251bad726d9d84baf678d1f707fed/lib/internal/structured_clone.js
+  // structuredClone was added in v17.0.0, but fetch supports v16.8
+  function structuredClone (value, options = undefined) {
+    if (arguments.length === 0) {
+      throw new TypeError('missing argument')
+    }
+
+    if (!channel) {
+      channel = new MessageChannel()
+    }
+    channel.port1.unref()
+    channel.port2.unref()
+    channel.port1.postMessage(value, options?.transfer)
+    return receiveMessageOnPort(channel.port2).message
+  }
+
+module.exports = {
+  DOMException,
+  structuredClone,
+  subresource,
+  forbiddenMethods,
+  requestBodyHeader,
+  referrerPolicy,
+  requestRedirect,
+  requestMode,
+  requestCredentials,
+  requestCache,
+  redirectStatus,
+  corsSafeListedMethods,
+  nullBodyStatus,
+  safeMethods,
+  badPorts,
+  requestDuplex,
+  subresourceSet,
+  badPortsSet,
+  redirectStatusSet,
+  corsSafeListedMethodsSet,
+  safeMethodsSet,
+  forbiddenMethodsSet,
+  referrerPolicySet
+}
+
+
+/***/ }),
+
+/***/ 3911:
+/***/ ((module, __unused_webpack_exports, __nccwpck_require__) => {
+
+const assert = __nccwpck_require__(2613)
+const { atob } = __nccwpck_require__(181)
+const { isomorphicDecode } = __nccwpck_require__(2696)
+
+const encoder = new TextEncoder()
+
+/**
+ * @see https://mimesniff.spec.whatwg.org/#http-token-code-point
+ */
+const HTTP_TOKEN_CODEPOINTS = /^[!#$%&'*+-.^_|~A-Za-z0-9]+$/
+const HTTP_WHITESPACE_REGEX = /(\u000A|\u000D|\u0009|\u0020)/ // eslint-disable-line
+/**
+ * @see https://mimesniff.spec.whatwg.org/#http-quoted-string-token-code-point
+ */
+const HTTP_QUOTED_STRING_TOKENS = /[\u0009|\u0020-\u007E|\u0080-\u00FF]/ // eslint-disable-line
+
+// https://fetch.spec.whatwg.org/#data-url-processor
+/** @param {URL} dataURL */
+function dataURLProcessor (dataURL) {
+  // 1. Assert: dataURL’s scheme is "data".
+  assert(dataURL.protocol === 'data:')
+
+  // 2. Let input be the result of running the URL
+  // serializer on dataURL with exclude fragment
+  // set to true.
+  let input = URLSerializer(dataURL, true)
+
+  // 3. Remove the leading "data:" string from input.
+  input = input.slice(5)
+
+  // 4. Let position point at the start of input.
+  const position = { position: 0 }
+
+  // 5. Let mimeType be the result of collecting a
+  // sequence of code points that are not equal
+  // to U+002C (,), given position.
+  let mimeType = collectASequenceOfCodePointsFast(
+    ',',
+    input,
+    position
+  )
+
+  // 6. Strip leading and trailing ASCII whitespace
+  // from mimeType.
+  // Undici implementation note: we need to store the
+  // length because if the mimetype has spaces removed,
+  // the wrong amount will be sliced from the input in
+  // step #9
+  const mimeTypeLength = mimeType.length
+  mimeType = removeASCIIWhitespace(mimeType, true, true)
+
+  // 7. If position is past the end of input, then
+  // return failure
+  if (position.position >= input.length) {
+    return 'failure'
+  }
+
+  // 8. Advance position by 1.
+  position.position++
+
+  // 9. Let encodedBody be the remainder of input.
+  const encodedBody = input.slice(mimeTypeLength + 1)
+
+  // 10. Let body be the percent-decoding of encodedBody.
+  let body = stringPercentDecode(encodedBody)
+
+  // 11. If mimeType ends with U+003B (;), followed by
+  // zero or more U+0020 SPACE, followed by an ASCII
+  // case-insensitive match for "base64", then:
+  if (/;(\u0020){0,}base64$/i.test(mimeType)) {
+    // 1. Let stringBody be the isomorphic decode of body.
+    const stringBody = isomorphicDecode(body)
+
+    // 2. Set body to the forgiving-base64 decode of
+    // stringBody.
+    body = forgivingBase64(stringBody)
+
+    // 3. If body is failure, then return failure.
+    if (body === 'failure') {
+      return 'failure'
+    }
+
+    // 4. Remove the last 6 code points from mimeType.
+    mimeType = mimeType.slice(0, -6)
+
+    // 5. Remove trailing U+0020 SPACE code points from mimeType,
+    // if any.
+    mimeType = mimeType.replace(/(\u0020)+$/, '')
+
+    // 6. Remove the last U+003B (;) code point from mimeType.
+    mimeType = mimeType.slice(0, -1)
+  }
+
+  // 12. If mimeType starts with U+003B (;), then prepend
+  // "text/plain" to mimeType.
+  if (mimeType.startsWith(';')) {
+    mimeType = 'text/plain' + mimeType
+  }
+
+  // 13. Let mimeTypeRecord be the result of parsing
+  // mimeType.
+  let mimeTypeRecord = parseMIMEType(mimeType)
+
+  // 14. If mimeTypeRecord is failure, then set
+  // mimeTypeRecord to text/plain;charset=US-ASCII.
+  if (mimeTypeRecord === 'failure') {
+    mimeTypeRecord = parseMIMEType('text/plain;charset=US-ASCII')
+  }
+
+  // 15. Return a new data: URL struct whose MIME
+  // type is mimeTypeRecord and body is body.
+  // https://fetch.spec.whatwg.org/#data-url-struct
+  return { mimeType: mimeTypeRecord, body }
+}
+
+// https://url.spec.whatwg.org/#concept-url-serializer
+/**
+ * @param {URL} url
+ * @param {boolean} excludeFragment
+ */
+function URLSerializer (url, excludeFragment = false) {
+  if (!excludeFragment) {
+    return url.href
+  }
+
+  const href = url.href
+  const hashLength = url.hash.length
+
+  return hashLength === 0 ? href : href.substring(0, href.length - hashLength)
+}
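+// e.g. URLSerializer(new URL('https://example.com/a?b=1#frag'), true) returns
+// 'https://example.com/a?b=1', i.e. the serialized URL with the fragment removed.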
+
+// https://infra.spec.whatwg.org/#collect-a-sequence-of-code-points
+/**
+ * @param {(char: string) => boolean} condition
+ * @param {string} input
+ * @param {{ position: number }} position
+ */
+function collectASequenceOfCodePoints (condition, input, position) {
+  // 1. Let result be the empty string.
+  let result = ''
+
+  // 2. While position doesn’t point past the end of input and the
+  // code point at position within input meets the condition condition:
+  while (position.position < input.length && condition(input[position.position])) {
+    // 1. Append that code point to the end of result.
+    result += input[position.position]
+
+    // 2. Advance position by 1.
+    position.position++
+  }
+
+  // 3. Return result.
+  return result
+}
+
+/**
+ * A faster collectASequenceOfCodePoints that only works when comparing a single character.
+ * @param {string} char
+ * @param {string} input
+ * @param {{ position: number }} position
+ */
+function collectASequenceOfCodePointsFast (char, input, position) {
+  const idx = input.indexOf(char, position.position)
+  const start = position.position
+
+  if (idx === -1) {
+    position.position = input.length
+    return input.slice(start)
+  }
+
+  position.position = idx
+  return input.slice(start, position.position)
+}
+
+// https://url.spec.whatwg.org/#string-percent-decode
+/** @param {string} input */
+function stringPercentDecode (input) {
+  // 1. Let bytes be the UTF-8 encoding of input.
+  const bytes = encoder.encode(input)
+
+  // 2. Return the percent-decoding of bytes.
+  return percentDecode(bytes)
+}
+
+// https://url.spec.whatwg.org/#percent-decode
+/** @param {Uint8Array} input */
+function percentDecode (input) {
+  // 1. Let output be an empty byte sequence.
+  /** @type {number[]} */
+  const output = []
+
+  // 2. For each byte byte in input:
+  for (let i = 0; i < input.length; i++) {
+    const byte = input[i]
+
+    // 1. If byte is not 0x25 (%), then append byte to output.
+    if (byte !== 0x25) {
+      output.push(byte)
+
+    // 2. Otherwise, if byte is 0x25 (%) and the next two bytes
+    // after byte in input are not in the ranges
+    // 0x30 (0) to 0x39 (9), 0x41 (A) to 0x46 (F),
+    // and 0x61 (a) to 0x66 (f), all inclusive, append byte
+    // to output.
+    } else if (
+      byte === 0x25 &&
+      !/^[0-9A-Fa-f]{2}$/i.test(String.fromCharCode(input[i + 1], input[i + 2]))
+    ) {
+      output.push(0x25)
+
+    // 3. Otherwise:
+    } else {
+      // 1. Let bytePoint be the two bytes after byte in input,
+      // decoded, and then interpreted as hexadecimal number.
+      const nextTwoBytes = String.fromCharCode(input[i + 1], input[i + 2])
+      const bytePoint = Number.parseInt(nextTwoBytes, 16)
+
+      // 2. Append a byte whose value is bytePoint to output.
+      output.push(bytePoint)
+
+      // 3. Skip the next two bytes in input.
+      i += 2
+    }
+  }
+
+  // 3. Return output.
+  return Uint8Array.from(output)
+}
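+// e.g. stringPercentDecode('a%20b') returns the bytes 0x61 0x20 0x62 ('a b');
+// a '%' that is not followed by two hex digits is passed through unchanged.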
+
+// https://mimesniff.spec.whatwg.org/#parse-a-mime-type
+/** @param {string} input */
+function parseMIMEType (input) {
+  // 1. Remove any leading and trailing HTTP whitespace
+  // from input.
+  input = removeHTTPWhitespace(input, true, true)
+
+  // 2. Let position be a position variable for input,
+  // initially pointing at the start of input.
+  const position = { position: 0 }
+
+  // 3. Let type be the result of collecting a sequence
+  // of code points that are not U+002F (/) from
+  // input, given position.
+  const type = collectASequenceOfCodePointsFast(
+    '/',
+    input,
+    position
+  )
+
+  // 4. If type is the empty string or does not solely
+  // contain HTTP token code points, then return failure.
+  // https://mimesniff.spec.whatwg.org/#http-token-code-point
+  if (type.length === 0 || !HTTP_TOKEN_CODEPOINTS.test(type)) {
+    return 'failure'
+  }
+
+  // 5. If position is past the end of input, then return
+  // failure
+  if (position.position > input.length) {
+    return 'failure'
+  }
+
+  // 6. Advance position by 1. (This skips past U+002F (/).)
+  position.position++
+
+  // 7. Let subtype be the result of collecting a sequence of
+  // code points that are not U+003B (;) from input, given
+  // position.
+  let subtype = collectASequenceOfCodePointsFast(
+    ';',
+    input,
+    position
+  )
+
+  // 8. Remove any trailing HTTP whitespace from subtype.
+  subtype = removeHTTPWhitespace(subtype, false, true)
+
+  // 9. If subtype is the empty string or does not solely
+  // contain HTTP token code points, then return failure.
+  if (subtype.length === 0 || !HTTP_TOKEN_CODEPOINTS.test(subtype)) {
+    return 'failure'
+  }
+
+  const typeLowercase = type.toLowerCase()
+  const subtypeLowercase = subtype.toLowerCase()
+
+  // 10. Let mimeType be a new MIME type record whose type
+  // is type, in ASCII lowercase, and subtype is subtype,
+  // in ASCII lowercase.
+  // https://mimesniff.spec.whatwg.org/#mime-type
+  const mimeType = {
+    type: typeLowercase,
+    subtype: subtypeLowercase,
+    /** @type {Map<string, string>} */
+    parameters: new Map(),
+    // https://mimesniff.spec.whatwg.org/#mime-type-essence
+    essence: `${typeLowercase}/${subtypeLowercase}`
+  }
+
+  // 11. While position is not past the end of input:
+  while (position.position < input.length) {
+    // 1. Advance position by 1. (This skips past U+003B (;).)
+    position.position++
+
+    // 2. Collect a sequence of code points that are HTTP
+    // whitespace from input given position.
+    collectASequenceOfCodePoints(
+      // https://fetch.spec.whatwg.org/#http-whitespace
+      char => HTTP_WHITESPACE_REGEX.test(char),
+      input,
+      position
+    )
+
+    // 3. Let parameterName be the result of collecting a
+    // sequence of code points that are not U+003B (;)
+    // or U+003D (=) from input, given position.
+    let parameterName = collectASequenceOfCodePoints(
+      (char) => char !== ';' && char !== '=',
+      input,
+      position
+    )
+
+    // 4. Set parameterName to parameterName, in ASCII
+    // lowercase.
+    parameterName = parameterName.toLowerCase()
+
+    // 5. If position is not past the end of input, then:
+    if (position.position < input.length) {
+      // 1. If the code point at position within input is
+      // U+003B (;), then continue.
+      if (input[position.position] === ';') {
+        continue
+      }
+
+      // 2. Advance position by 1. (This skips past U+003D (=).)
+      position.position++
+    }
+
+    // 6. If position is past the end of input, then break.
+    if (position.position > input.length) {
+      break
+    }
+
+    // 7. Let parameterValue be null.
+    let parameterValue = null
+
+    // 8. If the code point at position within input is
+    // U+0022 ("), then:
+    if (input[position.position] === '"') {
+      // 1. Set parameterValue to the result of collecting
+      // an HTTP quoted string from input, given position
+      // and the extract-value flag.
+      parameterValue = collectAnHTTPQuotedString(input, position, true)
+
+      // 2. Collect a sequence of code points that are not
+      // U+003B (;) from input, given position.
+      collectASequenceOfCodePointsFast(
+        ';',
+        input,
+        position
+      )
+
+    // 9. Otherwise:
+    } else {
+      // 1. Set parameterValue to the result of collecting
+      // a sequence of code points that are not U+003B (;)
+      // from input, given position.
+      parameterValue = collectASequenceOfCodePointsFast(
+        ';',
+        input,
+        position
+      )
+
+      // 2. Remove any trailing HTTP whitespace from parameterValue.
+      parameterValue = removeHTTPWhitespace(parameterValue, false, true)
+
+      // 3. If parameterValue is the empty string, then continue.
+      if (parameterValue.length === 0) {
+        continue
+      }
+    }
+
+    // 10. If all of the following are true
+    // - parameterName is not the empty string
+    // - parameterName solely contains HTTP token code points
+    // - parameterValue solely contains HTTP quoted-string token code points
+    // - mimeType’s parameters[parameterName] does not exist
+    // then set mimeType’s parameters[parameterName] to parameterValue.
+    if (
+      parameterName.length !== 0 &&
+      HTTP_TOKEN_CODEPOINTS.test(parameterName) &&
+      (parameterValue.length === 0 || HTTP_QUOTED_STRING_TOKENS.test(parameterValue)) &&
+      !mimeType.parameters.has(parameterName)
+    ) {
+      mimeType.parameters.set(parameterName, parameterValue)
+    }
+  }
+
+  // 12. Return mimeType.
+  return mimeType
+}
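+// Illustrative result: parseMIMEType('text/HTML; charset="UTF-8"') returns a
+// record with type 'text', subtype 'html', essence 'text/html' and a parameters
+// Map containing 'charset' -> 'UTF-8'; inputs without a valid type/subtype pair
+// return 'failure'.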
+
+// https://infra.spec.whatwg.org/#forgiving-base64-decode
+/** @param {string} data */
+function forgivingBase64 (data) {
+  // 1. Remove all ASCII whitespace from data.
+  data = data.replace(/[\u0009\u000A\u000C\u000D\u0020]/g, '')  // eslint-disable-line
+
+  // 2. If data’s code point length divides by 4 leaving
+  // no remainder, then:
+  if (data.length % 4 === 0) {
+    // 1. If data ends with one or two U+003D (=) code points,
+    // then remove them from data.
+    data = data.replace(/=?=$/, '')
+  }
+
+  // 3. If data’s code point length divides by 4 leaving
+  // a remainder of 1, then return failure.
+  if (data.length % 4 === 1) {
+    return 'failure'
+  }
+
+  // 4. If data contains a code point that is not one of
+  //  U+002B (+)
+  //  U+002F (/)
+  //  ASCII alphanumeric
+  // then return failure.
+  if (/[^+/0-9A-Za-z]/.test(data)) {
+    return 'failure'
+  }
+
+  const binary = atob(data)
+  const bytes = new Uint8Array(binary.length)
+
+  for (let byte = 0; byte < binary.length; byte++) {
+    bytes[byte] = binary.charCodeAt(byte)
+  }
+
+  return bytes
+}
+
+// https://fetch.spec.whatwg.org/#collect-an-http-quoted-string
+// tests: https://fetch.spec.whatwg.org/#example-http-quoted-string
+/**
+ * @param {string} input
+ * @param {{ position: number }} position
+ * @param {boolean?} extractValue
+ */
+function collectAnHTTPQuotedString (input, position, extractValue) {
+  // 1. Let positionStart be position.
+  const positionStart = position.position
+
+  // 2. Let value be the empty string.
+  let value = ''
+
+  // 3. Assert: the code point at position within input
+  // is U+0022 (").
+  assert(input[position.position] === '"')
+
+  // 4. Advance position by 1.
+  position.position++
+
+  // 5. While true:
+  while (true) {
+    // 1. Append the result of collecting a sequence of code points
+    // that are not U+0022 (") or U+005C (\) from input, given
+    // position, to value.
+    value += collectASequenceOfCodePoints(
+      (char) => char !== '"' && char !== '\\',
+      input,
+      position
+    )
+
+    // 2. If position is past the end of input, then break.
+    if (position.position >= input.length) {
+      break
+    }
+
+    // 3. Let quoteOrBackslash be the code point at position within
+    // input.
+    const quoteOrBackslash = input[position.position]
+
+    // 4. Advance position by 1.
+    position.position++
+
+    // 5. If quoteOrBackslash is U+005C (\), then:
+    if (quoteOrBackslash === '\\') {
+      // 1. If position is past the end of input, then append
+      // U+005C (\) to value and break.
+      if (position.position >= input.length) {
+        value += '\\'
+        break
+      }
+
+      // 2. Append the code point at position within input to value.
+      value += input[position.position]
+
+      // 3. Advance position by 1.
+      position.position++
+
+    // 6. Otherwise:
+    } else {
+      // 1. Assert: quoteOrBackslash is U+0022 (").
+      assert(quoteOrBackslash === '"')
+
+      // 2. Break.
+      break
+    }
+  }
+
+  // 6. If the extract-value flag is set, then return value.
+  if (extractValue) {
+    return value
+  }
+
+  // 7. Return the code points from positionStart to position,
+  // inclusive, within input.
+  return input.slice(positionStart, position.position)
+}
+
+/**
+ * @see https://mimesniff.spec.whatwg.org/#serialize-a-mime-type
+ */
+function serializeAMimeType (mimeType) {
+  assert(mimeType !== 'failure')
+  const { parameters, essence } = mimeType
+
+  // 1. Let serialization be the concatenation of mimeType’s
+  //    type, U+002F (/), and mimeType’s subtype.
+  let serialization = essence
+
+  // 2. For each name → value of mimeType’s parameters:
+  for (let [name, value] of parameters.entries()) {
+    // 1. Append U+003B (;) to serialization.
+    serialization += ';'
+
+    // 2. Append name to serialization.
+    serialization += name
+
+    // 3. Append U+003D (=) to serialization.
+    serialization += '='
+
+    // 4. If value does not solely contain HTTP token code
+    //    points or value is the empty string, then:
+    if (!HTTP_TOKEN_CODEPOINTS.test(value)) {
+      // 1. Precede each occurrence of U+0022 (") or
+      //    U+005C (\) in value with U+005C (\).
+      value = value.replace(/(\\|")/g, '\\$1')
+
+      // 2. Prepend U+0022 (") to value.
+      value = '"' + value
+
+      // 3. Append U+0022 (") to value.
+      value += '"'
+    }
+
+    // 5. Append value to serialization.
+    serialization += value
+  }
+
+  // 3. Return serialization.
+  return serialization
+}
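+// Illustrative examples. serializeAMimeType expects the record shape produced
+// by parseMIMEType (an `essence` string plus a `parameters` Map):
+//
+//   serializeAMimeType(parseMIMEType('text/plain;charset=utf-8'))
+//   // => 'text/plain;charset=utf-8'
+//   serializeAMimeType({ essence: 'text/plain', parameters: new Map([['charset', 'utf 8']]) })
+//   // => 'text/plain;charset="utf 8"'  (non-token values are quoted and escaped)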
+
+/**
+ * @see https://fetch.spec.whatwg.org/#http-whitespace
+ * @param {string} char
+ */
+function isHTTPWhiteSpace (char) {
+  return char === '\r' || char === '\n' || char === '\t' || char === ' '
+}
+
+/**
+ * @see https://fetch.spec.whatwg.org/#http-whitespace
+ * @param {string} str
+ */
+function removeHTTPWhitespace (str, leading = true, trailing = true) {
+  let lead = 0
+  let trail = str.length - 1
+
+  if (leading) {
+    for (; lead < str.length && isHTTPWhiteSpace(str[lead]); lead++);
+  }
+
+  if (trailing) {
+    for (; trail > 0 && isHTTPWhiteSpace(str[trail]); trail--);
+  }
+
+  return str.slice(lead, trail + 1)
+}
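+// Illustrative examples:
+//
+//   removeHTTPWhitespace('  gzip\t')       // => 'gzip'
+//   removeHTTPWhitespace(' gzip ', false)  // => ' gzip'  (leading kept, trailing stripped)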
+
+/**
+ * @see https://infra.spec.whatwg.org/#ascii-whitespace
+ * @param {string} char
+ */
+function isASCIIWhitespace (char) {
+  return char === '\r' || char === '\n' || char === '\t' || char === '\f' || char === ' '
+}
+
+/**
+ * @see https://infra.spec.whatwg.org/#strip-leading-and-trailing-ascii-whitespace
+ */
+function removeASCIIWhitespace (str, leading = true, trailing = true) {
+  let lead = 0
+  let trail = str.length - 1
+
+  if (leading) {
+    for (; lead < str.length && isASCIIWhitespace(str[lead]); lead++);
+  }
+
+  if (trailing) {
+    for (; trail > 0 && isASCIIWhitespace(str[trail]); trail--);
+  }
+
+  return str.slice(lead, trail + 1)
+}
+
+module.exports = {
+  dataURLProcessor,
+  URLSerializer,
+  collectASequenceOfCodePoints,
+  collectASequenceOfCodePointsFast,
+  stringPercentDecode,
+  parseMIMEType,
+  collectAnHTTPQuotedString,
+  serializeAMimeType
+}
+
+
+/***/ }),
+
+/***/ 66:
+/***/ ((module, __unused_webpack_exports, __nccwpck_require__) => {
+
+
+
+const { Blob, File: NativeFile } = __nccwpck_require__(181)
+const { types } = __nccwpck_require__(9023)
+const { kState } = __nccwpck_require__(8323)
+const { isBlobLike } = __nccwpck_require__(2696)
+const { webidl } = __nccwpck_require__(29)
+const { parseMIMEType, serializeAMimeType } = __nccwpck_require__(3911)
+const { kEnumerableProperty } = __nccwpck_require__(3465)
+const encoder = new TextEncoder()
+
+class File extends Blob {
+  constructor (fileBits, fileName, options = {}) {
+    // The File constructor is invoked with two or three parameters, depending
+    // on whether the optional dictionary parameter is used. When the File()
+    // constructor is invoked, user agents must run the following steps:
+    webidl.argumentLengthCheck(arguments, 2, { header: 'File constructor' })
+
+    fileBits = webidl.converters['sequence<BlobPart>'](fileBits)
+    fileName = webidl.converters.USVString(fileName)
+    options = webidl.converters.FilePropertyBag(options)
+
+    // 1. Let bytes be the result of processing blob parts given fileBits and
+    // options.
+    // Note: Blob handles this for us
+
+    // 2. Let n be the fileName argument to the constructor.
+    const n = fileName
+
+    // 3. Process FilePropertyBag dictionary argument by running the following
+    // substeps:
+
+    //    1. If the type member is provided and is not the empty string, let t
+    //    be set to the type dictionary member. If t contains any characters
+    //    outside the range U+0020 to U+007E, then set t to the empty string
+    //    and return from these substeps.
+    //    2. Convert every character in t to ASCII lowercase.
+    let t = options.type
+    let d
+
+    // eslint-disable-next-line no-labels
+    substep: {
+      if (t) {
+        t = parseMIMEType(t)
+
+        if (t === 'failure') {
+          t = ''
+          // eslint-disable-next-line no-labels
+          break substep
+        }
+
+        t = serializeAMimeType(t).toLowerCase()
+      }
+
+      //    3. If the lastModified member is provided, let d be set to the
+      //    lastModified dictionary member. If it is not provided, set d to the
+      //    current date and time represented as the number of milliseconds since
+      //    the Unix Epoch (which is the equivalent of Date.now() [ECMA-262]).
+      d = options.lastModified
+    }
+
+    // 4. Return a new File object F such that:
+    // F refers to the bytes byte sequence.
+    // F.size is set to the number of total bytes in bytes.
+    // F.name is set to n.
+    // F.type is set to t.
+    // F.lastModified is set to d.
+
+    super(processBlobParts(fileBits, options), { type: t })
+    this[kState] = {
+      name: n,
+      lastModified: d,
+      type: t
+    }
+  }
+
+  get name () {
+    webidl.brandCheck(this, File)
+
+    return this[kState].name
+  }
+
+  get lastModified () {
+    webidl.brandCheck(this, File)
+
+    return this[kState].lastModified
+  }
+
+  get type () {
+    webidl.brandCheck(this, File)
+
+    return this[kState].type
+  }
+}
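+// Illustrative usage of the File class above:
+//
+//   const f = new File(['hello'], 'hello.txt', { type: 'TEXT/PLAIN' })
+//   f.name          // => 'hello.txt'
+//   f.type          // => 'text/plain'  (parsed, serialized, lowercased)
+//   f.size          // => 5
+//   f.lastModified  // defaults to Date.now() at construction time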
+
+class FileLike {
+  constructor (blobLike, fileName, options = {}) {
+    // TODO: argument idl type check
+
+    // The File constructor is invoked with two or three parameters, depending
+    // on whether the optional dictionary parameter is used. When the File()
+    // constructor is invoked, user agents must run the following steps:
+
+    // 1. Let bytes be the result of processing blob parts given fileBits and
+    // options.
+
+    // 2. Let n be the fileName argument to the constructor.
+    const n = fileName
+
+    // 3. Process FilePropertyBag dictionary argument by running the following
+    // substeps:
+
+    //    1. If the type member is provided and is not the empty string, let t
+    //    be set to the type dictionary member. If t contains any characters
+    //    outside the range U+0020 to U+007E, then set t to the empty string
+    //    and return from these substeps.
+    //    TODO
+    const t = options.type
+
+    //    2. Convert every character in t to ASCII lowercase.
+    //    TODO
+
+    //    3. If the lastModified member is provided, let d be set to the
+    //    lastModified dictionary member. If it is not provided, set d to the
+    //    current date and time represented as the number of milliseconds since
+    //    the Unix Epoch (which is the equivalent of Date.now() [ECMA-262]).
+    const d = options.lastModified ?? Date.now()
+
+    // 4. Return a new File object F such that:
+    // F refers to the bytes byte sequence.
+    // F.size is set to the number of total bytes in bytes.
+    // F.name is set to n.
+    // F.type is set to t.
+    // F.lastModified is set to d.
+
+    this[kState] = {
+      blobLike,
+      name: n,
+      type: t,
+      lastModified: d
+    }
+  }
+
+  stream (...args) {
+    webidl.brandCheck(this, FileLike)
+
+    return this[kState].blobLike.stream(...args)
+  }
+
+  arrayBuffer (...args) {
+    webidl.brandCheck(this, FileLike)
+
+    return this[kState].blobLike.arrayBuffer(...args)
+  }
+
+  slice (...args) {
+    webidl.brandCheck(this, FileLike)
+
+    return this[kState].blobLike.slice(...args)
+  }
+
+  text (...args) {
+    webidl.brandCheck(this, FileLike)
+
+    return this[kState].blobLike.text(...args)
+  }
+
+  get size () {
+    webidl.brandCheck(this, FileLike)
+
+    return this[kState].blobLike.size
+  }
+
+  get type () {
+    webidl.brandCheck(this, FileLike)
+
+    return this[kState].blobLike.type
+  }
+
+  get name () {
+    webidl.brandCheck(this, FileLike)
+
+    return this[kState].name
+  }
+
+  get lastModified () {
+    webidl.brandCheck(this, FileLike)
+
+    return this[kState].lastModified
+  }
+
+  get [Symbol.toStringTag] () {
+    return 'File'
+  }
+}
+
+Object.defineProperties(File.prototype, {
+  [Symbol.toStringTag]: {
+    value: 'File',
+    configurable: true
+  },
+  name: kEnumerableProperty,
+  lastModified: kEnumerableProperty
+})
+
+webidl.converters.Blob = webidl.interfaceConverter(Blob)
+
+webidl.converters.BlobPart = function (V, opts) {
+  if (webidl.util.Type(V) === 'Object') {
+    if (isBlobLike(V)) {
+      return webidl.converters.Blob(V, { strict: false })
+    }
+
+    if (
+      ArrayBuffer.isView(V) ||
+      types.isAnyArrayBuffer(V)
+    ) {
+      return webidl.converters.BufferSource(V, opts)
+    }
+  }
+
+  return webidl.converters.USVString(V, opts)
+}
+
+webidl.converters['sequence<BlobPart>'] = webidl.sequenceConverter(
+  webidl.converters.BlobPart
+)
+
+// https://www.w3.org/TR/FileAPI/#dfn-FilePropertyBag
+webidl.converters.FilePropertyBag = webidl.dictionaryConverter([
+  {
+    key: 'lastModified',
+    converter: webidl.converters['long long'],
+    get defaultValue () {
+      return Date.now()
+    }
+  },
+  {
+    key: 'type',
+    converter: webidl.converters.DOMString,
+    defaultValue: ''
+  },
+  {
+    key: 'endings',
+    converter: (value) => {
+      value = webidl.converters.DOMString(value)
+      value = value.toLowerCase()
+
+      if (value !== 'native') {
+        value = 'transparent'
+      }
+
+      return value
+    },
+    defaultValue: 'transparent'
+  }
+])
+
+/**
+ * @see https://www.w3.org/TR/FileAPI/#process-blob-parts
+ * @param {(NodeJS.TypedArray|Blob|string)[]} parts
+ * @param {{ type: string, endings: string }} options
+ */
+function processBlobParts (parts, options) {
+  // 1. Let bytes be an empty sequence of bytes.
+  /** @type {NodeJS.TypedArray[]} */
+  const bytes = []
+
+  // 2. For each element in parts:
+  for (const element of parts) {
+    // 1. If element is a USVString, run the following substeps:
+    if (typeof element === 'string') {
+      // 1. Let s be element.
+      let s = element
+
+      // 2. If the endings member of options is "native", set s
+      //    to the result of converting line endings to native
+      //    of element.
+      if (options.endings === 'native') {
+        s = convertLineEndingsNative(s)
+      }
+
+      // 3. Append the result of UTF-8 encoding s to bytes.
+      bytes.push(encoder.encode(s))
+    } else if (
+      types.isAnyArrayBuffer(element) ||
+      types.isTypedArray(element)
+    ) {
+      // 2. If element is a BufferSource, get a copy of the
+      //    bytes held by the buffer source, and append those
+      //    bytes to bytes.
+      if (!element.buffer) { // ArrayBuffer
+        bytes.push(new Uint8Array(element))
+      } else {
+        bytes.push(
+          new Uint8Array(element.buffer, element.byteOffset, element.byteLength)
+        )
+      }
+    } else if (isBlobLike(element)) {
+      // 3. If element is a Blob, append the bytes it represents
+      //    to bytes.
+      bytes.push(element)
+    }
+  }
+
+  // 3. Return bytes.
+  return bytes
+}
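+// Illustrative example (endings left as the default 'transparent'):
+//
+//   processBlobParts(['ab', new Uint8Array([1, 2])], { endings: 'transparent' })
+//   // => [Uint8Array [97, 98], Uint8Array [1, 2]]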
+
+/**
+ * @see https://www.w3.org/TR/FileAPI/#convert-line-endings-to-native
+ * @param {string} s
+ */
+function convertLineEndingsNative (s) {
+  // 1. Let native line ending be the code point U+000A LF.
+  let nativeLineEnding = '\n'
+
+  // 2. If the underlying platform’s conventions are to
+  //    represent newlines as a carriage return and line feed
+  //    sequence, set native line ending to the code point
+  //    U+000D CR followed by the code point U+000A LF.
+  if (process.platform === 'win32') {
+    nativeLineEnding = '\r\n'
+  }
+
+  return s.replace(/\r?\n/g, nativeLineEnding)
+}
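+// Illustrative example:
+//
+//   convertLineEndingsNative('a\nb\r\nc')
+//   // => 'a\r\nb\r\nc' on win32, 'a\nb\nc' elsewhere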
+
+// If this function is moved to ./util.js, some tools (such as
+// rollup) will warn about circular dependencies. See:
+// https://github.com/nodejs/undici/issues/1629
+function isFileLike (object) {
+  return (
+    (NativeFile && object instanceof NativeFile) ||
+    object instanceof File || (
+      object &&
+      (typeof object.stream === 'function' ||
+      typeof object.arrayBuffer === 'function') &&
+      object[Symbol.toStringTag] === 'File'
+    )
+  )
+}
+
+module.exports = { File, FileLike, isFileLike }
+
+
+/***/ }),
+
+/***/ 2894:
+/***/ ((module, __unused_webpack_exports, __nccwpck_require__) => {
+
+
+
+const { isBlobLike, toUSVString, makeIterator } = __nccwpck_require__(2696)
+const { kState } = __nccwpck_require__(8323)
+const { File: UndiciFile, FileLike, isFileLike } = __nccwpck_require__(66)
+const { webidl } = __nccwpck_require__(29)
+const { Blob, File: NativeFile } = __nccwpck_require__(181)
+
+/** @type {globalThis['File']} */
+const File = NativeFile ?? UndiciFile
+
+// https://xhr.spec.whatwg.org/#formdata
+class FormData {
+  constructor (form) {
+    if (form !== undefined) {
+      throw webidl.errors.conversionFailed({
+        prefix: 'FormData constructor',
+        argument: 'Argument 1',
+        types: ['undefined']
+      })
+    }
+
+    this[kState] = []
+  }
+
+  append (name, value, filename = undefined) {
+    webidl.brandCheck(this, FormData)
+
+    webidl.argumentLengthCheck(arguments, 2, { header: 'FormData.append' })
+
+    if (arguments.length === 3 && !isBlobLike(value)) {
+      throw new TypeError(
+        "Failed to execute 'append' on 'FormData': parameter 2 is not of type 'Blob'"
+      )
+    }
+
+    // 1. Let value be value if given; otherwise blobValue.
+
+    name = webidl.converters.USVString(name)
+    value = isBlobLike(value)
+      ? webidl.converters.Blob(value, { strict: false })
+      : webidl.converters.USVString(value)
+    filename = arguments.length === 3
+      ? webidl.converters.USVString(filename)
+      : undefined
+
+    // 2. Let entry be the result of creating an entry with
+    // name, value, and filename if given.
+    const entry = makeEntry(name, value, filename)
+
+    // 3. Append entry to this’s entry list.
+    this[kState].push(entry)
+  }
+
+  delete (name) {
+    webidl.brandCheck(this, FormData)
+
+    webidl.argumentLengthCheck(arguments, 1, { header: 'FormData.delete' })
+
+    name = webidl.converters.USVString(name)
+
+    // The delete(name) method steps are to remove all entries whose name
+    // is name from this’s entry list.
+    this[kState] = this[kState].filter(entry => entry.name !== name)
+  }
+
+  get (name) {
+    webidl.brandCheck(this, FormData)
+
+    webidl.argumentLengthCheck(arguments, 1, { header: 'FormData.get' })
+
+    name = webidl.converters.USVString(name)
+
+    // 1. If there is no entry whose name is name in this’s entry list,
+    // then return null.
+    const idx = this[kState].findIndex((entry) => entry.name === name)
+    if (idx === -1) {
+      return null
+    }
+
+    // 2. Return the value of the first entry whose name is name from
+    // this’s entry list.
+    return this[kState][idx].value
+  }
+
+  getAll (name) {
+    webidl.brandCheck(this, FormData)
+
+    webidl.argumentLengthCheck(arguments, 1, { header: 'FormData.getAll' })
+
+    name = webidl.converters.USVString(name)
+
+    // 1. If there is no entry whose name is name in this’s entry list,
+    // then return the empty list.
+    // 2. Return the values of all entries whose name is name, in order,
+    // from this’s entry list.
+    return this[kState]
+      .filter((entry) => entry.name === name)
+      .map((entry) => entry.value)
+  }
+
+  has (name) {
+    webidl.brandCheck(this, FormData)
+
+    webidl.argumentLengthCheck(arguments, 1, { header: 'FormData.has' })
+
+    name = webidl.converters.USVString(name)
+
+    // The has(name) method steps are to return true if there is an entry
+    // whose name is name in this’s entry list; otherwise false.
+    return this[kState].findIndex((entry) => entry.name === name) !== -1
+  }
+
+  set (name, value, filename = undefined) {
+    webidl.brandCheck(this, FormData)
+
+    webidl.argumentLengthCheck(arguments, 2, { header: 'FormData.set' })
+
+    if (arguments.length === 3 && !isBlobLike(value)) {
+      throw new TypeError(
+        "Failed to execute 'set' on 'FormData': parameter 2 is not of type 'Blob'"
+      )
+    }
+
+    // The set(name, value) and set(name, blobValue, filename) method steps
+    // are:
+
+    // 1. Let value be value if given; otherwise blobValue.
+
+    name = webidl.converters.USVString(name)
+    value = isBlobLike(value)
+      ? webidl.converters.Blob(value, { strict: false })
+      : webidl.converters.USVString(value)
+    filename = arguments.length === 3
+      ? toUSVString(filename)
+      : undefined
+
+    // 2. Let entry be the result of creating an entry with name, value, and
+    // filename if given.
+    const entry = makeEntry(name, value, filename)
+
+    // 3. If there are entries in this’s entry list whose name is name, then
+    // replace the first such entry with entry and remove the others.
+    const idx = this[kState].findIndex((entry) => entry.name === name)
+    if (idx !== -1) {
+      this[kState] = [
+        ...this[kState].slice(0, idx),
+        entry,
+        ...this[kState].slice(idx + 1).filter((entry) => entry.name !== name)
+      ]
+    } else {
+      // 4. Otherwise, append entry to this’s entry list.
+      this[kState].push(entry)
+    }
+  }
+
+  entries () {
+    webidl.brandCheck(this, FormData)
+
+    return makeIterator(
+      () => this[kState].map(pair => [pair.name, pair.value]),
+      'FormData',
+      'key+value'
+    )
+  }
+
+  keys () {
+    webidl.brandCheck(this, FormData)
+
+    return makeIterator(
+      () => this[kState].map(pair => [pair.name, pair.value]),
+      'FormData',
+      'key'
+    )
+  }
+
+  values () {
+    webidl.brandCheck(this, FormData)
+
+    return makeIterator(
+      () => this[kState].map(pair => [pair.name, pair.value]),
+      'FormData',
+      'value'
+    )
+  }
+
+  /**
+   * @param {(value: string, key: string, self: FormData) => void} callbackFn
+   * @param {unknown} thisArg
+   */
+  forEach (callbackFn, thisArg = globalThis) {
+    webidl.brandCheck(this, FormData)
+
+    webidl.argumentLengthCheck(arguments, 1, { header: 'FormData.forEach' })
+
+    if (typeof callbackFn !== 'function') {
+      throw new TypeError(
+        "Failed to execute 'forEach' on 'FormData': parameter 1 is not of type 'Function'."
+      )
+    }
+
+    for (const [key, value] of this) {
+      callbackFn.apply(thisArg, [value, key, this])
+    }
+  }
+}
+
+FormData.prototype[Symbol.iterator] = FormData.prototype.entries
+
+Object.defineProperties(FormData.prototype, {
+  [Symbol.toStringTag]: {
+    value: 'FormData',
+    configurable: true
+  }
+})
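+// Illustrative usage of the FormData class above:
+//
+//   const fd = new FormData()
+//   fd.append('field', 'one')
+//   fd.append('field', 'two')
+//   fd.get('field')      // => 'one'
+//   fd.getAll('field')   // => ['one', 'two']
+//   fd.set('field', 'three')
+//   fd.getAll('field')   // => ['three']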
+
+/**
+ * @see https://html.spec.whatwg.org/multipage/form-control-infrastructure.html#create-an-entry
+ * @param {string} name
+ * @param {string|Blob} value
+ * @param {?string} filename
+ * @returns
+ */
+function makeEntry (name, value, filename) {
+  // 1. Set name to the result of converting name into a scalar value string.
+  // "To convert a string into a scalar value string, replace any surrogates
+  //  with U+FFFD."
+  // see: https://nodejs.org/dist/latest-v18.x/docs/api/buffer.html#buftostringencoding-start-end
+  name = Buffer.from(name).toString('utf8')
+
+  // 2. If value is a string, then set value to the result of converting
+  //    value into a scalar value string.
+  if (typeof value === 'string') {
+    value = Buffer.from(value).toString('utf8')
+  } else {
+    // 3. Otherwise:
+
+    // 1. If value is not a File object, then set value to a new File object,
+    //    representing the same bytes, whose name attribute value is "blob"
+    if (!isFileLike(value)) {
+      value = value instanceof Blob
+        ? new File([value], 'blob', { type: value.type })
+        : new FileLike(value, 'blob', { type: value.type })
+    }
+
+    // 2. If filename is given, then set value to a new File object,
+    //    representing the same bytes, whose name attribute is filename.
+    if (filename !== undefined) {
+      /** @type {FilePropertyBag} */
+      const options = {
+        type: value.type,
+        lastModified: value.lastModified
+      }
+
+      value = (NativeFile && value instanceof NativeFile) || value instanceof UndiciFile
+        ? new File([value], filename, options)
+        : new FileLike(value, filename, options)
+    }
+  }
+
+  // 4. Return an entry whose name is name and whose value is value.
+  return { name, value }
+}
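+// Illustrative examples: string values stay strings; a bare Blob is wrapped as
+// a File named "blob", and a provided filename re-wraps it under that name:
+//
+//   makeEntry('a', 'b')                                   // => { name: 'a', value: 'b' }
+//   makeEntry('a', new Blob(['x']), 'x.txt').value.name   // => 'x.txt'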
+
+module.exports = { FormData }
+
+
+/***/ }),
+
+/***/ 9963:
+/***/ ((module) => {
+
+
+
+// In case of breaking changes, increase the version
+// number to avoid conflicts.
+const globalOrigin = Symbol.for('undici.globalOrigin.1')
+
+function getGlobalOrigin () {
+  return globalThis[globalOrigin]
+}
+
+function setGlobalOrigin (newOrigin) {
+  if (newOrigin === undefined) {
+    Object.defineProperty(globalThis, globalOrigin, {
+      value: undefined,
+      writable: true,
+      enumerable: false,
+      configurable: false
+    })
+
+    return
+  }
+
+  const parsedURL = new URL(newOrigin)
+
+  if (parsedURL.protocol !== 'http:' && parsedURL.protocol !== 'https:') {
+    throw new TypeError(`Only http & https urls are allowed, received ${parsedURL.protocol}`)
+  }
+
+  Object.defineProperty(globalThis, globalOrigin, {
+    value: parsedURL,
+    writable: true,
+    enumerable: false,
+    configurable: false
+  })
+}
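+// Illustrative usage:
+//
+//   setGlobalOrigin('https://example.com')
+//   getGlobalOrigin().origin              // => 'https://example.com'
+//   setGlobalOrigin('ftp://example.com')  // throws TypeError (http/https only)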
+
+module.exports = {
+  getGlobalOrigin,
+  setGlobalOrigin
+}
+
+
+/***/ }),
+
+/***/ 2908:
+/***/ ((module, __unused_webpack_exports, __nccwpck_require__) => {
+
+// https://github.com/Ethan-Arrowood/undici-fetch
+
+
+
+const { kHeadersList, kConstruct } = __nccwpck_require__(4856)
+const { kGuard } = __nccwpck_require__(8323)
+const { kEnumerableProperty } = __nccwpck_require__(3465)
+const {
+  makeIterator,
+  isValidHeaderName,
+  isValidHeaderValue
+} = __nccwpck_require__(2696)
+const { webidl } = __nccwpck_require__(29)
+const assert = __nccwpck_require__(2613)
+
+const kHeadersMap = Symbol('headers map')
+const kHeadersSortedMap = Symbol('headers map sorted')
+
+/**
+ * @param {number} code
+ */
+function isHTTPWhiteSpaceCharCode (code) {
+  return code === 0x00a || code === 0x00d || code === 0x009 || code === 0x020
+}
+
+/**
+ * @see https://fetch.spec.whatwg.org/#concept-header-value-normalize
+ * @param {string} potentialValue
+ */
+function headerValueNormalize (potentialValue) {
+  //  To normalize a byte sequence potentialValue, remove
+  //  any leading and trailing HTTP whitespace bytes from
+  //  potentialValue.
+  let i = 0; let j = potentialValue.length
+
+  while (j > i && isHTTPWhiteSpaceCharCode(potentialValue.charCodeAt(j - 1))) --j
+  while (j > i && isHTTPWhiteSpaceCharCode(potentialValue.charCodeAt(i))) ++i
+
+  return i === 0 && j === potentialValue.length ? potentialValue : potentialValue.substring(i, j)
+}
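+// Illustrative examples (only leading/trailing HTTP whitespace is removed):
+//
+//   headerValueNormalize('  text/html \t')  // => 'text/html'
+//   headerValueNormalize('a,  b')           // => 'a,  b' (returned unchanged)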
+
+function fill (headers, object) {
+  // To fill a Headers object headers with a given object object, run these steps:
+
+  // 1. If object is a sequence, then for each header in object:
+  // Note: webidl conversion to array has already been done.
+  if (Array.isArray(object)) {
+    for (let i = 0; i < object.length; ++i) {
+      const header = object[i]
+      // 1. If header does not contain exactly two items, then throw a TypeError.
+      if (header.length !== 2) {
+        throw webidl.errors.exception({
+          header: 'Headers constructor',
+          message: `expected name/value pair to be length 2, found ${header.length}.`
+        })
+      }
+
+      // 2. Append (header’s first item, header’s second item) to headers.
+      appendHeader(headers, header[0], header[1])
+    }
+  } else if (typeof object === 'object' && object !== null) {
+    // Note: null should throw
+
+    // 2. Otherwise, object is a record, then for each key → value in object,
+    //    append (key, value) to headers
+    const keys = Object.keys(object)
+    for (let i = 0; i < keys.length; ++i) {
+      appendHeader(headers, keys[i], object[keys[i]])
+    }
+  } else {
+    throw webidl.errors.conversionFailed({
+      prefix: 'Headers constructor',
+      argument: 'Argument 1',
+      types: ['sequence<sequence<ByteString>>', 'record<ByteString, ByteString>']
+    })
+  }
+}
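+// Illustrative examples: both init shapes accepted by the Headers constructor
+// funnel through fill():
+//
+//   new Headers([['accept', 'text/html'], ['x-a', '1']])  // sequence form
+//   new Headers({ accept: 'text/html', 'x-a': '1' })      // record form
+//   new Headers([['bad']])  // throws: expected name/value pair to be length 2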
+
+/**
+ * @see https://fetch.spec.whatwg.org/#concept-headers-append
+ */
+function appendHeader (headers, name, value) {
+  // 1. Normalize value.
+  value = headerValueNormalize(value)
+
+  // 2. If name is not a header name or value is not a
+  //    header value, then throw a TypeError.
+  if (!isValidHeaderName(name)) {
+    throw webidl.errors.invalidArgument({
+      prefix: 'Headers.append',
+      value: name,
+      type: 'header name'
+    })
+  } else if (!isValidHeaderValue(value)) {
+    throw webidl.errors.invalidArgument({
+      prefix: 'Headers.append',
+      value,
+      type: 'header value'
+    })
+  }
+
+  // 3. If headers’s guard is "immutable", then throw a TypeError.
+  // 4. Otherwise, if headers’s guard is "request" and name is a
+  //    forbidden header name, return.
+  // Note: undici does not implement forbidden header names
+  if (headers[kGuard] === 'immutable') {
+    throw new TypeError('immutable')
+  } else if (headers[kGuard] === 'request-no-cors') {
+    // 5. Otherwise, if headers’s guard is "request-no-cors":
+    // TODO
+  }
+
+  // 6. Otherwise, if headers’s guard is "response" and name is a
+  //    forbidden response-header name, return.
+
+  // 7. Append (name, value) to headers’s header list.
+  return headers[kHeadersList].append(name, value)
+
+  // 8. If headers’s guard is "request-no-cors", then remove
+  //    privileged no-CORS request headers from headers
+}
+
+class HeadersList {
+  /** @type {[string, string][]|null} */
+  cookies = null
+
+  constructor (init) {
+    if (init instanceof HeadersList) {
+      this[kHeadersMap] = new Map(init[kHeadersMap])
+      this[kHeadersSortedMap] = init[kHeadersSortedMap]
+      this.cookies = init.cookies === null ? null : [...init.cookies]
+    } else {
+      this[kHeadersMap] = new Map(init)
+      this[kHeadersSortedMap] = null
+    }
+  }
+
+  // https://fetch.spec.whatwg.org/#header-list-contains
+  contains (name) {
+    // A header list list contains a header name name if list
+    // contains a header whose name is a byte-case-insensitive
+    // match for name.
+    name = name.toLowerCase()
+
+    return this[kHeadersMap].has(name)
+  }
+
+  clear () {
+    this[kHeadersMap].clear()
+    this[kHeadersSortedMap] = null
+    this.cookies = null
+  }
+
+  // https://fetch.spec.whatwg.org/#concept-header-list-append
+  append (name, value) {
+    this[kHeadersSortedMap] = null
+
+    // 1. If list contains name, then set name to the first such
+    //    header’s name.
+    const lowercaseName = name.toLowerCase()
+    const exists = this[kHeadersMap].get(lowercaseName)
+
+    // 2. Append (name, value) to list.
+    if (exists) {
+      const delimiter = lowercaseName === 'cookie' ? '; ' : ', '
+      this[kHeadersMap].set(lowercaseName, {
+        name: exists.name,
+        value: `${exists.value}${delimiter}${value}`
+      })
+    } else {
+      this[kHeadersMap].set(lowercaseName, { name, value })
+    }
+
+    if (lowercaseName === 'set-cookie') {
+      this.cookies ??= []
+      this.cookies.push(value)
+    }
+  }
+
+  // https://fetch.spec.whatwg.org/#concept-header-list-set
+  set (name, value) {
+    this[kHeadersSortedMap] = null
+    const lowercaseName = name.toLowerCase()
+
+    if (lowercaseName === 'set-cookie') {
+      this.cookies = [value]
+    }
+
+    // 1. If list contains name, then set the value of
+    //    the first such header to value and remove the
+    //    others.
+    // 2. Otherwise, append header (name, value) to list.
+    this[kHeadersMap].set(lowercaseName, { name, value })
+  }
+
+  // https://fetch.spec.whatwg.org/#concept-header-list-delete
+  delete (name) {
+    this[kHeadersSortedMap] = null
+
+    name = name.toLowerCase()
+
+    if (name === 'set-cookie') {
+      this.cookies = null
+    }
+
+    this[kHeadersMap].delete(name)
+  }
+
+  // https://fetch.spec.whatwg.org/#concept-header-list-get
+  get (name) {
+    const value = this[kHeadersMap].get(name.toLowerCase())
+
+    // 1. If list does not contain name, then return null.
+    // 2. Return the values of all headers in list whose name
+    //    is a byte-case-insensitive match for name,
+    //    separated from each other by 0x2C 0x20, in order.
+    return value === undefined ? null : value.value
+  }
+
+  * [Symbol.iterator] () {
+    // use the lowercased name
+    for (const [name, { value }] of this[kHeadersMap]) {
+      yield [name, value]
+    }
+  }
+
+  get entries () {
+    const headers = {}
+
+    if (this[kHeadersMap].size) {
+      for (const { name, value } of this[kHeadersMap].values()) {
+        headers[name] = value
+      }
+    }
+
+    return headers
+  }
+}
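+// Illustrative usage: repeated appends are combined with ', ' ('; ' for
+// `cookie`), while `set-cookie` values are also kept verbatim in `cookies`:
+//
+//   const list = new HeadersList()
+//   list.append('Accept', 'text/html')
+//   list.append('accept', 'application/json')
+//   list.get('ACCEPT')   // => 'text/html, application/json'
+//   list.append('set-cookie', 'a=1')
+//   list.append('set-cookie', 'b=2')
+//   list.cookies         // => ['a=1', 'b=2']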
+
+// https://fetch.spec.whatwg.org/#headers-class
+class Headers {
+  constructor (init = undefined) {
+    if (init === kConstruct) {
+      return
+    }
+    this[kHeadersList] = new HeadersList()
+
+    // The new Headers(init) constructor steps are:
+
+    // 1. Set this’s guard to "none".
+    this[kGuard] = 'none'
+
+    // 2. If init is given, then fill this with init.
+    if (init !== undefined) {
+      init = webidl.converters.HeadersInit(init)
+      fill(this, init)
+    }
+  }
+
+  // https://fetch.spec.whatwg.org/#dom-headers-append
+  append (name, value) {
+    webidl.brandCheck(this, Headers)
+
+    webidl.argumentLengthCheck(arguments, 2, { header: 'Headers.append' })
+
+    name = webidl.converters.ByteString(name)
+    value = webidl.converters.ByteString(value)
+
+    return appendHeader(this, name, value)
+  }
+
+  // https://fetch.spec.whatwg.org/#dom-headers-delete
+  delete (name) {
+    webidl.brandCheck(this, Headers)
+
+    webidl.argumentLengthCheck(arguments, 1, { header: 'Headers.delete' })
+
+    name = webidl.converters.ByteString(name)
+
+    // 1. If name is not a header name, then throw a TypeError.
+    if (!isValidHeaderName(name)) {
+      throw webidl.errors.invalidArgument({
+        prefix: 'Headers.delete',
+        value: name,
+        type: 'header name'
+      })
+    }
+
+    // 2. If this’s guard is "immutable", then throw a TypeError.
+    // 3. Otherwise, if this’s guard is "request" and name is a
+    //    forbidden header name, return.
+    // 4. Otherwise, if this’s guard is "request-no-cors", name
+    //    is not a no-CORS-safelisted request-header name, and
+    //    name is not a privileged no-CORS request-header name,
+    //    return.
+    // 5. Otherwise, if this’s guard is "response" and name is
+    //    a forbidden response-header name, return.
+    // Note: undici does not implement forbidden header names
+    if (this[kGuard] === 'immutable') {
+      throw new TypeError('immutable')
+    } else if (this[kGuard] === 'request-no-cors') {
+      // TODO
+    }
+
+    // 6. If this’s header list does not contain name, then
+    //    return.
+    if (!this[kHeadersList].contains(name)) {
+      return
+    }
+
+    // 7. Delete name from this’s header list.
+    // 8. If this’s guard is "request-no-cors", then remove
+    //    privileged no-CORS request headers from this.
+    this[kHeadersList].delete(name)
+  }
+
+  // https://fetch.spec.whatwg.org/#dom-headers-get
+  get (name) {
+    webidl.brandCheck(this, Headers)
+
+    webidl.argumentLengthCheck(arguments, 1, { header: 'Headers.get' })
+
+    name = webidl.converters.ByteString(name)
+
+    // 1. If name is not a header name, then throw a TypeError.
+    if (!isValidHeaderName(name)) {
+      throw webidl.errors.invalidArgument({
+        prefix: 'Headers.get',
+        value: name,
+        type: 'header name'
+      })
+    }
+
+    // 2. Return the result of getting name from this’s header
+    //    list.
+    return this[kHeadersList].get(name)
+  }
+
+  // https://fetch.spec.whatwg.org/#dom-headers-has
+  has (name) {
+    webidl.brandCheck(this, Headers)
+
+    webidl.argumentLengthCheck(arguments, 1, { header: 'Headers.has' })
+
+    name = webidl.converters.ByteString(name)
+
+    // 1. If name is not a header name, then throw a TypeError.
+    if (!isValidHeaderName(name)) {
+      throw webidl.errors.invalidArgument({
+        prefix: 'Headers.has',
+        value: name,
+        type: 'header name'
+      })
+    }
+
+    // 2. Return true if this’s header list contains name;
+    //    otherwise false.
+    return this[kHeadersList].contains(name)
+  }
+
+  // https://fetch.spec.whatwg.org/#dom-headers-set
+  set (name, value) {
+    webidl.brandCheck(this, Headers)
+
+    webidl.argumentLengthCheck(arguments, 2, { header: 'Headers.set' })
+
+    name = webidl.converters.ByteString(name)
+    value = webidl.converters.ByteString(value)
+
+    // 1. Normalize value.
+    value = headerValueNormalize(value)
+
+    // 2. If name is not a header name or value is not a
+    //    header value, then throw a TypeError.
+    if (!isValidHeaderName(name)) {
+      throw webidl.errors.invalidArgument({
+        prefix: 'Headers.set',
+        value: name,
+        type: 'header name'
+      })
+    } else if (!isValidHeaderValue(value)) {
+      throw webidl.errors.invalidArgument({
+        prefix: 'Headers.set',
+        value,
+        type: 'header value'
+      })
+    }
+
+    // 3. If this’s guard is "immutable", then throw a TypeError.
+    // 4. Otherwise, if this’s guard is "request" and name is a
+    //    forbidden header name, return.
+    // 5. Otherwise, if this’s guard is "request-no-cors" and
+    //    name/value is not a no-CORS-safelisted request-header,
+    //    return.
+    // 6. Otherwise, if this’s guard is "response" and name is a
+    //    forbidden response-header name, return.
+    // Note: undici does not implement forbidden header names
+    if (this[kGuard] === 'immutable') {
+      throw new TypeError('immutable')
+    } else if (this[kGuard] === 'request-no-cors') {
+      // TODO
+    }
+
+    // 7. Set (name, value) in this’s header list.
+    // 8. If this’s guard is "request-no-cors", then remove
+    //    privileged no-CORS request headers from this
+    this[kHeadersList].set(name, value)
+  }
+
+  // https://fetch.spec.whatwg.org/#dom-headers-getsetcookie
+  getSetCookie () {
+    webidl.brandCheck(this, Headers)
+
+    // 1. If this’s header list does not contain `Set-Cookie`, then return « ».
+    // 2. Return the values of all headers in this’s header list whose name is
+    //    a byte-case-insensitive match for `Set-Cookie`, in order.
+
+    const list = this[kHeadersList].cookies
+
+    if (list) {
+      return [...list]
+    }
+
+    return []
+  }
+
+  // https://fetch.spec.whatwg.org/#concept-header-list-sort-and-combine
+  get [kHeadersSortedMap] () {
+    if (this[kHeadersList][kHeadersSortedMap]) {
+      return this[kHeadersList][kHeadersSortedMap]
+    }
+
+    // 1. Let headers be an empty list of headers with the key being the name
+    //    and value the value.
+    const headers = []
+
+    // 2. Let names be the result of convert header names to a sorted-lowercase
+    //    set with all the names of the headers in list.
+    const names = [...this[kHeadersList]].sort((a, b) => a[0] < b[0] ? -1 : 1)
+    const cookies = this[kHeadersList].cookies
+
+    // 3. For each name of names:
+    for (let i = 0; i < names.length; ++i) {
+      const [name, value] = names[i]
+      // 1. If name is `set-cookie`, then:
+      if (name === 'set-cookie') {
+        // 1. Let values be a list of all values of headers in list whose name
+        //    is a byte-case-insensitive match for name, in order.
+
+        // 2. For each value of values:
+        // 1. Append (name, value) to headers.
+        for (let j = 0; j < cookies.length; ++j) {
+          headers.push([name, cookies[j]])
+        }
+      } else {
+        // 2. Otherwise:
+
+        // 1. Let value be the result of getting name from list.
+
+        // 2. Assert: value is non-null.
+        assert(value !== null)
+
+        // 3. Append (name, value) to headers.
+        headers.push([name, value])
+      }
+    }
+
+    this[kHeadersList][kHeadersSortedMap] = headers
+
+    // 4. Return headers.
+    return headers
+  }
+
+  keys () {
+    webidl.brandCheck(this, Headers)
+
+    if (this[kGuard] === 'immutable') {
+      const value = this[kHeadersSortedMap]
+      return makeIterator(() => value, 'Headers',
+        'key')
+    }
+
+    return makeIterator(
+      () => [...this[kHeadersSortedMap].values()],
+      'Headers',
+      'key'
+    )
+  }
+
+  values () {
+    webidl.brandCheck(this, Headers)
+
+    if (this[kGuard] === 'immutable') {
+      const value = this[kHeadersSortedMap]
+      return makeIterator(() => value, 'Headers',
+        'value')
+    }
+
+    return makeIterator(
+      () => [...this[kHeadersSortedMap].values()],
+      'Headers',
+      'value'
+    )
+  }
+
+  entries () {
+    webidl.brandCheck(this, Headers)
+
+    if (this[kGuard] === 'immutable') {
+      const value = this[kHeadersSortedMap]
+      return makeIterator(() => value, 'Headers',
+        'key+value')
+    }
+
+    return makeIterator(
+      () => [...this[kHeadersSortedMap].values()],
+      'Headers',
+      'key+value'
+    )
+  }
+
+  /**
+   * @param {(value: string, key: string, self: Headers) => void} callbackFn
+   * @param {unknown} thisArg
+   */
+  forEach (callbackFn, thisArg = globalThis) {
+    webidl.brandCheck(this, Headers)
+
+    webidl.argumentLengthCheck(arguments, 1, { header: 'Headers.forEach' })
+
+    if (typeof callbackFn !== 'function') {
+      throw new TypeError(
+        "Failed to execute 'forEach' on 'Headers': parameter 1 is not of type 'Function'."
+      )
+    }
+
+    for (const [key, value] of this) {
+      callbackFn.apply(thisArg, [value, key, this])
+    }
+  }
+
+  [Symbol.for('nodejs.util.inspect.custom')] () {
+    webidl.brandCheck(this, Headers)
+
+    return this[kHeadersList]
+  }
+}
+
+Headers.prototype[Symbol.iterator] = Headers.prototype.entries
+
+Object.defineProperties(Headers.prototype, {
+  append: kEnumerableProperty,
+  delete: kEnumerableProperty,
+  get: kEnumerableProperty,
+  has: kEnumerableProperty,
+  set: kEnumerableProperty,
+  getSetCookie: kEnumerableProperty,
+  keys: kEnumerableProperty,
+  values: kEnumerableProperty,
+  entries: kEnumerableProperty,
+  forEach: kEnumerableProperty,
+  [Symbol.iterator]: { enumerable: false },
+  [Symbol.toStringTag]: {
+    value: 'Headers',
+    configurable: true
+  }
+})
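+// Illustrative usage of the Headers class above:
+//
+//   const h = new Headers({ 'Content-Type': 'text/html' })
+//   h.get('content-type')   // => 'text/html'
+//   h.append('Set-Cookie', 'a=1')
+//   h.append('Set-Cookie', 'b=2')
+//   h.getSetCookie()        // => ['a=1', 'b=2']
+//   [...h]                  // iterates sorted, lowercased [name, value] pairs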
+
+webidl.converters.HeadersInit = function (V) {
+  if (webidl.util.Type(V) === 'Object') {
+    if (V[Symbol.iterator]) {
+      return webidl.converters['sequence<sequence<ByteString>>'](V)
+    }
+
+    return webidl.converters['record<ByteString, ByteString>'](V)
+  }
+
+  throw webidl.errors.conversionFailed({
+    prefix: 'Headers constructor',
+    argument: 'Argument 1',
+    types: ['sequence<sequence<ByteString>>', 'record<ByteString, ByteString>']
+  })
+}
+
+module.exports = {
+  fill,
+  Headers,
+  HeadersList
+}
+
+
+/***/ }),
+
+/***/ 8358:
+/***/ ((module, __unused_webpack_exports, __nccwpck_require__) => {
+
+// https://github.com/Ethan-Arrowood/undici-fetch
+
+
+
+const {
+  Response,
+  makeNetworkError,
+  makeAppropriateNetworkError,
+  filterResponse,
+  makeResponse
+} = __nccwpck_require__(5683)
+const { Headers } = __nccwpck_require__(2908)
+const { Request, makeRequest } = __nccwpck_require__(5767)
+const zlib = __nccwpck_require__(3106)
+const {
+  bytesMatch,
+  makePolicyContainer,
+  clonePolicyContainer,
+  requestBadPort,
+  TAOCheck,
+  appendRequestOriginHeader,
+  responseLocationURL,
+  requestCurrentURL,
+  setRequestReferrerPolicyOnRedirect,
+  tryUpgradeRequestToAPotentiallyTrustworthyURL,
+  createOpaqueTimingInfo,
+  appendFetchMetadata,
+  corsCheck,
+  crossOriginResourcePolicyCheck,
+  determineRequestsReferrer,
+  coarsenedSharedCurrentTime,
+  createDeferredPromise,
+  isBlobLike,
+  sameOrigin,
+  isCancelled,
+  isAborted,
+  isErrorLike,
+  fullyReadBody,
+  readableStreamClose,
+  isomorphicEncode,
+  urlIsLocal,
+  urlIsHttpHttpsScheme,
+  urlHasHttpsScheme
+} = __nccwpck_require__(2696)
+const { kState, kHeaders, kGuard, kRealm } = __nccwpck_require__(8323)
+const assert = __nccwpck_require__(2613)
+const { safelyExtractBody } = __nccwpck_require__(1380)
+const {
+  redirectStatusSet,
+  nullBodyStatus,
+  safeMethodsSet,
+  requestBodyHeader,
+  subresourceSet,
+  DOMException
+} = __nccwpck_require__(4135)
+const { kHeadersList } = __nccwpck_require__(4856)
+const EE = __nccwpck_require__(4434)
+const { Readable, pipeline } = __nccwpck_require__(2203)
+const { addAbortListener, isErrored, isReadable, nodeMajor, nodeMinor } = __nccwpck_require__(3465)
+const { dataURLProcessor, serializeAMimeType } = __nccwpck_require__(3911)
+const { TransformStream } = __nccwpck_require__(3774)
+const { getGlobalDispatcher } = __nccwpck_require__(7882)
+const { webidl } = __nccwpck_require__(29)
+const { STATUS_CODES } = __nccwpck_require__(8611)
+const GET_OR_HEAD = ['GET', 'HEAD']
+
+/** @type {import('buffer').resolveObjectURL} */
+let resolveObjectURL
+let ReadableStream = globalThis.ReadableStream
+
+class Fetch extends EE {
+  constructor (dispatcher) {
+    super()
+
+    this.dispatcher = dispatcher
+    this.connection = null
+    this.dump = false
+    this.state = 'ongoing'
+    // 2 terminated listeners get added per request,
+    // but only 1 gets removed. If there are 20 redirects,
+    // 21 listeners will be added.
+    // See https://github.com/nodejs/undici/issues/1711
+    // TODO (fix): Find and fix root cause for leaked listener.
+    this.setMaxListeners(21)
+  }
+
+  terminate (reason) {
+    if (this.state !== 'ongoing') {
+      return
+    }
+
+    this.state = 'terminated'
+    this.connection?.destroy(reason)
+    this.emit('terminated', reason)
+  }
+
+  // https://fetch.spec.whatwg.org/#fetch-controller-abort
+  abort (error) {
+    if (this.state !== 'ongoing') {
+      return
+    }
+
+    // 1. Set controller’s state to "aborted".
+    this.state = 'aborted'
+
+    // 2. Let fallbackError be an "AbortError" DOMException.
+    // 3. Set error to fallbackError if it is not given.
+    if (!error) {
+      error = new DOMException('The operation was aborted.', 'AbortError')
+    }
+
+    // 4. Let serializedError be StructuredSerialize(error).
+    //    If that threw an exception, catch it, and let
+    //    serializedError be StructuredSerialize(fallbackError).
+
+    // 5. Set controller’s serialized abort reason to serializedError.
+    this.serializedAbortReason = error
+
+    this.connection?.destroy(error)
+    this.emit('terminated', error)
+  }
+}
+
+// https://fetch.spec.whatwg.org/#fetch-method
+function fetch (input, init = {}) {
+  webidl.argumentLengthCheck(arguments, 1, { header: 'globalThis.fetch' })
+
+  // 1. Let p be a new promise.
+  const p = createDeferredPromise()
+
+  // 2. Let requestObject be the result of invoking the initial value of
+  // Request as constructor with input and init as arguments. If this throws
+  // an exception, reject p with it and return p.
+  let requestObject
+
+  try {
+    requestObject = new Request(input, init)
+  } catch (e) {
+    p.reject(e)
+    return p.promise
+  }
+
+  // 3. Let request be requestObject’s request.
+  const request = requestObject[kState]
+
+  // 4. If requestObject’s signal’s aborted flag is set, then:
+  if (requestObject.signal.aborted) {
+    // 1. Abort the fetch() call with p, request, null, and
+    //    requestObject’s signal’s abort reason.
+    abortFetch(p, request, null, requestObject.signal.reason)
+
+    // 2. Return p.
+    return p.promise
+  }
+
+  // 5. Let globalObject be request’s client’s global object.
+  const globalObject = request.client.globalObject
+
+  // 6. If globalObject is a ServiceWorkerGlobalScope object, then set
+  // request’s service-workers mode to "none".
+  if (globalObject?.constructor?.name === 'ServiceWorkerGlobalScope') {
+    request.serviceWorkers = 'none'
+  }
+
+  // 7. Let responseObject be null.
+  let responseObject = null
+
+  // 8. Let relevantRealm be this’s relevant Realm.
+  const relevantRealm = null
+
+  // 9. Let locallyAborted be false.
+  let locallyAborted = false
+
+  // 10. Let controller be null.
+  let controller = null
+
+  // 11. Add the following abort steps to requestObject’s signal:
+  addAbortListener(
+    requestObject.signal,
+    () => {
+      // 1. Set locallyAborted to true.
+      locallyAborted = true
+
+      // 2. Assert: controller is non-null.
+      assert(controller != null)
+
+      // 3. Abort controller with requestObject’s signal’s abort reason.
+      controller.abort(requestObject.signal.reason)
+
+      // 4. Abort the fetch() call with p, request, responseObject,
+      //    and requestObject’s signal’s abort reason.
+      abortFetch(p, request, responseObject, requestObject.signal.reason)
+    }
+  )
+
+  // 12. Let handleFetchDone given response response be to finalize and
+  // report timing with response, globalObject, and "fetch".
+  const handleFetchDone = (response) =>
+    finalizeAndReportTiming(response, 'fetch')
+
+  // 13. Set controller to the result of calling fetch given request,
+  // with processResponseEndOfBody set to handleFetchDone, and processResponse
+  // given response being these substeps:
+
+  const processResponse = (response) => {
+    // 1. If locallyAborted is true, terminate these substeps.
+    if (locallyAborted) {
+      return Promise.resolve()
+    }
+
+    // 2. If response’s aborted flag is set, then:
+    if (response.aborted) {
+      // 1. Let deserializedError be the result of deserialize a serialized
+      //    abort reason given controller’s serialized abort reason and
+      //    relevantRealm.
+
+      // 2. Abort the fetch() call with p, request, responseObject, and
+      //    deserializedError.
+
+      abortFetch(p, request, responseObject, controller.serializedAbortReason)
+      return Promise.resolve()
+    }
+
+    // 3. If response is a network error, then reject p with a TypeError
+    // and terminate these substeps.
+    if (response.type === 'error') {
+      p.reject(
+        Object.assign(new TypeError('fetch failed'), { cause: response.error })
+      )
+      return Promise.resolve()
+    }
+
+    // 4. Set responseObject to the result of creating a Response object,
+    // given response, "immutable", and relevantRealm.
+    responseObject = new Response()
+    responseObject[kState] = response
+    responseObject[kRealm] = relevantRealm
+    responseObject[kHeaders][kHeadersList] = response.headersList
+    responseObject[kHeaders][kGuard] = 'immutable'
+    responseObject[kHeaders][kRealm] = relevantRealm
+
+    // 5. Resolve p with responseObject.
+    p.resolve(responseObject)
+  }
+
+  controller = fetching({
+    request,
+    processResponseEndOfBody: handleFetchDone,
+    processResponse,
+    dispatcher: init.dispatcher ?? getGlobalDispatcher() // undici
+  })
+
+  // 14. Return p.
+  return p.promise
+}
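+// Illustrative usage of the fetch() entry point above:
+//
+//   const res = await fetch('https://example.com/', { method: 'GET' })
+//   res.status                        // e.g. 200
+//   res.headers.get('content-type')   // response headers are guard "immutable"
+//   const body = await res.text()
+//
+// Passing an AbortSignal via init.signal rejects the returned promise with the
+// signal's abort reason, per the abort steps registered above.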
+
+// https://fetch.spec.whatwg.org/#finalize-and-report-timing
+function finalizeAndReportTiming (response, initiatorType = 'other') {
+  // 1. If response is an aborted network error, then return.
+  if (response.type === 'error' && response.aborted) {
+    return
+  }
+
+  // 2. If response’s URL list is null or empty, then return.
+  if (!response.urlList?.length) {
+    return
+  }
+
+  // 3. Let originalURL be response’s URL list[0].
+  const originalURL = response.urlList[0]
+
+  // 4. Let timingInfo be response’s timing info.
+  let timingInfo = response.timingInfo
+
+  // 5. Let cacheState be response’s cache state.
+  let cacheState = response.cacheState
+
+  // 6. If originalURL’s scheme is not an HTTP(S) scheme, then return.
+  if (!urlIsHttpHttpsScheme(originalURL)) {
+    return
+  }
+
+  // 7. If timingInfo is null, then return.
+  if (timingInfo === null) {
+    return
+  }
+
+  // 8. If response’s timing allow passed flag is not set, then:
+  if (!response.timingAllowPassed) {
+    //  1. Set timingInfo to the result of creating an opaque timing info for timingInfo.
+    timingInfo = createOpaqueTimingInfo({
+      startTime: timingInfo.startTime
+    })
+
+    //  2. Set cacheState to the empty string.
+    cacheState = ''
+  }
+
+  // 9. Set timingInfo’s end time to the coarsened shared current time
+  // given global’s relevant settings object’s cross-origin isolated
+  // capability.
+  // TODO: given global’s relevant settings object’s cross-origin isolated
+  // capability?
+  timingInfo.endTime = coarsenedSharedCurrentTime()
+
+  // 10. Set response’s timing info to timingInfo.
+  response.timingInfo = timingInfo
+
+  // 11. Mark resource timing for timingInfo, originalURL, initiatorType,
+  // global, and cacheState.
+  markResourceTiming(
+    timingInfo,
+    originalURL,
+    initiatorType,
+    globalThis,
+    cacheState
+  )
+}
+
+// https://w3c.github.io/resource-timing/#dfn-mark-resource-timing
+function markResourceTiming (timingInfo, originalURL, initiatorType, globalThis, cacheState) {
+  if (nodeMajor > 18 || (nodeMajor === 18 && nodeMinor >= 2)) {
+    performance.markResourceTiming(timingInfo, originalURL.href, initiatorType, globalThis, cacheState)
+  }
+}
+
+// https://fetch.spec.whatwg.org/#abort-fetch
+function abortFetch (p, request, responseObject, error) {
+  // Note: AbortSignal.reason was added in node v17.2.0
+  // which would give us an undefined error to reject with.
+  // Remove this once node v16 is no longer supported.
+  if (!error) {
+    error = new DOMException('The operation was aborted.', 'AbortError')
+  }
+
+  // 1. Reject promise with error.
+  p.reject(error)
+
+  // 2. If request’s body is not null and is readable, then cancel request’s
+  // body with error.
+  if (request.body != null && isReadable(request.body?.stream)) {
+    request.body.stream.cancel(error).catch((err) => {
+      if (err.code === 'ERR_INVALID_STATE') {
+        // Node bug?
+        return
+      }
+      throw err
+    })
+  }
+
+  // 3. If responseObject is null, then return.
+  if (responseObject == null) {
+    return
+  }
+
+  // 4. Let response be responseObject’s response.
+  const response = responseObject[kState]
+
+  // 5. If response’s body is not null and is readable, then error response’s
+  // body with error.
+  if (response.body != null && isReadable(response.body?.stream)) {
+    response.body.stream.cancel(error).catch((err) => {
+      if (err.code === 'ERR_INVALID_STATE') {
+        // Node bug?
+        return
+      }
+      throw err
+    })
+  }
+}
+
+// https://fetch.spec.whatwg.org/#fetching
+function fetching ({
+  request,
+  processRequestBodyChunkLength,
+  processRequestEndOfBody,
+  processResponse,
+  processResponseEndOfBody,
+  processResponseConsumeBody,
+  useParallelQueue = false,
+  dispatcher // undici
+}) {
+  // 1. Let taskDestination be null.
+  let taskDestination = null
+
+  // 2. Let crossOriginIsolatedCapability be false.
+  let crossOriginIsolatedCapability = false
+
+  // 3. If request’s client is non-null, then:
+  if (request.client != null) {
+    // 1. Set taskDestination to request’s client’s global object.
+    taskDestination = request.client.globalObject
+
+    // 2. Set crossOriginIsolatedCapability to request’s client’s cross-origin
+    // isolated capability.
+    crossOriginIsolatedCapability =
+      request.client.crossOriginIsolatedCapability
+  }
+
+  // 4. If useParallelQueue is true, then set taskDestination to the result of
+  // starting a new parallel queue.
+  // TODO
+
+  // 5. Let timingInfo be a new fetch timing info whose start time and
+  // post-redirect start time are the coarsened shared current time given
+  // crossOriginIsolatedCapability.
+  const currentTime = coarsenedSharedCurrentTime(crossOriginIsolatedCapability)
+  const timingInfo = createOpaqueTimingInfo({
+    startTime: currentTime
+  })
+
+  // 6. Let fetchParams be a new fetch params whose
+  // request is request,
+  // timing info is timingInfo,
+  // process request body chunk length is processRequestBodyChunkLength,
+  // process request end-of-body is processRequestEndOfBody,
+  // process response is processResponse,
+  // process response consume body is processResponseConsumeBody,
+  // process response end-of-body is processResponseEndOfBody,
+  // task destination is taskDestination,
+  // and cross-origin isolated capability is crossOriginIsolatedCapability.
+  const fetchParams = {
+    controller: new Fetch(dispatcher),
+    request,
+    timingInfo,
+    processRequestBodyChunkLength,
+    processRequestEndOfBody,
+    processResponse,
+    processResponseConsumeBody,
+    processResponseEndOfBody,
+    taskDestination,
+    crossOriginIsolatedCapability
+  }
+
+  // 7. If request’s body is a byte sequence, then set request’s body to
+  //    request’s body as a body.
+  // NOTE: Since fetching is only called from fetch, body should already be
+  // extracted.
+  assert(!request.body || request.body.stream)
+
+  // 8. If request’s window is "client", then set request’s window to request’s
+  // client, if request’s client’s global object is a Window object; otherwise
+  // "no-window".
+  if (request.window === 'client') {
+    // TODO: What if request.client is null?
+    request.window =
+      request.client?.globalObject?.constructor?.name === 'Window'
+        ? request.client
+        : 'no-window'
+  }
+
+  // 9. If request’s origin is "client", then set request’s origin to request’s
+  // client’s origin.
+  if (request.origin === 'client') {
+    // TODO: What if request.client is null?
+    request.origin = request.client?.origin
+  }
+
+  // 10. If all of the following conditions are true:
+  // TODO
+
+  // 11. If request’s policy container is "client", then:
+  if (request.policyContainer === 'client') {
+    // 1. If request’s client is non-null, then set request’s policy
+    // container to a clone of request’s client’s policy container. [HTML]
+    if (request.client != null) {
+      request.policyContainer = clonePolicyContainer(
+        request.client.policyContainer
+      )
+    } else {
+      // 2. Otherwise, set request’s policy container to a new policy
+      // container.
+      request.policyContainer = makePolicyContainer()
+    }
+  }
+
+  // 12. If request’s header list does not contain `Accept`, then:
+  if (!request.headersList.contains('accept')) {
+    // 1. Let value be `*/*`.
+    const value = '*/*'
+
+    // 2. A user agent should set value to the first matching statement, if
+    // any, switching on request’s destination:
+    // "document"
+    // "frame"
+    // "iframe"
+    // `text/html,application/xhtml+xml,application/xml;q=0.9,*/*;q=0.8`
+    // "image"
+    // `image/png,image/svg+xml,image/*;q=0.8,*/*;q=0.5`
+    // "style"
+    // `text/css,*/*;q=0.1`
+    // TODO
+
+    // 3. Append `Accept`/value to request’s header list.
+    request.headersList.append('accept', value)
+  }
+
+  // 13. If request’s header list does not contain `Accept-Language`, then
+  // user agents should append `Accept-Language`/an appropriate value to
+  // request’s header list.
+  if (!request.headersList.contains('accept-language')) {
+    request.headersList.append('accept-language', '*')
+  }
+
+  // 14. If request’s priority is null, then use request’s initiator and
+  // destination appropriately in setting request’s priority to a
+  // user-agent-defined object.
+  if (request.priority === null) {
+    // TODO
+  }
+
+  // 15. If request is a subresource request, then:
+  if (subresourceSet.has(request.destination)) {
+    // TODO
+  }
+
+  // 16. Run main fetch given fetchParams.
+  mainFetch(fetchParams)
+    .catch(err => {
+      fetchParams.controller.terminate(err)
+    })
+
+  // 17. Return fetchParams’s controller.
+  return fetchParams.controller
+}
+
+// https://fetch.spec.whatwg.org/#concept-main-fetch
+async function mainFetch (fetchParams, recursive = false) {
+  // 1. Let request be fetchParams’s request.
+  const request = fetchParams.request
+
+  // 2. Let response be null.
+  let response = null
+
+  // 3. If request’s local-URLs-only flag is set and request’s current URL is
+  // not local, then set response to a network error.
+  if (request.localURLsOnly && !urlIsLocal(requestCurrentURL(request))) {
+    response = makeNetworkError('local URLs only')
+  }
+
+  // 4. Run report Content Security Policy violations for request.
+  // TODO
+
+  // 5. Upgrade request to a potentially trustworthy URL, if appropriate.
+  tryUpgradeRequestToAPotentiallyTrustworthyURL(request)
+
+  // 6. If should request be blocked due to a bad port, should fetching request
+  // be blocked as mixed content, or should request be blocked by Content
+  // Security Policy returns blocked, then set response to a network error.
+  if (requestBadPort(request) === 'blocked') {
+    response = makeNetworkError('bad port')
+  }
+  // TODO: should fetching request be blocked as mixed content?
+  // TODO: should request be blocked by Content Security Policy?
+
+  // 7. If request’s referrer policy is the empty string, then set request’s
+  // referrer policy to request’s policy container’s referrer policy.
+  if (request.referrerPolicy === '') {
+    request.referrerPolicy = request.policyContainer.referrerPolicy
+  }
+
+  // 8. If request’s referrer is not "no-referrer", then set request’s
+  // referrer to the result of invoking determine request’s referrer.
+  if (request.referrer !== 'no-referrer') {
+    request.referrer = determineRequestsReferrer(request)
+  }
+
+  // 9. Set request’s current URL’s scheme to "https" if all of the following
+  // conditions are true:
+  // - request’s current URL’s scheme is "http"
+  // - request’s current URL’s host is a domain
+  // - Matching request’s current URL’s host per Known HSTS Host Domain Name
+  //   Matching results in either a superdomain match with an asserted
+  //   includeSubDomains directive or a congruent match (with or without an
+  //   asserted includeSubDomains directive). [HSTS]
+  // TODO
+
+  // 10. If recursive is false, then run the remaining steps in parallel.
+  // TODO
+
+  // 11. If response is null, then set response to the result of running
+  // the steps corresponding to the first matching statement:
+  if (response === null) {
+    response = await (async () => {
+      const currentURL = requestCurrentURL(request)
+
+      if (
+        // - request’s current URL’s origin is same origin with request’s origin,
+        //   and request’s response tainting is "basic"
+        (sameOrigin(currentURL, request.url) && request.responseTainting === 'basic') ||
+        // request’s current URL’s scheme is "data"
+        (currentURL.protocol === 'data:') ||
+        // - request’s mode is "navigate" or "websocket"
+        (request.mode === 'navigate' || request.mode === 'websocket')
+      ) {
+        // 1. Set request’s response tainting to "basic".
+        request.responseTainting = 'basic'
+
+        // 2. Return the result of running scheme fetch given fetchParams.
+        return await schemeFetch(fetchParams)
+      }
+
+      // request’s mode is "same-origin"
+      if (request.mode === 'same-origin') {
+        // 1. Return a network error.
+        return makeNetworkError('request mode cannot be "same-origin"')
+      }
+
+      // request’s mode is "no-cors"
+      if (request.mode === 'no-cors') {
+        // 1. If request’s redirect mode is not "follow", then return a network
+        // error.
+        if (request.redirect !== 'follow') {
+          return makeNetworkError(
+            'redirect mode cannot be "follow" for "no-cors" request'
+          )
+        }
+
+        // 2. Set request’s response tainting to "opaque".
+        request.responseTainting = 'opaque'
+
+        // 3. Return the result of running scheme fetch given fetchParams.
+        return await schemeFetch(fetchParams)
+      }
+
+      // request’s current URL’s scheme is not an HTTP(S) scheme
+      if (!urlIsHttpHttpsScheme(requestCurrentURL(request))) {
+        // Return a network error.
+        return makeNetworkError('URL scheme must be a HTTP(S) scheme')
+      }
+
+      // - request’s use-CORS-preflight flag is set
+      // - request’s unsafe-request flag is set and either request’s method is
+      //   not a CORS-safelisted method or CORS-unsafe request-header names with
+      //   request’s header list is not empty
+      //    1. Set request’s response tainting to "cors".
+      //    2. Let corsWithPreflightResponse be the result of running HTTP fetch
+      //    given fetchParams and true.
+      //    3. If corsWithPreflightResponse is a network error, then clear cache
+      //    entries using request.
+      //    4. Return corsWithPreflightResponse.
+      // TODO
+
+      // Otherwise
+      //    1. Set request’s response tainting to "cors".
+      request.responseTainting = 'cors'
+
+      //    2. Return the result of running HTTP fetch given fetchParams.
+      return await httpFetch(fetchParams)
+    })()
+  }
+
+  // 12. If recursive is true, then return response.
+  if (recursive) {
+    return response
+  }
+
+  // 13. If response is not a network error and response is not a filtered
+  // response, then:
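+  // Note: a network error is modelled as a response whose status is 0, and a
+  // filtered response carries its internal response on `internalResponse`
+  // (see step 14 below).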
+  if (response.status !== 0 && !response.internalResponse) {
+    // If request’s response tainting is "cors", then:
+    if (request.responseTainting === 'cors') {
+      // 1. Let headerNames be the result of extracting header list values
+      // given `Access-Control-Expose-Headers` and response’s header list.
+      // TODO
+      // 2. If request’s credentials mode is not "include" and headerNames
+      // contains `*`, then set response’s CORS-exposed header-name list to
+      // all unique header names in response’s header list.
+      // TODO
+      // 3. Otherwise, if headerNames is not null or failure, then set
+      // response’s CORS-exposed header-name list to headerNames.
+      // TODO
+    }
+
+    // Set response to the following filtered response with response as its
+    // internal response, depending on request’s response tainting:
+    if (request.responseTainting === 'basic') {
+      response = filterResponse(response, 'basic')
+    } else if (request.responseTainting === 'cors') {
+      response = filterResponse(response, 'cors')
+    } else if (request.responseTainting === 'opaque') {
+      response = filterResponse(response, 'opaque')
+    } else {
+      assert(false)
+    }
+  }
+
+  // 14. Let internalResponse be response, if response is a network error,
+  // and response’s internal response otherwise.
+  let internalResponse =
+    response.status === 0 ? response : response.internalResponse
+
+  // 15. If internalResponse’s URL list is empty, then set it to a clone of
+  // request’s URL list.
+  if (internalResponse.urlList.length === 0) {
+    internalResponse.urlList.push(...request.urlList)
+  }
+
+  // 16. If request’s timing allow failed flag is unset, then set
+  // internalResponse’s timing allow passed flag.
+  if (!request.timingAllowFailed) {
+    response.timingAllowPassed = true
+  }
+
+  // 17. If response is not a network error and any of the following returns
+  // blocked
+  // - should internalResponse to request be blocked as mixed content
+  // - should internalResponse to request be blocked by Content Security Policy
+  // - should internalResponse to request be blocked due to its MIME type
+  // - should internalResponse to request be blocked due to nosniff
+  // TODO
+
+  // 18. If response’s type is "opaque", internalResponse’s status is 206,
+  // internalResponse’s range-requested flag is set, and request’s header
+  // list does not contain `Range`, then set response and internalResponse
+  // to a network error.
+  if (
+    response.type === 'opaque' &&
+    internalResponse.status === 206 &&
+    internalResponse.rangeRequested &&
+    !request.headersList.contains('range')
+  ) {
+    response = internalResponse = makeNetworkError()
+  }
+
+  // 19. If response is not a network error and either request’s method is
+  // `HEAD` or `CONNECT`, or internalResponse’s status is a null body status,
+  // set internalResponse’s body to null and disregard any enqueuing toward
+  // it (if any).
+  if (
+    response.status !== 0 &&
+    (request.method === 'HEAD' ||
+      request.method === 'CONNECT' ||
+      nullBodyStatus.includes(internalResponse.status))
+  ) {
+    internalResponse.body = null
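+    // Note: the dump flag tells onData (in httpNetworkFetch) to discard any
+    // body bytes that may still arrive for this response.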
+    fetchParams.controller.dump = true
+  }
+
+  // 20. If request’s integrity metadata is not the empty string, then:
+  if (request.integrity) {
+    // 1. Let processBodyError be this step: run fetch finale given fetchParams
+    // and a network error.
+    const processBodyError = (reason) =>
+      fetchFinale(fetchParams, makeNetworkError(reason))
+
+    // 2. If request’s response tainting is "opaque", or response’s body is null,
+    // then run processBodyError and abort these steps.
+    if (request.responseTainting === 'opaque' || response.body == null) {
+      processBodyError(response.error)
+      return
+    }
+
+    // 3. Let processBody given bytes be these steps:
+    const processBody = (bytes) => {
+      // 1. If bytes do not match request’s integrity metadata,
+      // then run processBodyError and abort these steps. [SRI]
+      if (!bytesMatch(bytes, request.integrity)) {
+        processBodyError('integrity mismatch')
+        return
+      }
+
+      // 2. Set response’s body to bytes as a body.
+      response.body = safelyExtractBody(bytes)[0]
+
+      // 3. Run fetch finale given fetchParams and response.
+      fetchFinale(fetchParams, response)
+    }
+
+    // 4. Fully read response’s body given processBody and processBodyError.
+    await fullyReadBody(response.body, processBody, processBodyError)
+  } else {
+    // 21. Otherwise, run fetch finale given fetchParams and response.
+    fetchFinale(fetchParams, response)
+  }
+}
+
+// https://fetch.spec.whatwg.org/#concept-scheme-fetch
+// given a fetch params fetchParams
+function schemeFetch (fetchParams) {
+  // Note: since the connection is destroyed on redirect, which sets fetchParams to a
+  // cancelled state, we do not want this condition to trigger *unless* there have been
+  // no redirects. See https://github.com/nodejs/undici/issues/1776
+  // 1. If fetchParams is canceled, then return the appropriate network error for fetchParams.
+  if (isCancelled(fetchParams) && fetchParams.request.redirectCount === 0) {
+    return Promise.resolve(makeAppropriateNetworkError(fetchParams))
+  }
+
+  // 2. Let request be fetchParams’s request.
+  const { request } = fetchParams
+
+  const { protocol: scheme } = requestCurrentURL(request)
+
+  // 3. Switch on request’s current URL’s scheme and run the associated steps:
+  switch (scheme) {
+    case 'about:': {
+      // If request’s current URL’s path is the string "blank", then return a new response
+      // whose status message is `OK`, header list is « (`Content-Type`, `text/html;charset=utf-8`) »,
+      // and body is the empty byte sequence as a body.
+
+      // Otherwise, return a network error.
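+      // Note: the about:blank case described above is not implemented here;
+      // every about: URL takes the network-error path below.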
+      return Promise.resolve(makeNetworkError('about scheme is not supported'))
+    }
+    case 'blob:': {
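+      // Note: Buffer's resolveObjectURL is required lazily, the first time a
+      // blob: URL is fetched.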
+      if (!resolveObjectURL) {
+        resolveObjectURL = (__nccwpck_require__(181).resolveObjectURL)
+      }
+
+      // 1. Let blobURLEntry be request’s current URL’s blob URL entry.
+      const blobURLEntry = requestCurrentURL(request)
+
+      // https://github.com/web-platform-tests/wpt/blob/7b0ebaccc62b566a1965396e5be7bb2bc06f841f/FileAPI/url/resources/fetch-tests.js#L52-L56
+      // Buffer.resolveObjectURL does not ignore URL queries.
+      if (blobURLEntry.search.length !== 0) {
+        return Promise.resolve(makeNetworkError('NetworkError when attempting to fetch resource.'))
+      }
+
+      const blobURLEntryObject = resolveObjectURL(blobURLEntry.toString())
+
+      // 2. If request’s method is not `GET`, blobURLEntry is null, or blobURLEntry’s
+      //    object is not a Blob object, then return a network error.
+      if (request.method !== 'GET' || !isBlobLike(blobURLEntryObject)) {
+        return Promise.resolve(makeNetworkError('invalid method'))
+      }
+
+      // 3. Let bodyWithType be the result of safely extracting blobURLEntry’s object.
+      const bodyWithType = safelyExtractBody(blobURLEntryObject)
+
+      // 4. Let body be bodyWithType’s body.
+      const body = bodyWithType[0]
+
+      // 5. Let length be body’s length, serialized and isomorphic encoded.
+      const length = isomorphicEncode(`${body.length}`)
+
+      // 6. Let type be bodyWithType’s type if it is non-null; otherwise the empty byte sequence.
+      const type = bodyWithType[1] ?? ''
+
+      // 7. Return a new response whose status message is `OK`, header list is
+      //    « (`Content-Length`, length), (`Content-Type`, type) », and body is body.
+      const response = makeResponse({
+        statusText: 'OK',
+        headersList: [
+          ['content-length', { name: 'Content-Length', value: length }],
+          ['content-type', { name: 'Content-Type', value: type }]
+        ]
+      })
+
+      response.body = body
+
+      return Promise.resolve(response)
+    }
+    case 'data:': {
+      // 1. Let dataURLStruct be the result of running the
+      //    data: URL processor on request’s current URL.
+      const currentURL = requestCurrentURL(request)
+      const dataURLStruct = dataURLProcessor(currentURL)
+
+      // 2. If dataURLStruct is failure, then return a
+      //    network error.
+      if (dataURLStruct === 'failure') {
+        return Promise.resolve(makeNetworkError('failed to fetch the data URL'))
+      }
+
+      // 3. Let mimeType be dataURLStruct’s MIME type, serialized.
+      const mimeType = serializeAMimeType(dataURLStruct.mimeType)
+
+      // 4. Return a response whose status message is `OK`,
+      //    header list is « (`Content-Type`, mimeType) »,
+      //    and body is dataURLStruct’s body as a body.
+      return Promise.resolve(makeResponse({
+        statusText: 'OK',
+        headersList: [
+          ['content-type', { name: 'Content-Type', value: mimeType }]
+        ],
+        body: safelyExtractBody(dataURLStruct.body)[0]
+      }))
+    }
+    case 'file:': {
+      // For now, unfortunate as it is, file URLs are left as an exercise for the reader.
+      // When in doubt, return a network error.
+      return Promise.resolve(makeNetworkError('not implemented... yet...'))
+    }
+    case 'http:':
+    case 'https:': {
+      // Return the result of running HTTP fetch given fetchParams.
+
+      return httpFetch(fetchParams)
+        .catch((err) => makeNetworkError(err))
+    }
+    default: {
+      return Promise.resolve(makeNetworkError('unknown scheme'))
+    }
+  }
+}
+
+// https://fetch.spec.whatwg.org/#finalize-response
+function finalizeResponse (fetchParams, response) {
+  // 1. Set fetchParams’s request’s done flag.
+  fetchParams.request.done = true
+
+  // 2. If fetchParams’s process response done is not null, then queue a fetch
+  // task to run fetchParams’s process response done given response, with
+  // fetchParams’s task destination.
+  if (fetchParams.processResponseDone != null) {
+    queueMicrotask(() => fetchParams.processResponseDone(response))
+  }
+}
+
+// https://fetch.spec.whatwg.org/#fetch-finale
+function fetchFinale (fetchParams, response) {
+  // 1. If response is a network error, then:
+  if (response.type === 'error') {
+    // 1. Set response’s URL list to « fetchParams’s request’s URL list[0] ».
+    response.urlList = [fetchParams.request.urlList[0]]
+
+    // 2. Set response’s timing info to the result of creating an opaque timing
+    // info for fetchParams’s timing info.
+    response.timingInfo = createOpaqueTimingInfo({
+      startTime: fetchParams.timingInfo.startTime
+    })
+  }
+
+  // 2. Let processResponseEndOfBody be the following steps:
+  const processResponseEndOfBody = () => {
+    // 1. Set fetchParams’s request’s done flag.
+    fetchParams.request.done = true
+
+    // If fetchParams’s process response end-of-body is not null,
+    // then queue a fetch task to run fetchParams’s process response
+    // end-of-body given response with fetchParams’s task destination.
+    if (fetchParams.processResponseEndOfBody != null) {
+      queueMicrotask(() => fetchParams.processResponseEndOfBody(response))
+    }
+  }
+
+  // 3. If fetchParams’s process response is non-null, then queue a fetch task
+  // to run fetchParams’s process response given response, with fetchParams’s
+  // task destination.
+  if (fetchParams.processResponse != null) {
+    queueMicrotask(() => fetchParams.processResponse(response))
+  }
+
+  // 4. If response’s body is null, then run processResponseEndOfBody.
+  if (response.body == null) {
+    processResponseEndOfBody()
+  } else {
+    // 5. Otherwise:
+
+    // 1. Let transformStream be a new TransformStream.
+
+    // 2. Let identityTransformAlgorithm be an algorithm which, given chunk,
+    // enqueues chunk in transformStream.
+    const identityTransformAlgorithm = (chunk, controller) => {
+      controller.enqueue(chunk)
+    }
+
+    // 3. Set up transformStream with transformAlgorithm set to identityTransformAlgorithm
+    // and flushAlgorithm set to processResponseEndOfBody.
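+    // Note: the two trailing objects are the writable and readable queuing
+    // strategies; both report a chunk size of 1.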
+    const transformStream = new TransformStream({
+      start () {},
+      transform: identityTransformAlgorithm,
+      flush: processResponseEndOfBody
+    }, {
+      size () {
+        return 1
+      }
+    }, {
+      size () {
+        return 1
+      }
+    })
+
+    // 4. Set response’s body to the result of piping response’s body through transformStream.
+    response.body = { stream: response.body.stream.pipeThrough(transformStream) }
+  }
+
+  // 6. If fetchParams’s process response consume body is non-null, then:
+  if (fetchParams.processResponseConsumeBody != null) {
+    // 1. Let processBody given nullOrBytes be this step: run fetchParams’s
+    // process response consume body given response and nullOrBytes.
+    const processBody = (nullOrBytes) => fetchParams.processResponseConsumeBody(response, nullOrBytes)
+
+    // 2. Let processBodyError be this step: run fetchParams’s process
+    // response consume body given response and failure.
+    const processBodyError = (failure) => fetchParams.processResponseConsumeBody(response, failure)
+
+    // 3. If response’s body is null, then queue a fetch task to run processBody
+    // given null, with fetchParams’s task destination.
+    if (response.body == null) {
+      queueMicrotask(() => processBody(null))
+    } else {
+      // 4. Otherwise, fully read response’s body given processBody, processBodyError,
+      // and fetchParams’s task destination.
+      return fullyReadBody(response.body, processBody, processBodyError)
+    }
+    return Promise.resolve()
+  }
+}
+
+// https://fetch.spec.whatwg.org/#http-fetch
+async function httpFetch (fetchParams) {
+  // 1. Let request be fetchParams’s request.
+  const request = fetchParams.request
+
+  // 2. Let response be null.
+  let response = null
+
+  // 3. Let actualResponse be null.
+  let actualResponse = null
+
+  // 4. Let timingInfo be fetchParams’s timing info.
+  const timingInfo = fetchParams.timingInfo
+
+  // 5. If request’s service-workers mode is "all", then:
+  if (request.serviceWorkers === 'all') {
+    // TODO
+  }
+
+  // 6. If response is null, then:
+  if (response === null) {
+    // 1. If makeCORSPreflight is true and one of these conditions is true:
+    // TODO
+
+    // 2. If request’s redirect mode is "follow", then set request’s
+    // service-workers mode to "none".
+    if (request.redirect === 'follow') {
+      request.serviceWorkers = 'none'
+    }
+
+    // 3. Set response and actualResponse to the result of running
+    // HTTP-network-or-cache fetch given fetchParams.
+    actualResponse = response = await httpNetworkOrCacheFetch(fetchParams)
+
+    // 4. If request’s response tainting is "cors" and a CORS check
+    // for request and response returns failure, then return a network error.
+    if (
+      request.responseTainting === 'cors' &&
+      corsCheck(request, response) === 'failure'
+    ) {
+      return makeNetworkError('cors failure')
+    }
+
+    // 5. If the TAO check for request and response returns failure, then set
+    // request’s timing allow failed flag.
+    if (TAOCheck(request, response) === 'failure') {
+      request.timingAllowFailed = true
+    }
+  }
+
+  // 7. If either request’s response tainting or response’s type
+  // is "opaque", and the cross-origin resource policy check with
+  // request’s origin, request’s client, request’s destination,
+  // and actualResponse returns blocked, then return a network error.
+  if (
+    (request.responseTainting === 'opaque' || response.type === 'opaque') &&
+    crossOriginResourcePolicyCheck(
+      request.origin,
+      request.client,
+      request.destination,
+      actualResponse
+    ) === 'blocked'
+  ) {
+    return makeNetworkError('blocked')
+  }
+
+  // 8. If actualResponse’s status is a redirect status, then:
+  if (redirectStatusSet.has(actualResponse.status)) {
+    // 1. If actualResponse’s status is not 303, request’s body is not null,
+    // and the connection uses HTTP/2, then user agents may, and are even
+    // encouraged to, transmit an RST_STREAM frame.
+    // See, https://github.com/whatwg/fetch/issues/1288
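+    // Note: the connection is destroyed for every redirect mode except
+    // "manual", where actualResponse is returned as-is below.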
+    if (request.redirect !== 'manual') {
+      fetchParams.controller.connection.destroy()
+    }
+
+    // 2. Switch on request’s redirect mode:
+    if (request.redirect === 'error') {
+      // Set response to a network error.
+      response = makeNetworkError('unexpected redirect')
+    } else if (request.redirect === 'manual') {
+      // Set response to an opaque-redirect filtered response whose internal
+      // response is actualResponse.
+      // NOTE(spec): On the web this would return an `opaqueredirect` response,
+      // but that doesn't make sense server side.
+      // See https://github.com/nodejs/undici/issues/1193.
+      response = actualResponse
+    } else if (request.redirect === 'follow') {
+      // Set response to the result of running HTTP-redirect fetch given
+      // fetchParams and response.
+      response = await httpRedirectFetch(fetchParams, response)
+    } else {
+      assert(false)
+    }
+  }
+
+  // 9. Set response’s timing info to timingInfo.
+  response.timingInfo = timingInfo
+
+  // 10. Return response.
+  return response
+}
+
+// https://fetch.spec.whatwg.org/#http-redirect-fetch
+function httpRedirectFetch (fetchParams, response) {
+  // 1. Let request be fetchParams’s request.
+  const request = fetchParams.request
+
+  // 2. Let actualResponse be response, if response is not a filtered response,
+  // and response’s internal response otherwise.
+  const actualResponse = response.internalResponse
+    ? response.internalResponse
+    : response
+
+  // 3. Let locationURL be actualResponse’s location URL given request’s current
+  // URL’s fragment.
+  let locationURL
+
+  try {
+    locationURL = responseLocationURL(
+      actualResponse,
+      requestCurrentURL(request).hash
+    )
+
+    // 4. If locationURL is null, then return response.
+    if (locationURL == null) {
+      return response
+    }
+  } catch (err) {
+    // 5. If locationURL is failure, then return a network error.
+    return Promise.resolve(makeNetworkError(err))
+  }
+
+  // 6. If locationURL’s scheme is not an HTTP(S) scheme, then return a network
+  // error.
+  if (!urlIsHttpHttpsScheme(locationURL)) {
+    return Promise.resolve(makeNetworkError('URL scheme must be a HTTP(S) scheme'))
+  }
+
+  // 7. If request’s redirect count is 20, then return a network error.
+  if (request.redirectCount === 20) {
+    return Promise.resolve(makeNetworkError('redirect count exceeded'))
+  }
+
+  // 8. Increase request’s redirect count by 1.
+  request.redirectCount += 1
+
+  // 9. If request’s mode is "cors", locationURL includes credentials, and
+  // request’s origin is not same origin with locationURL’s origin, then return
+  //  a network error.
+  if (
+    request.mode === 'cors' &&
+    (locationURL.username || locationURL.password) &&
+    !sameOrigin(request, locationURL)
+  ) {
+    return Promise.resolve(makeNetworkError('cross origin not allowed for request mode "cors"'))
+  }
+
+  // 10. If request’s response tainting is "cors" and locationURL includes
+  // credentials, then return a network error.
+  if (
+    request.responseTainting === 'cors' &&
+    (locationURL.username || locationURL.password)
+  ) {
+    return Promise.resolve(makeNetworkError(
+      'URL cannot contain credentials for request mode "cors"'
+    ))
+  }
+
+  // 11. If actualResponse’s status is not 303, request’s body is non-null,
+  // and request’s body’s source is null, then return a network error.
+  if (
+    actualResponse.status !== 303 &&
+    request.body != null &&
+    request.body.source == null
+  ) {
+    return Promise.resolve(makeNetworkError())
+  }
+
+  // 12. If one of the following is true
+  // - actualResponse’s status is 301 or 302 and request’s method is `POST`
+  // - actualResponse’s status is 303 and request’s method is not `GET` or `HEAD`
+  if (
+    ([301, 302].includes(actualResponse.status) && request.method === 'POST') ||
+    (actualResponse.status === 303 &&
+      !GET_OR_HEAD.includes(request.method))
+  ) {
+    // then:
+    // 1. Set request’s method to `GET` and request’s body to null.
+    request.method = 'GET'
+    request.body = null
+
+    // 2. For each headerName of request-body-header name, delete headerName from
+    // request’s header list.
+    for (const headerName of requestBodyHeader) {
+      request.headersList.delete(headerName)
+    }
+  }
+
+  // 13. If request’s current URL’s origin is not same origin with locationURL’s
+  //     origin, then for each headerName of CORS non-wildcard request-header name,
+  //     delete headerName from request’s header list.
+  if (!sameOrigin(requestCurrentURL(request), locationURL)) {
+    // https://fetch.spec.whatwg.org/#cors-non-wildcard-request-header-name
+    request.headersList.delete('authorization')
+
+    // https://fetch.spec.whatwg.org/#authentication-entries
+    request.headersList.delete('proxy-authorization', true)
+
+    // "Cookie" and "Host" are forbidden request-headers, which undici doesn't implement.
+    request.headersList.delete('cookie')
+    request.headersList.delete('host')
+  }
+
+  // 14. If request’s body is non-null, then set request’s body to the first return
+  // value of safely extracting request’s body’s source.
+  if (request.body != null) {
+    assert(request.body.source != null)
+    request.body = safelyExtractBody(request.body.source)[0]
+  }
+
+  // 15. Let timingInfo be fetchParams’s timing info.
+  const timingInfo = fetchParams.timingInfo
+
+  // 16. Set timingInfo’s redirect end time and post-redirect start time to the
+  // coarsened shared current time given fetchParams’s cross-origin isolated
+  // capability.
+  timingInfo.redirectEndTime = timingInfo.postRedirectStartTime =
+    coarsenedSharedCurrentTime(fetchParams.crossOriginIsolatedCapability)
+
+  // 17. If timingInfo’s redirect start time is 0, then set timingInfo’s
+  //  redirect start time to timingInfo’s start time.
+  if (timingInfo.redirectStartTime === 0) {
+    timingInfo.redirectStartTime = timingInfo.startTime
+  }
+
+  // 18. Append locationURL to request’s URL list.
+  request.urlList.push(locationURL)
+
+  // 19. Invoke set request’s referrer policy on redirect on request and
+  // actualResponse.
+  setRequestReferrerPolicyOnRedirect(request, actualResponse)
+
+  // 20. Return the result of running main fetch given fetchParams and true.
+  return mainFetch(fetchParams, true)
+}
+
+// https://fetch.spec.whatwg.org/#http-network-or-cache-fetch
+async function httpNetworkOrCacheFetch (
+  fetchParams,
+  isAuthenticationFetch = false,
+  isNewConnectionFetch = false
+) {
+  // 1. Let request be fetchParams’s request.
+  const request = fetchParams.request
+
+  // 2. Let httpFetchParams be null.
+  let httpFetchParams = null
+
+  // 3. Let httpRequest be null.
+  let httpRequest = null
+
+  // 4. Let response be null.
+  let response = null
+
+  // 5. Let storedResponse be null.
+  // TODO: cache
+
+  // 6. Let httpCache be null.
+  const httpCache = null
+
+  // 7. Let the revalidatingFlag be unset.
+  const revalidatingFlag = false
+
+  // 8. Run these steps, but abort when the ongoing fetch is terminated:
+
+  //    1. If request’s window is "no-window" and request’s redirect mode is
+  //    "error", then set httpFetchParams to fetchParams and httpRequest to
+  //    request.
+  if (request.window === 'no-window' && request.redirect === 'error') {
+    httpFetchParams = fetchParams
+    httpRequest = request
+  } else {
+    // Otherwise:
+
+    // 1. Set httpRequest to a clone of request.
+    httpRequest = makeRequest(request)
+
+    // 2. Set httpFetchParams to a copy of fetchParams.
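+    // Note: a shallow copy suffices here; only the request property is
+    // replaced in the next step.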
+    httpFetchParams = { ...fetchParams }
+
+    // 3. Set httpFetchParams’s request to httpRequest.
+    httpFetchParams.request = httpRequest
+  }
+
+  //    3. Let includeCredentials be true if one of
+  const includeCredentials =
+    request.credentials === 'include' ||
+    (request.credentials === 'same-origin' &&
+      request.responseTainting === 'basic')
+
+  //    4. Let contentLength be httpRequest’s body’s length, if httpRequest’s
+  //    body is non-null; otherwise null.
+  const contentLength = httpRequest.body ? httpRequest.body.length : null
+
+  //    5. Let contentLengthHeaderValue be null.
+  let contentLengthHeaderValue = null
+
+  //    6. If httpRequest’s body is null and httpRequest’s method is `POST` or
+  //    `PUT`, then set contentLengthHeaderValue to `0`.
+  if (
+    httpRequest.body == null &&
+    ['POST', 'PUT'].includes(httpRequest.method)
+  ) {
+    contentLengthHeaderValue = '0'
+  }
+
+  //    7. If contentLength is non-null, then set contentLengthHeaderValue to
+  //    contentLength, serialized and isomorphic encoded.
+  if (contentLength != null) {
+    contentLengthHeaderValue = isomorphicEncode(`${contentLength}`)
+  }
+
+  //    8. If contentLengthHeaderValue is non-null, then append
+  //    `Content-Length`/contentLengthHeaderValue to httpRequest’s header
+  //    list.
+  if (contentLengthHeaderValue != null) {
+    httpRequest.headersList.append('content-length', contentLengthHeaderValue)
+  }
+
+  //    9. If contentLengthHeaderValue is non-null, then append (`Content-Length`,
+  //    contentLengthHeaderValue) to httpRequest’s header list.
+
+  //    10. If contentLength is non-null and httpRequest’s keepalive is true,
+  //    then:
+  if (contentLength != null && httpRequest.keepalive) {
+    // NOTE: keepalive is a noop outside of browser context.
+  }
+
+  //    11. If httpRequest’s referrer is a URL, then append
+  //    `Referer`/httpRequest’s referrer, serialized and isomorphic encoded,
+  //     to httpRequest’s header list.
+  if (httpRequest.referrer instanceof URL) {
+    httpRequest.headersList.append('referer', isomorphicEncode(httpRequest.referrer.href))
+  }
+
+  //    12. Append a request `Origin` header for httpRequest.
+  appendRequestOriginHeader(httpRequest)
+
+  //    13. Append the Fetch metadata headers for httpRequest. [FETCH-METADATA]
+  appendFetchMetadata(httpRequest)
+
+  //    14. If httpRequest’s header list does not contain `User-Agent`, then
+  //    user agents should append `User-Agent`/default `User-Agent` value to
+  //    httpRequest’s header list.
+  if (!httpRequest.headersList.contains('user-agent')) {
+    httpRequest.headersList.append('user-agent', typeof esbuildDetection === 'undefined' ? 'undici' : 'node')
+  }
+
+  //    15. If httpRequest’s cache mode is "default" and httpRequest’s header
+  //    list contains `If-Modified-Since`, `If-None-Match`,
+  //    `If-Unmodified-Since`, `If-Match`, or `If-Range`, then set
+  //    httpRequest’s cache mode to "no-store".
+  if (
+    httpRequest.cache === 'default' &&
+    (httpRequest.headersList.contains('if-modified-since') ||
+      httpRequest.headersList.contains('if-none-match') ||
+      httpRequest.headersList.contains('if-unmodified-since') ||
+      httpRequest.headersList.contains('if-match') ||
+      httpRequest.headersList.contains('if-range'))
+  ) {
+    httpRequest.cache = 'no-store'
+  }
+
+  //    16. If httpRequest’s cache mode is "no-cache", httpRequest’s prevent
+  //    no-cache cache-control header modification flag is unset, and
+  //    httpRequest’s header list does not contain `Cache-Control`, then append
+  //    `Cache-Control`/`max-age=0` to httpRequest’s header list.
+  if (
+    httpRequest.cache === 'no-cache' &&
+    !httpRequest.preventNoCacheCacheControlHeaderModification &&
+    !httpRequest.headersList.contains('cache-control')
+  ) {
+    httpRequest.headersList.append('cache-control', 'max-age=0')
+  }
+
+  //    17. If httpRequest’s cache mode is "no-store" or "reload", then:
+  if (httpRequest.cache === 'no-store' || httpRequest.cache === 'reload') {
+    // 1. If httpRequest’s header list does not contain `Pragma`, then append
+    // `Pragma`/`no-cache` to httpRequest’s header list.
+    if (!httpRequest.headersList.contains('pragma')) {
+      httpRequest.headersList.append('pragma', 'no-cache')
+    }
+
+    // 2. If httpRequest’s header list does not contain `Cache-Control`,
+    // then append `Cache-Control`/`no-cache` to httpRequest’s header list.
+    if (!httpRequest.headersList.contains('cache-control')) {
+      httpRequest.headersList.append('cache-control', 'no-cache')
+    }
+  }
+
+  //    18. If httpRequest’s header list contains `Range`, then append
+  //    `Accept-Encoding`/`identity` to httpRequest’s header list.
+  if (httpRequest.headersList.contains('range')) {
+    httpRequest.headersList.append('accept-encoding', 'identity')
+  }
+
+  //    19. Modify httpRequest’s header list per HTTP. Do not append a given
+  //    header if httpRequest’s header list contains that header’s name.
+  //    TODO: https://github.com/whatwg/fetch/issues/1285#issuecomment-896560129
+  if (!httpRequest.headersList.contains('accept-encoding')) {
+    if (urlHasHttpsScheme(requestCurrentURL(httpRequest))) {
+      httpRequest.headersList.append('accept-encoding', 'br, gzip, deflate')
+    } else {
+      httpRequest.headersList.append('accept-encoding', 'gzip, deflate')
+    }
+  }
+
+  httpRequest.headersList.delete('host')
+
+  //    20. If includeCredentials is true, then:
+  if (includeCredentials) {
+    // 1. If the user agent is not configured to block cookies for httpRequest
+    // (see section 7 of [COOKIES]), then:
+    // TODO: credentials
+    // 2. If httpRequest’s header list does not contain `Authorization`, then:
+    // TODO: credentials
+  }
+
+  //    21. If there’s a proxy-authentication entry, use it as appropriate.
+  //    TODO: proxy-authentication
+
+  //    22. Set httpCache to the result of determining the HTTP cache
+  //    partition, given httpRequest.
+  //    TODO: cache
+
+  //    23. If httpCache is null, then set httpRequest’s cache mode to
+  //    "no-store".
+  if (httpCache == null) {
+    httpRequest.cache = 'no-store'
+  }
+
+  //    24. If httpRequest’s cache mode is neither "no-store" nor "reload",
+  //    then:
+  if (httpRequest.cache !== 'no-store' && httpRequest.cache !== 'reload') {
+    // TODO: cache
+  }
+
+  // 9. If aborted, then return the appropriate network error for fetchParams.
+  // TODO
+
+  // 10. If response is null, then:
+  if (response == null) {
+    // 1. If httpRequest’s cache mode is "only-if-cached", then return a
+    // network error.
+    if (httpRequest.cache === 'only-if-cached') {
+      return makeNetworkError('only if cached')
+    }
+
+    // 2. Let forwardResponse be the result of running HTTP-network fetch
+    // given httpFetchParams, includeCredentials, and isNewConnectionFetch.
+    const forwardResponse = await httpNetworkFetch(
+      httpFetchParams,
+      includeCredentials,
+      isNewConnectionFetch
+    )
+
+    // 3. If httpRequest’s method is unsafe and forwardResponse’s status is
+    // in the range 200 to 399, inclusive, invalidate appropriate stored
+    // responses in httpCache, as per the "Invalidation" chapter of HTTP
+    // Caching, and set storedResponse to null. [HTTP-CACHING]
+    if (
+      !safeMethodsSet.has(httpRequest.method) &&
+      forwardResponse.status >= 200 &&
+      forwardResponse.status <= 399
+    ) {
+      // TODO: cache
+    }
+
+    // 4. If the revalidatingFlag is set and forwardResponse’s status is 304,
+    // then:
+    if (revalidatingFlag && forwardResponse.status === 304) {
+      // TODO: cache
+    }
+
+    // 5. If response is null, then:
+    if (response == null) {
+      // 1. Set response to forwardResponse.
+      response = forwardResponse
+
+      // 2. Store httpRequest and forwardResponse in httpCache, as per the
+      // "Storing Responses in Caches" chapter of HTTP Caching. [HTTP-CACHING]
+      // TODO: cache
+    }
+  }
+
+  // 11. Set response’s URL list to a clone of httpRequest’s URL list.
+  response.urlList = [...httpRequest.urlList]
+
+  // 12. If httpRequest’s header list contains `Range`, then set response’s
+  // range-requested flag.
+  if (httpRequest.headersList.contains('range')) {
+    response.rangeRequested = true
+  }
+
+  // 13. Set response’s request-includes-credentials to includeCredentials.
+  response.requestIncludesCredentials = includeCredentials
+
+  // 14. If response’s status is 401, httpRequest’s response tainting is not
+  // "cors", includeCredentials is true, and request’s window is an environment
+  // settings object, then:
+  // TODO
+
+  // 15. If response’s status is 407, then:
+  if (response.status === 407) {
+    // 1. If request’s window is "no-window", then return a network error.
+    if (request.window === 'no-window') {
+      return makeNetworkError()
+    }
+
+    // 2. ???
+
+    // 3. If fetchParams is canceled, then return the appropriate network error for fetchParams.
+    if (isCancelled(fetchParams)) {
+      return makeAppropriateNetworkError(fetchParams)
+    }
+
+    // 4. Prompt the end user as appropriate in request’s window and store
+    // the result as a proxy-authentication entry. [HTTP-AUTH]
+    // TODO: Invoke some kind of callback?
+
+    // 5. Set response to the result of running HTTP-network-or-cache fetch given
+    // fetchParams.
+    // TODO
+    return makeNetworkError('proxy authentication required')
+  }
+
+  // 16. If all of the following are true
+  if (
+    // response’s status is 421
+    response.status === 421 &&
+    // isNewConnectionFetch is false
+    !isNewConnectionFetch &&
+    // request’s body is null, or request’s body is non-null and request’s body’s source is non-null
+    (request.body == null || request.body.source != null)
+  ) {
+    // then:
+
+    // 1. If fetchParams is canceled, then return the appropriate network error for fetchParams.
+    if (isCancelled(fetchParams)) {
+      return makeAppropriateNetworkError(fetchParams)
+    }
+
+    // 2. Set response to the result of running HTTP-network-or-cache
+    // fetch given fetchParams, isAuthenticationFetch, and true.
+
+    // TODO (spec): The spec doesn't specify this but we need to cancel
+    // the active response before we can start a new one.
+    // https://github.com/whatwg/fetch/issues/1293
+    fetchParams.controller.connection.destroy()
+
+    response = await httpNetworkOrCacheFetch(
+      fetchParams,
+      isAuthenticationFetch,
+      true
+    )
+  }
+
+  // 17. If isAuthenticationFetch is true, then create an authentication entry
+  if (isAuthenticationFetch) {
+    // TODO
+  }
+
+  // 18. Return response.
+  return response
+}
+
+// https://fetch.spec.whatwg.org/#http-network-fetch
+async function httpNetworkFetch (
+  fetchParams,
+  includeCredentials = false,
+  forceNewConnection = false
+) {
+  assert(!fetchParams.controller.connection || fetchParams.controller.connection.destroyed)
+
+  fetchParams.controller.connection = {
+    abort: null,
+    destroyed: false,
+    destroy (err) {
+      if (!this.destroyed) {
+        this.destroyed = true
+        this.abort?.(err ?? new DOMException('The operation was aborted.', 'AbortError'))
+      }
+    }
+  }
+
+  // 1. Let request be fetchParams’s request.
+  const request = fetchParams.request
+
+  // 2. Let response be null.
+  let response = null
+
+  // 3. Let timingInfo be fetchParams’s timing info.
+  const timingInfo = fetchParams.timingInfo
+
+  // 4. Let httpCache be the result of determining the HTTP cache partition,
+  // given request.
+  // TODO: cache
+  const httpCache = null
+
+  // 5. If httpCache is null, then set request’s cache mode to "no-store".
+  if (httpCache == null) {
+    request.cache = 'no-store'
+  }
+
+  // 6. Let networkPartitionKey be the result of determining the network
+  // partition key given request.
+  // TODO
+
+  // 7. Let newConnection be "yes" if forceNewConnection is true; otherwise
+  // "no".
+  const newConnection = forceNewConnection ? 'yes' : 'no' // eslint-disable-line no-unused-vars
+
+  // 8. Switch on request’s mode:
+  if (request.mode === 'websocket') {
+    // Let connection be the result of obtaining a WebSocket connection,
+    // given request’s current URL.
+    // TODO
+  } else {
+    // Let connection be the result of obtaining a connection, given
+    // networkPartitionKey, request’s current URL’s origin,
+    // includeCredentials, and forceNewConnection.
+    // TODO
+  }
+
+  // 9. Run these steps, but abort when the ongoing fetch is terminated:
+
+  //    1. If connection is failure, then return a network error.
+
+  //    2. Set timingInfo’s final connection timing info to the result of
+  //    calling clamp and coarsen connection timing info with connection’s
+  //    timing info, timingInfo’s post-redirect start time, and fetchParams’s
+  //    cross-origin isolated capability.
+
+  //    3. If connection is not an HTTP/2 connection, request’s body is non-null,
+  //    and request’s body’s source is null, then append (`Transfer-Encoding`,
+  //    `chunked`) to request’s header list.
+
+  //    4. Set timingInfo’s final network-request start time to the coarsened
+  //    shared current time given fetchParams’s cross-origin isolated
+  //    capability.
+
+  //    5. Set response to the result of making an HTTP request over connection
+  //    using request with the following caveats:
+
+  //        - Follow the relevant requirements from HTTP. [HTTP] [HTTP-SEMANTICS]
+  //        [HTTP-COND] [HTTP-CACHING] [HTTP-AUTH]
+
+  //        - If request’s body is non-null, and request’s body’s source is null,
+  //        then the user agent may have a buffer of up to 64 kibibytes and store
+  //        a part of request’s body in that buffer. If the user agent reads from
+  //        request’s body beyond that buffer’s size and the user agent needs to
+  //        resend request, then instead return a network error.
+
+  //        - Set timingInfo’s final network-response start time to the coarsened
+  //        shared current time given fetchParams’s cross-origin isolated capability,
+  //        immediately after the user agent’s HTTP parser receives the first byte
+  //        of the response (e.g., frame header bytes for HTTP/2 or response status
+  //        line for HTTP/1.x).
+
+  //        - Wait until all the headers are transmitted.
+
+  //        - Any responses whose status is in the range 100 to 199, inclusive,
+  //        and is not 101, are to be ignored, except for the purposes of setting
+  //        timingInfo’s final network-response start time above.
+
+  //    - If request’s header list contains `Transfer-Encoding`/`chunked` and
+  //    response is transferred via HTTP/1.0 or older, then return a network
+  //    error.
+
+  //    - If the HTTP request results in a TLS client certificate dialog, then:
+
+  //        1. If request’s window is an environment settings object, make the
+  //        dialog available in request’s window.
+
+  //        2. Otherwise, return a network error.
+
+  // To transmit request’s body body, run these steps:
+  let requestBody = null
+  // 1. If body is null and fetchParams’s process request end-of-body is
+  // non-null, then queue a fetch task given fetchParams’s process request
+  // end-of-body and fetchParams’s task destination.
+  if (request.body == null && fetchParams.processRequestEndOfBody) {
+    queueMicrotask(() => fetchParams.processRequestEndOfBody())
+  } else if (request.body != null) {
+    // 2. Otherwise, if body is non-null:
+
+    //    1. Let processBodyChunk given bytes be these steps:
+    const processBodyChunk = async function * (bytes) {
+      // 1. If the ongoing fetch is terminated, then abort these steps.
+      if (isCancelled(fetchParams)) {
+        return
+      }
+
+      // 2. Run this step in parallel: transmit bytes.
+      yield bytes
+
+      // 3. If fetchParams’s process request body is non-null, then run
+      // fetchParams’s process request body given bytes’s length.
+      fetchParams.processRequestBodyChunkLength?.(bytes.byteLength)
+    }
+
+    // 2. Let processEndOfBody be these steps:
+    const processEndOfBody = () => {
+      // 1. If fetchParams is canceled, then abort these steps.
+      if (isCancelled(fetchParams)) {
+        return
+      }
+
+      // 2. If fetchParams’s process request end-of-body is non-null,
+      // then run fetchParams’s process request end-of-body.
+      if (fetchParams.processRequestEndOfBody) {
+        fetchParams.processRequestEndOfBody()
+      }
+    }
+
+    // 3. Let processBodyError given e be these steps:
+    const processBodyError = (e) => {
+      // 1. If fetchParams is canceled, then abort these steps.
+      if (isCancelled(fetchParams)) {
+        return
+      }
+
+      // 2. If e is an "AbortError" DOMException, then abort fetchParams’s controller.
+      if (e.name === 'AbortError') {
+        fetchParams.controller.abort()
+      } else {
+        fetchParams.controller.terminate(e)
+      }
+    }
+
+    // 4. Incrementally read request’s body given processBodyChunk, processEndOfBody,
+    // processBodyError, and fetchParams’s task destination.
+    requestBody = (async function * () {
+      try {
+        for await (const bytes of request.body.stream) {
+          yield * processBodyChunk(bytes)
+        }
+        processEndOfBody()
+      } catch (err) {
+        processBodyError(err)
+      }
+    })()
+  }
+
+  try {
+    // socket is only provided for websockets
+    const { body, status, statusText, headersList, socket } = await dispatch({ body: requestBody })
+
+    if (socket) {
+      response = makeResponse({ status, statusText, headersList, socket })
+    } else {
+      const iterator = body[Symbol.asyncIterator]()
+      fetchParams.controller.next = () => iterator.next()
+
+      response = makeResponse({ status, statusText, headersList })
+    }
+  } catch (err) {
+    // 10. If aborted, then:
+    if (err.name === 'AbortError') {
+      // 1. If connection uses HTTP/2, then transmit an RST_STREAM frame.
+      fetchParams.controller.connection.destroy()
+
+      // 2. Return the appropriate network error for fetchParams.
+      return makeAppropriateNetworkError(fetchParams, err)
+    }
+
+    return makeNetworkError(err)
+  }
+
+  // 11. Let pullAlgorithm be an action that resumes the ongoing fetch
+  // if it is suspended.
+  const pullAlgorithm = () => {
+    fetchParams.controller.resume()
+  }
+
+  // 12. Let cancelAlgorithm be an algorithm that aborts fetchParams’s
+  // controller with reason, given reason.
+  const cancelAlgorithm = (reason) => {
+    fetchParams.controller.abort(reason)
+  }
+
+  // 13. Let highWaterMark be a non-negative, non-NaN number, chosen by
+  // the user agent.
+  // TODO
+
+  // 14. Let sizeAlgorithm be an algorithm that accepts a chunk object
+  // and returns a non-negative, non-NaN, non-infinite number, chosen by the user agent.
+  // TODO
+
+  // 15. Let stream be a new ReadableStream.
+  // 16. Set up stream with pullAlgorithm set to pullAlgorithm,
+  // cancelAlgorithm set to cancelAlgorithm, highWaterMark set to
+  // highWaterMark, and sizeAlgorithm set to sizeAlgorithm.
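+  // Note: ReadableStream is required lazily, the first time a response body
+  // stream needs to be created.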
+  if (!ReadableStream) {
+    ReadableStream = (__nccwpck_require__(3774).ReadableStream)
+  }
+
+  const stream = new ReadableStream(
+    {
+      async start (controller) {
+        fetchParams.controller.controller = controller
+      },
+      async pull (controller) {
+        await pullAlgorithm(controller)
+      },
+      async cancel (reason) {
+        await cancelAlgorithm(reason)
+      }
+    },
+    {
+      highWaterMark: 0,
+      size () {
+        return 1
+      }
+    }
+  )
+
+  // 17. Run these steps, but abort when the ongoing fetch is terminated:
+
+  //    1. Set response’s body to a new body whose stream is stream.
+  response.body = { stream }
+
+  //    2. If response is not a network error and request’s cache mode is
+  //    not "no-store", then update response in httpCache for request.
+  //    TODO
+
+  //    3. If includeCredentials is true and the user agent is not configured
+  //    to block cookies for request (see section 7 of [COOKIES]), then run the
+  //    "set-cookie-string" parsing algorithm (see section 5.2 of [COOKIES]) on
+  //    the value of each header whose name is a byte-case-insensitive match for
+  //    `Set-Cookie` in response’s header list, if any, and request’s current URL.
+  //    TODO
+
+  // 18. If aborted, then:
+  // TODO
+
+  // 19. Run these steps in parallel:
+
+  //    1. Run these steps, but abort when fetchParams is canceled:
+  fetchParams.controller.on('terminated', onAborted)
+  fetchParams.controller.resume = async () => {
+    // 1. While true
+    while (true) {
+      // 1-3. See onData...
+
+      // 4. Set bytes to the result of handling content codings given
+      // codings and bytes.
+      let bytes
+      let isFailure
+      try {
+        const { done, value } = await fetchParams.controller.next()
+
+        if (isAborted(fetchParams)) {
+          break
+        }
+
+        bytes = done ? undefined : value
+      } catch (err) {
+        if (fetchParams.controller.ended && !timingInfo.encodedBodySize) {
+          // zlib doesn't like empty streams.
+          bytes = undefined
+        } else {
+          bytes = err
+
+          // err may be propagated from the result of calling readablestream.cancel,
+          // which might not be an error. https://github.com/nodejs/undici/issues/2009
+          isFailure = true
+        }
+      }
+
+      if (bytes === undefined) {
+        // 2. Otherwise, if the bytes transmission for response’s message
+        // body is done normally and stream is readable, then close
+        // stream, finalize response for fetchParams and response, and
+        // abort these in-parallel steps.
+        readableStreamClose(fetchParams.controller.controller)
+
+        finalizeResponse(fetchParams, response)
+
+        return
+      }
+
+      // 5. Increase timingInfo’s decoded body size by bytes’s length.
+      timingInfo.decodedBodySize += bytes?.byteLength ?? 0
+
+      // 6. If bytes is failure, then terminate fetchParams’s controller.
+      if (isFailure) {
+        fetchParams.controller.terminate(bytes)
+        return
+      }
+
+      // 7. Enqueue a Uint8Array wrapping an ArrayBuffer containing bytes
+      // into stream.
+      fetchParams.controller.controller.enqueue(new Uint8Array(bytes))
+
+      // 8. If stream is errored, then terminate the ongoing fetch.
+      if (isErrored(stream)) {
+        fetchParams.controller.terminate()
+        return
+      }
+
+      // 9. If stream doesn’t need more data ask the user agent to suspend
+      // the ongoing fetch.
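+      // Note: returning here suspends the loop; the stream's pull() hook
+      // (pullAlgorithm) calls resume() again once the consumer wants more data.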
+      if (!fetchParams.controller.controller.desiredSize) {
+        return
+      }
+    }
+  }
+
+  //    2. If aborted, then:
+  function onAborted (reason) {
+    // 2. If fetchParams is aborted, then:
+    if (isAborted(fetchParams)) {
+      // 1. Set response’s aborted flag.
+      response.aborted = true
+
+      // 2. If stream is readable, then error stream with the result of
+      //    deserialize a serialized abort reason given fetchParams’s
+      //    controller’s serialized abort reason and an
+      //    implementation-defined realm.
+      if (isReadable(stream)) {
+        fetchParams.controller.controller.error(
+          fetchParams.controller.serializedAbortReason
+        )
+      }
+    } else {
+      // 3. Otherwise, if stream is readable, error stream with a TypeError.
+      if (isReadable(stream)) {
+        fetchParams.controller.controller.error(new TypeError('terminated', {
+          cause: isErrorLike(reason) ? reason : undefined
+        }))
+      }
+    }
+
+    // 4. If connection uses HTTP/2, then transmit an RST_STREAM frame.
+    // 5. Otherwise, the user agent should close connection unless it would be bad for performance to do so.
+    fetchParams.controller.connection.destroy()
+  }
+
+  // 20. Return response.
+  return response
+
+  async function dispatch ({ body }) {
+    const url = requestCurrentURL(request)
+    /** @type {import('../..').Agent} */
+    const agent = fetchParams.controller.dispatcher
+
+    return new Promise((resolve, reject) => agent.dispatch(
+      {
+        path: url.pathname + url.search,
+        origin: url.origin,
+        method: request.method,
+        body: fetchParams.controller.dispatcher.isMockActive ? request.body && (request.body.source || request.body.stream) : body,
+        headers: request.headersList.entries,
+        maxRedirections: 0,
+        upgrade: request.mode === 'websocket' ? 'websocket' : undefined
+      },
+      {
+        body: null,
+        abort: null,
+
+        onConnect (abort) {
+          // TODO (fix): Do we need connection here?
+          const { connection } = fetchParams.controller
+
+          if (connection.destroyed) {
+            abort(new DOMException('The operation was aborted.', 'AbortError'))
+          } else {
+            fetchParams.controller.on('terminated', abort)
+            this.abort = connection.abort = abort
+          }
+        },
+
+        onHeaders (status, headersList, resume, statusText) {
+          if (status < 200) {
+            return
+          }
+
+          let codings = []
+          let location = ''
+
+          const headers = new Headers()
+
+          // For H2, the headers are a plain JS object
+          // We distinguish between them and iterate accordingly
+          if (Array.isArray(headersList)) {
+            for (let n = 0; n < headersList.length; n += 2) {
+              const key = headersList[n + 0].toString('latin1')
+              const val = headersList[n + 1].toString('latin1')
+              if (key.toLowerCase() === 'content-encoding') {
+                // https://www.rfc-editor.org/rfc/rfc7231#section-3.1.2.1
+                // "All content-coding values are case-insensitive..."
+                codings = val.toLowerCase().split(',').map((x) => x.trim())
+              } else if (key.toLowerCase() === 'location') {
+                location = val
+              }
+
+              headers[kHeadersList].append(key, val)
+            }
+          } else {
+            const keys = Object.keys(headersList)
+            for (const key of keys) {
+              const val = headersList[key]
+              if (key.toLowerCase() === 'content-encoding') {
+                // https://www.rfc-editor.org/rfc/rfc7231#section-3.1.2.1
+                // "All content-coding values are case-insensitive..."
+                codings = val.toLowerCase().split(',').map((x) => x.trim()).reverse()
+              } else if (key.toLowerCase() === 'location') {
+                location = val
+              }
+
+              headers[kHeadersList].append(key, val)
+            }
+          }
+
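+          // Note: the dispatcher's resume callback is wired up as the
+          // Readable's read(), so the underlying request is resumed only when
+          // this body is read (backpressure).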
+          this.body = new Readable({ read: resume })
+
+          const decoders = []
+
+          const willFollow = request.redirect === 'follow' &&
+            location &&
+            redirectStatusSet.has(status)
+
+          // https://developer.mozilla.org/en-US/docs/Web/HTTP/Headers/Content-Encoding
+          if (request.method !== 'HEAD' && request.method !== 'CONNECT' && !nullBodyStatus.includes(status) && !willFollow) {
+            for (const coding of codings) {
+              // https://www.rfc-editor.org/rfc/rfc9112.html#section-7.2
+              if (coding === 'x-gzip' || coding === 'gzip') {
+                decoders.push(zlib.createGunzip({
+                  // Be less strict when decoding compressed responses, since sometimes
+                  // servers send slightly invalid responses that are still accepted
+                  // by common browsers.
+                  // Always using Z_SYNC_FLUSH is what cURL does.
+                  flush: zlib.constants.Z_SYNC_FLUSH,
+                  finishFlush: zlib.constants.Z_SYNC_FLUSH
+                }))
+              } else if (coding === 'deflate') {
+                decoders.push(zlib.createInflate())
+              } else if (coding === 'br') {
+                decoders.push(zlib.createBrotliDecompress())
+              } else {
+                decoders.length = 0
+                break
+              }
+            }
+          }
+
+          resolve({
+            status,
+            statusText,
+            headersList: headers[kHeadersList],
+            body: decoders.length
+              ? pipeline(this.body, ...decoders, () => { })
+              : this.body.on('error', () => {})
+          })
+
+          return true
+        },
+
+        onData (chunk) {
+          if (fetchParams.controller.dump) {
+            return
+          }
+
+          // 1. If one or more bytes have been transmitted from response’s
+          // message body, then:
+
+          //  1. Let bytes be the transmitted bytes.
+          const bytes = chunk
+
+          //  2. Let codings be the result of extracting header list values
+          //  given `Content-Encoding` and response’s header list.
+          //  See pullAlgorithm.
+
+          //  3. Increase timingInfo’s encoded body size by bytes’s length.
+          timingInfo.encodedBodySize += bytes.byteLength
+
+          //  4. See pullAlgorithm...
+
+          return this.body.push(bytes)
+        },
+
+        onComplete () {
+          if (this.abort) {
+            fetchParams.controller.off('terminated', this.abort)
+          }
+
+          fetchParams.controller.ended = true
+
+          this.body.push(null)
+        },
+
+        onError (error) {
+          if (this.abort) {
+            fetchParams.controller.off('terminated', this.abort)
+          }
+
+          this.body?.destroy(error)
+
+          fetchParams.controller.terminate(error)
+
+          reject(error)
+        },
+
+        onUpgrade (status, headersList, socket) {
+          if (status !== 101) {
+            return
+          }
+
+          const headers = new Headers()
+
+          for (let n = 0; n < headersList.length; n += 2) {
+            const key = headersList[n + 0].toString('latin1')
+            const val = headersList[n + 1].toString('latin1')
+
+            headers[kHeadersList].append(key, val)
+          }
+
+          resolve({
+            status,
+            statusText: STATUS_CODES[status],
+            headersList: headers[kHeadersList],
+            socket
+          })
+
+          return true
+        }
+      }
+    ))
+  }
+}
+
+module.exports = {
+  fetch,
+  Fetch,
+  fetching,
+  finalizeAndReportTiming
+}
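+
+// --- Editor's illustration: a minimal, hedged sketch (not part of upstream
+// undici) of the Content-Encoding handling in onHeaders above. Each coding
+// contributes one zlib transform; an unknown coding disables decoding
+// entirely. The helper name is hypothetical, the function is never called by
+// this bundle, and it reuses the module-scope `zlib` required above.
+function exampleDecodersForCodings (codings) {
+  const decoders = []
+  for (const coding of codings) {
+    if (coding === 'x-gzip' || coding === 'gzip') {
+      // Same lenient flushing as above, mirroring what cURL does.
+      decoders.push(zlib.createGunzip({
+        flush: zlib.constants.Z_SYNC_FLUSH,
+        finishFlush: zlib.constants.Z_SYNC_FLUSH
+      }))
+    } else if (coding === 'deflate') {
+      decoders.push(zlib.createInflate())
+    } else if (coding === 'br') {
+      decoders.push(zlib.createBrotliDecompress())
+    } else {
+      // Unknown coding: hand the body through undecoded, like the code above.
+      return []
+    }
+  }
+  return decoders
+}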
+
+
+/***/ }),
+
+/***/ 5767:
+/***/ ((module, __unused_webpack_exports, __nccwpck_require__) => {
+
+/* globals AbortController */
+
+
+
+const { extractBody, mixinBody, cloneBody } = __nccwpck_require__(1380)
+const { Headers, fill: fillHeaders, HeadersList } = __nccwpck_require__(2908)
+const { FinalizationRegistry } = __nccwpck_require__(547)()
+const util = __nccwpck_require__(3465)
+const {
+  isValidHTTPToken,
+  sameOrigin,
+  normalizeMethod,
+  makePolicyContainer,
+  normalizeMethodRecord
+} = __nccwpck_require__(2696)
+const {
+  forbiddenMethodsSet,
+  corsSafeListedMethodsSet,
+  referrerPolicy,
+  requestRedirect,
+  requestMode,
+  requestCredentials,
+  requestCache,
+  requestDuplex
+} = __nccwpck_require__(4135)
+const { kEnumerableProperty } = util
+const { kHeaders, kSignal, kState, kGuard, kRealm } = __nccwpck_require__(8323)
+const { webidl } = __nccwpck_require__(29)
+const { getGlobalOrigin } = __nccwpck_require__(9963)
+const { URLSerializer } = __nccwpck_require__(3911)
+const { kHeadersList, kConstruct } = __nccwpck_require__(4856)
+const assert = __nccwpck_require__(2613)
+const { getMaxListeners, setMaxListeners, getEventListeners, defaultMaxListeners } = __nccwpck_require__(4434)
+
+let TransformStream = globalThis.TransformStream
+
+const kAbortController = Symbol('abortController')
+
+const requestFinalizer = new FinalizationRegistry(({ signal, abort }) => {
+  signal.removeEventListener('abort', abort)
+})
+
+// https://fetch.spec.whatwg.org/#request-class
+class Request {
+  // https://fetch.spec.whatwg.org/#dom-request
+  constructor (input, init = {}) {
+    if (input === kConstruct) {
+      return
+    }
+
+    webidl.argumentLengthCheck(arguments, 1, { header: 'Request constructor' })
+
+    input = webidl.converters.RequestInfo(input)
+    init = webidl.converters.RequestInit(init)
+
+    // https://html.spec.whatwg.org/multipage/webappapis.html#environment-settings-object
+    this[kRealm] = {
+      settingsObject: {
+        baseUrl: getGlobalOrigin(),
+        get origin () {
+          return this.baseUrl?.origin
+        },
+        policyContainer: makePolicyContainer()
+      }
+    }
+
+    // 1. Let request be null.
+    let request = null
+
+    // 2. Let fallbackMode be null.
+    let fallbackMode = null
+
+    // 3. Let baseURL be this’s relevant settings object’s API base URL.
+    const baseUrl = this[kRealm].settingsObject.baseUrl
+
+    // 4. Let signal be null.
+    let signal = null
+
+    // 5. If input is a string, then:
+    if (typeof input === 'string') {
+      // 1. Let parsedURL be the result of parsing input with baseURL.
+      // 2. If parsedURL is failure, then throw a TypeError.
+      let parsedURL
+      try {
+        parsedURL = new URL(input, baseUrl)
+      } catch (err) {
+        throw new TypeError('Failed to parse URL from ' + input, { cause: err })
+      }
+
+      // 3. If parsedURL includes credentials, then throw a TypeError.
+      if (parsedURL.username || parsedURL.password) {
+        throw new TypeError(
+          'Request cannot be constructed from a URL that includes credentials: ' +
+            input
+        )
+      }
+
+      // 4. Set request to a new request whose URL is parsedURL.
+      request = makeRequest({ urlList: [parsedURL] })
+
+      // 5. Set fallbackMode to "cors".
+      fallbackMode = 'cors'
+    } else {
+      // 6. Otherwise:
+
+      // 7. Assert: input is a Request object.
+      assert(input instanceof Request)
+
+      // 8. Set request to input’s request.
+      request = input[kState]
+
+      // 9. Set signal to input’s signal.
+      signal = input[kSignal]
+    }
+
+    // 7. Let origin be this’s relevant settings object’s origin.
+    const origin = this[kRealm].settingsObject.origin
+
+    // 8. Let window be "client".
+    let window = 'client'
+
+    // 9. If request’s window is an environment settings object and its origin
+    // is same origin with origin, then set window to request’s window.
+    if (
+      request.window?.constructor?.name === 'EnvironmentSettingsObject' &&
+      sameOrigin(request.window, origin)
+    ) {
+      window = request.window
+    }
+
+    // 10. If init["window"] exists and is non-null, then throw a TypeError.
+    if (init.window != null) {
+      throw new TypeError(`'window' option '${init.window}' must be null`)
+    }
+
+    // 11. If init["window"] exists, then set window to "no-window".
+    if ('window' in init) {
+      window = 'no-window'
+    }
+
+    // 12. Set request to a new request with the following properties:
+    request = makeRequest({
+      // URL request’s URL.
+      // undici implementation note: this is set as the first item in request's urlList in makeRequest
+      // method request’s method.
+      method: request.method,
+      // header list A copy of request’s header list.
+      // undici implementation note: headersList is cloned in makeRequest
+      headersList: request.headersList,
+      // unsafe-request flag Set.
+      unsafeRequest: request.unsafeRequest,
+      // client This’s relevant settings object.
+      client: this[kRealm].settingsObject,
+      // window window.
+      window,
+      // priority request’s priority.
+      priority: request.priority,
+      // origin request’s origin. The propagation of the origin is only significant for navigation requests
+      // being handled by a service worker. In this scenario a request can have an origin that is different
+      // from the current client.
+      origin: request.origin,
+      // referrer request’s referrer.
+      referrer: request.referrer,
+      // referrer policy request’s referrer policy.
+      referrerPolicy: request.referrerPolicy,
+      // mode request’s mode.
+      mode: request.mode,
+      // credentials mode request’s credentials mode.
+      credentials: request.credentials,
+      // cache mode request’s cache mode.
+      cache: request.cache,
+      // redirect mode request’s redirect mode.
+      redirect: request.redirect,
+      // integrity metadata request’s integrity metadata.
+      integrity: request.integrity,
+      // keepalive request’s keepalive.
+      keepalive: request.keepalive,
+      // reload-navigation flag request’s reload-navigation flag.
+      reloadNavigation: request.reloadNavigation,
+      // history-navigation flag request’s history-navigation flag.
+      historyNavigation: request.historyNavigation,
+      // URL list A clone of request’s URL list.
+      urlList: [...request.urlList]
+    })
+
+    const initHasKey = Object.keys(init).length !== 0
+
+    // 13. If init is not empty, then:
+    if (initHasKey) {
+      // 1. If request’s mode is "navigate", then set it to "same-origin".
+      if (request.mode === 'navigate') {
+        request.mode = 'same-origin'
+      }
+
+      // 2. Unset request’s reload-navigation flag.
+      request.reloadNavigation = false
+
+      // 3. Unset request’s history-navigation flag.
+      request.historyNavigation = false
+
+      // 4. Set request’s origin to "client".
+      request.origin = 'client'
+
+      // 5. Set request’s referrer to "client"
+      request.referrer = 'client'
+
+      // 6. Set request’s referrer policy to the empty string.
+      request.referrerPolicy = ''
+
+      // 7. Set request’s URL to request’s current URL.
+      request.url = request.urlList[request.urlList.length - 1]
+
+      // 8. Set request’s URL list to « request’s URL ».
+      request.urlList = [request.url]
+    }
+
+    // 14. If init["referrer"] exists, then:
+    if (init.referrer !== undefined) {
+      // 1. Let referrer be init["referrer"].
+      const referrer = init.referrer
+
+      // 2. If referrer is the empty string, then set request’s referrer to "no-referrer".
+      if (referrer === '') {
+        request.referrer = 'no-referrer'
+      } else {
+        // 1. Let parsedReferrer be the result of parsing referrer with
+        // baseURL.
+        // 2. If parsedReferrer is failure, then throw a TypeError.
+        let parsedReferrer
+        try {
+          parsedReferrer = new URL(referrer, baseUrl)
+        } catch (err) {
+          throw new TypeError(`Referrer "${referrer}" is not a valid URL.`, { cause: err })
+        }
+
+        // 3. If one of the following is true
+        // - parsedReferrer’s scheme is "about" and path is the string "client"
+        // - parsedReferrer’s origin is not same origin with origin
+        // then set request’s referrer to "client".
+        if (
+          (parsedReferrer.protocol === 'about:' && parsedReferrer.hostname === 'client') ||
+          (origin && !sameOrigin(parsedReferrer, this[kRealm].settingsObject.baseUrl))
+        ) {
+          request.referrer = 'client'
+        } else {
+          // 4. Otherwise, set request’s referrer to parsedReferrer.
+          request.referrer = parsedReferrer
+        }
+      }
+    }
+
+    // 15. If init["referrerPolicy"] exists, then set request’s referrer policy
+    // to it.
+    if (init.referrerPolicy !== undefined) {
+      request.referrerPolicy = init.referrerPolicy
+    }
+
+    // 16. Let mode be init["mode"] if it exists, and fallbackMode otherwise.
+    let mode
+    if (init.mode !== undefined) {
+      mode = init.mode
+    } else {
+      mode = fallbackMode
+    }
+
+    // 17. If mode is "navigate", then throw a TypeError.
+    if (mode === 'navigate') {
+      throw webidl.errors.exception({
+        header: 'Request constructor',
+        message: 'invalid request mode navigate.'
+      })
+    }
+
+    // 18. If mode is non-null, set request’s mode to mode.
+    if (mode != null) {
+      request.mode = mode
+    }
+
+    // 19. If init["credentials"] exists, then set request’s credentials mode
+    // to it.
+    if (init.credentials !== undefined) {
+      request.credentials = init.credentials
+    }
+
+    // 18. If init["cache"] exists, then set request’s cache mode to it.
+    if (init.cache !== undefined) {
+      request.cache = init.cache
+    }
+
+    // 21. If request’s cache mode is "only-if-cached" and request’s mode is
+    // not "same-origin", then throw a TypeError.
+    if (request.cache === 'only-if-cached' && request.mode !== 'same-origin') {
+      throw new TypeError(
+        "'only-if-cached' can be set only with 'same-origin' mode"
+      )
+    }
+
+    // 22. If init["redirect"] exists, then set request’s redirect mode to it.
+    if (init.redirect !== undefined) {
+      request.redirect = init.redirect
+    }
+
+    // 23. If init["integrity"] exists, then set request’s integrity metadata to it.
+    if (init.integrity != null) {
+      request.integrity = String(init.integrity)
+    }
+
+    // 24. If init["keepalive"] exists, then set request’s keepalive to it.
+    if (init.keepalive !== undefined) {
+      request.keepalive = Boolean(init.keepalive)
+    }
+
+    // 25. If init["method"] exists, then:
+    if (init.method !== undefined) {
+      // 1. Let method be init["method"].
+      let method = init.method
+
+      // 2. If method is not a method or method is a forbidden method, then
+      // throw a TypeError.
+      if (!isValidHTTPToken(method)) {
+        throw new TypeError(`'${method}' is not a valid HTTP method.`)
+      }
+
+      if (forbiddenMethodsSet.has(method.toUpperCase())) {
+        throw new TypeError(`'${method}' HTTP method is unsupported.`)
+      }
+
+      // 3. Normalize method.
+      method = normalizeMethodRecord[method] ?? normalizeMethod(method)
+
+      // 4. Set request’s method to method.
+      request.method = method
+    }
+
+    // 26. If init["signal"] exists, then set signal to it.
+    if (init.signal !== undefined) {
+      signal = init.signal
+    }
+
+    // 27. Set this’s request to request.
+    this[kState] = request
+
+    // 28. Set this’s signal to a new AbortSignal object with this’s relevant
+    // Realm.
+    // TODO: could this be simplified with AbortSignal.any
+    // (https://dom.spec.whatwg.org/#dom-abortsignal-any)
+    const ac = new AbortController()
+    this[kSignal] = ac.signal
+    this[kSignal][kRealm] = this[kRealm]
+
+    // 29. If signal is not null, then make this’s signal follow signal.
+    if (signal != null) {
+      if (
+        !signal ||
+        typeof signal.aborted !== 'boolean' ||
+        typeof signal.addEventListener !== 'function'
+      ) {
+        throw new TypeError(
+          "Failed to construct 'Request': member signal is not of type AbortSignal."
+        )
+      }
+
+      if (signal.aborted) {
+        ac.abort(signal.reason)
+      } else {
+        // Keep a strong ref to ac while request object
+        // is alive. This is needed to prevent AbortController
+        // from being prematurely garbage collected.
+        // See, https://github.com/nodejs/undici/issues/1926.
+        this[kAbortController] = ac
+
+        const acRef = new WeakRef(ac)
+        const abort = function () {
+          const ac = acRef.deref()
+          if (ac !== undefined) {
+            ac.abort(this.reason)
+          }
+        }
+
+        // Third-party AbortControllers may not work with these.
+        // See, https://github.com/nodejs/undici/pull/1910#issuecomment-1464495619.
+        try {
+          // If the max amount of listeners is equal to the default, increase it
+          // This is only available in node >= v19.9.0
+          if (typeof getMaxListeners === 'function' && getMaxListeners(signal) === defaultMaxListeners) {
+            setMaxListeners(100, signal)
+          } else if (getEventListeners(signal, 'abort').length >= defaultMaxListeners) {
+            setMaxListeners(100, signal)
+          }
+        } catch {}
+
+        util.addAbortListener(signal, abort)
+        requestFinalizer.register(ac, { signal, abort })
+      }
+    }
+
+    // 30. Set this’s headers to a new Headers object with this’s relevant
+    // Realm, whose header list is request’s header list and guard is
+    // "request".
+    this[kHeaders] = new Headers(kConstruct)
+    this[kHeaders][kHeadersList] = request.headersList
+    this[kHeaders][kGuard] = 'request'
+    this[kHeaders][kRealm] = this[kRealm]
+
+    // 31. If this’s request’s mode is "no-cors", then:
+    if (mode === 'no-cors') {
+      // 1. If this’s request’s method is not a CORS-safelisted method,
+      // then throw a TypeError.
+      if (!corsSafeListedMethodsSet.has(request.method)) {
+        throw new TypeError(
+          `'${request.method}' is unsupported in no-cors mode.`
+        )
+      }
+
+      // 2. Set this’s headers’s guard to "request-no-cors".
+      this[kHeaders][kGuard] = 'request-no-cors'
+    }
+
+    // 32. If init is not empty, then:
+    if (initHasKey) {
+      /** @type {HeadersList} */
+      const headersList = this[kHeaders][kHeadersList]
+      // 1. Let headers be a copy of this’s headers and its associated header
+      // list.
+      // 2. If init["headers"] exists, then set headers to init["headers"].
+      const headers = init.headers !== undefined ? init.headers : new HeadersList(headersList)
+
+      // 3. Empty this’s headers’s header list.
+      headersList.clear()
+
+      // 4. If headers is a Headers object, then for each header in its header
+      // list, append header’s name/header’s value to this’s headers.
+      if (headers instanceof HeadersList) {
+        for (const [key, val] of headers) {
+          headersList.append(key, val)
+        }
+        // Note: Copy the `set-cookie` meta-data.
+        headersList.cookies = headers.cookies
+      } else {
+        // 5. Otherwise, fill this’s headers with headers.
+        fillHeaders(this[kHeaders], headers)
+      }
+    }
+
+    // 33. Let inputBody be input’s request’s body if input is a Request
+    // object; otherwise null.
+    const inputBody = input instanceof Request ? input[kState].body : null
+
+    // 34. If either init["body"] exists and is non-null or inputBody is
+    // non-null, and request’s method is `GET` or `HEAD`, then throw a
+    // TypeError.
+    if (
+      (init.body != null || inputBody != null) &&
+      (request.method === 'GET' || request.method === 'HEAD')
+    ) {
+      throw new TypeError('Request with GET/HEAD method cannot have body.')
+    }
+
+    // 35. Let initBody be null.
+    let initBody = null
+
+    // 36. If init["body"] exists and is non-null, then:
+    if (init.body != null) {
+      // 1. Let Content-Type be null.
+      // 2. Set initBody and Content-Type to the result of extracting
+      // init["body"], with keepalive set to request’s keepalive.
+      const [extractedBody, contentType] = extractBody(
+        init.body,
+        request.keepalive
+      )
+      initBody = extractedBody
+
+      // 3. If Content-Type is non-null and this’s headers’s header list does
+      // not contain `Content-Type`, then append `Content-Type`/Content-Type to
+      // this’s headers.
+      if (contentType && !this[kHeaders][kHeadersList].contains('content-type')) {
+        this[kHeaders].append('content-type', contentType)
+      }
+    }
+
+    // 37. Let inputOrInitBody be initBody if it is non-null; otherwise
+    // inputBody.
+    const inputOrInitBody = initBody ?? inputBody
+
+    // 38. If inputOrInitBody is non-null and inputOrInitBody’s source is
+    // null, then:
+    if (inputOrInitBody != null && inputOrInitBody.source == null) {
+      // 1. If initBody is non-null and init["duplex"] does not exist,
+      //    then throw a TypeError.
+      if (initBody != null && init.duplex == null) {
+        throw new TypeError('RequestInit: duplex option is required when sending a body.')
+      }
+
+      // 2. If this’s request’s mode is neither "same-origin" nor "cors",
+      // then throw a TypeError.
+      if (request.mode !== 'same-origin' && request.mode !== 'cors') {
+        throw new TypeError(
+          'If request is made from ReadableStream, mode should be "same-origin" or "cors"'
+        )
+      }
+
+      // 3. Set this’s request’s use-CORS-preflight flag.
+      request.useCORSPreflightFlag = true
+    }
+
+    // 39. Let finalBody be inputOrInitBody.
+    let finalBody = inputOrInitBody
+
+    // 40. If initBody is null and inputBody is non-null, then:
+    if (initBody == null && inputBody != null) {
+      // 1. If input is unusable, then throw a TypeError.
+      if (util.isDisturbed(inputBody.stream) || inputBody.stream.locked) {
+        throw new TypeError(
+          'Cannot construct a Request with a Request object that has already been used.'
+        )
+      }
+
+      // 2. Set finalBody to the result of creating a proxy for inputBody.
+      if (!TransformStream) {
+        TransformStream = (__nccwpck_require__(3774).TransformStream)
+      }
+
+      // https://streams.spec.whatwg.org/#readablestream-create-a-proxy
+      const identityTransform = new TransformStream()
+      inputBody.stream.pipeThrough(identityTransform)
+      finalBody = {
+        source: inputBody.source,
+        length: inputBody.length,
+        stream: identityTransform.readable
+      }
+    }
+
+    // 41. Set this’s request’s body to finalBody.
+    this[kState].body = finalBody
+  }
+
+  // Returns request’s HTTP method, which is "GET" by default.
+  get method () {
+    webidl.brandCheck(this, Request)
+
+    // The method getter steps are to return this’s request’s method.
+    return this[kState].method
+  }
+
+  // Returns the URL of request as a string.
+  get url () {
+    webidl.brandCheck(this, Request)
+
+    // The url getter steps are to return this’s request’s URL, serialized.
+    return URLSerializer(this[kState].url)
+  }
+
+  // Returns a Headers object consisting of the headers associated with request.
+  // Note that headers added in the network layer by the user agent will not
+  // be accounted for in this object, e.g., the "Host" header.
+  get headers () {
+    webidl.brandCheck(this, Request)
+
+    // The headers getter steps are to return this’s headers.
+    return this[kHeaders]
+  }
+
+  // Returns the kind of resource requested by request, e.g., "document"
+  // or "script".
+  get destination () {
+    webidl.brandCheck(this, Request)
+
+    // The destination getter steps are to return this’s request’s destination.
+    return this[kState].destination
+  }
+
+  // Returns the referrer of request. Its value can be a same-origin URL if
+  // explicitly set in init, the empty string to indicate no referrer, and
+  // "about:client" when defaulting to the global’s default. This is used
+  // during fetching to determine the value of the `Referer` header of the
+  // request being made.
+  get referrer () {
+    webidl.brandCheck(this, Request)
+
+    // 1. If this’s request’s referrer is "no-referrer", then return the
+    // empty string.
+    if (this[kState].referrer === 'no-referrer') {
+      return ''
+    }
+
+    // 2. If this’s request’s referrer is "client", then return
+    // "about:client".
+    if (this[kState].referrer === 'client') {
+      return 'about:client'
+    }
+
+    // Return this’s request’s referrer, serialized.
+    return this[kState].referrer.toString()
+  }
+
+  // Returns the referrer policy associated with request.
+  // This is used during fetching to compute the value of the request’s
+  // referrer.
+  get referrerPolicy () {
+    webidl.brandCheck(this, Request)
+
+    // The referrerPolicy getter steps are to return this’s request’s referrer policy.
+    return this[kState].referrerPolicy
+  }
+
+  // Returns the mode associated with request, which is a string indicating
+  // whether the request will use CORS, or will be restricted to same-origin
+  // URLs.
+  get mode () {
+    webidl.brandCheck(this, Request)
+
+    // The mode getter steps are to return this’s request’s mode.
+    return this[kState].mode
+  }
+
+  // Returns the credentials mode associated with request,
+  // which is a string indicating whether credentials will be sent with the
+  // request always, never, or only when sent to a same-origin URL.
+  get credentials () {
+    // The credentials getter steps are to return this’s request’s credentials mode.
+    return this[kState].credentials
+  }
+
+  // Returns the cache mode associated with request,
+  // which is a string indicating how the request will
+  // interact with the browser’s cache when fetching.
+  get cache () {
+    webidl.brandCheck(this, Request)
+
+    // The cache getter steps are to return this’s request’s cache mode.
+    return this[kState].cache
+  }
+
+  // Returns the redirect mode associated with request,
+  // which is a string indicating how redirects for the
+  // request will be handled during fetching. A request
+  // will follow redirects by default.
+  get redirect () {
+    webidl.brandCheck(this, Request)
+
+    // The redirect getter steps are to return this’s request’s redirect mode.
+    return this[kState].redirect
+  }
+
+  // Returns request’s subresource integrity metadata, which is a
+  // cryptographic hash of the resource being fetched. Its value
+  // consists of multiple hashes separated by whitespace. [SRI]
+  get integrity () {
+    webidl.brandCheck(this, Request)
+
+    // The integrity getter steps are to return this’s request’s integrity
+    // metadata.
+    return this[kState].integrity
+  }
+
+  // Returns a boolean indicating whether or not request can outlive the
+  // global in which it was created.
+  get keepalive () {
+    webidl.brandCheck(this, Request)
+
+    // The keepalive getter steps are to return this’s request’s keepalive.
+    return this[kState].keepalive
+  }
+
+  // Returns a boolean indicating whether or not request is for a reload
+  // navigation.
+  get isReloadNavigation () {
+    webidl.brandCheck(this, Request)
+
+    // The isReloadNavigation getter steps are to return true if this’s
+    // request’s reload-navigation flag is set; otherwise false.
+    return this[kState].reloadNavigation
+  }
+
+  // Returns a boolean indicating whether or not request is for a history
+  // navigation (a.k.a. back-forward navigation).
+  get isHistoryNavigation () {
+    webidl.brandCheck(this, Request)
+
+    // The isHistoryNavigation getter steps are to return true if this’s request’s
+    // history-navigation flag is set; otherwise false.
+    return this[kState].historyNavigation
+  }
+
+  // Returns the signal associated with request, which is an AbortSignal
+  // object indicating whether or not request has been aborted, and its
+  // abort event handler.
+  get signal () {
+    webidl.brandCheck(this, Request)
+
+    // The signal getter steps are to return this’s signal.
+    return this[kSignal]
+  }
+
+  get body () {
+    webidl.brandCheck(this, Request)
+
+    return this[kState].body ? this[kState].body.stream : null
+  }
+
+  get bodyUsed () {
+    webidl.brandCheck(this, Request)
+
+    return !!this[kState].body && util.isDisturbed(this[kState].body.stream)
+  }
+
+  get duplex () {
+    webidl.brandCheck(this, Request)
+
+    return 'half'
+  }
+
+  // Returns a clone of request.
+  clone () {
+    webidl.brandCheck(this, Request)
+
+    // 1. If this is unusable, then throw a TypeError.
+    if (this.bodyUsed || this.body?.locked) {
+      throw new TypeError('unusable')
+    }
+
+    // 2. Let clonedRequest be the result of cloning this’s request.
+    const clonedRequest = cloneRequest(this[kState])
+
+    // 3. Let clonedRequestObject be the result of creating a Request object,
+    // given clonedRequest, this’s headers’s guard, and this’s relevant Realm.
+    const clonedRequestObject = new Request(kConstruct)
+    clonedRequestObject[kState] = clonedRequest
+    clonedRequestObject[kRealm] = this[kRealm]
+    clonedRequestObject[kHeaders] = new Headers(kConstruct)
+    clonedRequestObject[kHeaders][kHeadersList] = clonedRequest.headersList
+    clonedRequestObject[kHeaders][kGuard] = this[kHeaders][kGuard]
+    clonedRequestObject[kHeaders][kRealm] = this[kHeaders][kRealm]
+
+    // 4. Make clonedRequestObject’s signal follow this’s signal.
+    const ac = new AbortController()
+    if (this.signal.aborted) {
+      ac.abort(this.signal.reason)
+    } else {
+      util.addAbortListener(
+        this.signal,
+        () => {
+          ac.abort(this.signal.reason)
+        }
+      )
+    }
+    clonedRequestObject[kSignal] = ac.signal
+
+    // 4. Return clonedRequestObject.
+    return clonedRequestObject
+  }
+}
+
+mixinBody(Request)
+
+function makeRequest (init) {
+  // https://fetch.spec.whatwg.org/#requests
+  const request = {
+    method: 'GET',
+    localURLsOnly: false,
+    unsafeRequest: false,
+    body: null,
+    client: null,
+    reservedClient: null,
+    replacesClientId: '',
+    window: 'client',
+    keepalive: false,
+    serviceWorkers: 'all',
+    initiator: '',
+    destination: '',
+    priority: null,
+    origin: 'client',
+    policyContainer: 'client',
+    referrer: 'client',
+    referrerPolicy: '',
+    mode: 'no-cors',
+    useCORSPreflightFlag: false,
+    credentials: 'same-origin',
+    useCredentials: false,
+    cache: 'default',
+    redirect: 'follow',
+    integrity: '',
+    cryptoGraphicsNonceMetadata: '',
+    parserMetadata: '',
+    reloadNavigation: false,
+    historyNavigation: false,
+    userActivation: false,
+    taintedOrigin: false,
+    redirectCount: 0,
+    responseTainting: 'basic',
+    preventNoCacheCacheControlHeaderModification: false,
+    done: false,
+    timingAllowFailed: false,
+    ...init,
+    headersList: init.headersList
+      ? new HeadersList(init.headersList)
+      : new HeadersList()
+  }
+  request.url = request.urlList[0]
+  return request
+}
+
+// https://fetch.spec.whatwg.org/#concept-request-clone
+function cloneRequest (request) {
+  // To clone a request request, run these steps:
+
+  // 1. Let newRequest be a copy of request, except for its body.
+  const newRequest = makeRequest({ ...request, body: null })
+
+  // 2. If request’s body is non-null, set newRequest’s body to the
+  // result of cloning request’s body.
+  if (request.body != null) {
+    newRequest.body = cloneBody(request.body)
+  }
+
+  // 3. Return newRequest.
+  return newRequest
+}
+
+Object.defineProperties(Request.prototype, {
+  method: kEnumerableProperty,
+  url: kEnumerableProperty,
+  headers: kEnumerableProperty,
+  redirect: kEnumerableProperty,
+  clone: kEnumerableProperty,
+  signal: kEnumerableProperty,
+  duplex: kEnumerableProperty,
+  destination: kEnumerableProperty,
+  body: kEnumerableProperty,
+  bodyUsed: kEnumerableProperty,
+  isHistoryNavigation: kEnumerableProperty,
+  isReloadNavigation: kEnumerableProperty,
+  keepalive: kEnumerableProperty,
+  integrity: kEnumerableProperty,
+  cache: kEnumerableProperty,
+  credentials: kEnumerableProperty,
+  attribute: kEnumerableProperty,
+  referrerPolicy: kEnumerableProperty,
+  referrer: kEnumerableProperty,
+  mode: kEnumerableProperty,
+  [Symbol.toStringTag]: {
+    value: 'Request',
+    configurable: true
+  }
+})
+
+webidl.converters.Request = webidl.interfaceConverter(
+  Request
+)
+
+// https://fetch.spec.whatwg.org/#requestinfo
+webidl.converters.RequestInfo = function (V) {
+  if (typeof V === 'string') {
+    return webidl.converters.USVString(V)
+  }
+
+  if (V instanceof Request) {
+    return webidl.converters.Request(V)
+  }
+
+  return webidl.converters.USVString(V)
+}
+
+webidl.converters.AbortSignal = webidl.interfaceConverter(
+  AbortSignal
+)
+
+// https://fetch.spec.whatwg.org/#requestinit
+webidl.converters.RequestInit = webidl.dictionaryConverter([
+  {
+    key: 'method',
+    converter: webidl.converters.ByteString
+  },
+  {
+    key: 'headers',
+    converter: webidl.converters.HeadersInit
+  },
+  {
+    key: 'body',
+    converter: webidl.nullableConverter(
+      webidl.converters.BodyInit
+    )
+  },
+  {
+    key: 'referrer',
+    converter: webidl.converters.USVString
+  },
+  {
+    key: 'referrerPolicy',
+    converter: webidl.converters.DOMString,
+    // https://w3c.github.io/webappsec-referrer-policy/#referrer-policy
+    allowedValues: referrerPolicy
+  },
+  {
+    key: 'mode',
+    converter: webidl.converters.DOMString,
+    // https://fetch.spec.whatwg.org/#concept-request-mode
+    allowedValues: requestMode
+  },
+  {
+    key: 'credentials',
+    converter: webidl.converters.DOMString,
+    // https://fetch.spec.whatwg.org/#requestcredentials
+    allowedValues: requestCredentials
+  },
+  {
+    key: 'cache',
+    converter: webidl.converters.DOMString,
+    // https://fetch.spec.whatwg.org/#requestcache
+    allowedValues: requestCache
+  },
+  {
+    key: 'redirect',
+    converter: webidl.converters.DOMString,
+    // https://fetch.spec.whatwg.org/#requestredirect
+    allowedValues: requestRedirect
+  },
+  {
+    key: 'integrity',
+    converter: webidl.converters.DOMString
+  },
+  {
+    key: 'keepalive',
+    converter: webidl.converters.boolean
+  },
+  {
+    key: 'signal',
+    converter: webidl.nullableConverter(
+      (signal) => webidl.converters.AbortSignal(
+        signal,
+        { strict: false }
+      )
+    )
+  },
+  {
+    key: 'window',
+    converter: webidl.converters.any
+  },
+  {
+    key: 'duplex',
+    converter: webidl.converters.DOMString,
+    allowedValues: requestDuplex
+  }
+])
+
+module.exports = { Request, makeRequest }
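+
+// --- Editor's illustration: a hedged usage sketch (not part of upstream
+// undici) of the Request constructor above. It is never called by this
+// bundle; the function name is hypothetical.
+function exampleRequestConstruction () {
+  // Would throw, per step 34 above: GET/HEAD requests cannot carry a body.
+  //   new Request('https://example.com', { method: 'GET', body: 'x' })
+
+  // A POST with a string body: the extracted body supplies a content-type
+  // only if the header list does not already contain one.
+  return new Request('https://example.com', {
+    method: 'POST',
+    body: JSON.stringify({ hello: 'world' }),
+    headers: { 'content-type': 'application/json' }
+  })
+}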
+
+
+/***/ }),
+
+/***/ 5683:
+/***/ ((module, __unused_webpack_exports, __nccwpck_require__) => {
+
+
+
+const { Headers, HeadersList, fill } = __nccwpck_require__(2908)
+const { extractBody, cloneBody, mixinBody } = __nccwpck_require__(1380)
+const util = __nccwpck_require__(3465)
+const { kEnumerableProperty } = util
+const {
+  isValidReasonPhrase,
+  isCancelled,
+  isAborted,
+  isBlobLike,
+  serializeJavascriptValueToJSONString,
+  isErrorLike,
+  isomorphicEncode
+} = __nccwpck_require__(2696)
+const {
+  redirectStatusSet,
+  nullBodyStatus,
+  DOMException
+} = __nccwpck_require__(4135)
+const { kState, kHeaders, kGuard, kRealm } = __nccwpck_require__(8323)
+const { webidl } = __nccwpck_require__(29)
+const { FormData } = __nccwpck_require__(2894)
+const { getGlobalOrigin } = __nccwpck_require__(9963)
+const { URLSerializer } = __nccwpck_require__(3911)
+const { kHeadersList, kConstruct } = __nccwpck_require__(4856)
+const assert = __nccwpck_require__(2613)
+const { types } = __nccwpck_require__(9023)
+
+const ReadableStream = globalThis.ReadableStream || (__nccwpck_require__(3774).ReadableStream)
+const textEncoder = new TextEncoder()
+
+// https://fetch.spec.whatwg.org/#response-class
+class Response {
+  // Creates network error Response.
+  static error () {
+    // TODO
+    const relevantRealm = { settingsObject: {} }
+
+    // The static error() method steps are to return the result of creating a
+    // Response object, given a new network error, "immutable", and this’s
+    // relevant Realm.
+    const responseObject = new Response()
+    responseObject[kState] = makeNetworkError()
+    responseObject[kRealm] = relevantRealm
+    responseObject[kHeaders][kHeadersList] = responseObject[kState].headersList
+    responseObject[kHeaders][kGuard] = 'immutable'
+    responseObject[kHeaders][kRealm] = relevantRealm
+    return responseObject
+  }
+
+  // https://fetch.spec.whatwg.org/#dom-response-json
+  static json (data, init = {}) {
+    webidl.argumentLengthCheck(arguments, 1, { header: 'Response.json' })
+
+    if (init !== null) {
+      init = webidl.converters.ResponseInit(init)
+    }
+
+    // 1. Let bytes be the result of running serialize a JavaScript value to JSON bytes on data.
+    const bytes = textEncoder.encode(
+      serializeJavascriptValueToJSONString(data)
+    )
+
+    // 2. Let body be the result of extracting bytes.
+    const body = extractBody(bytes)
+
+    // 3. Let responseObject be the result of creating a Response object, given a new response,
+    //    "response", and this’s relevant Realm.
+    const relevantRealm = { settingsObject: {} }
+    const responseObject = new Response()
+    responseObject[kRealm] = relevantRealm
+    responseObject[kHeaders][kGuard] = 'response'
+    responseObject[kHeaders][kRealm] = relevantRealm
+
+    // 4. Perform initialize a response given responseObject, init, and (body, "application/json").
+    initializeResponse(responseObject, init, { body: body[0], type: 'application/json' })
+
+    // 5. Return responseObject.
+    return responseObject
+  }
+
+  // Creates a redirect Response that redirects to url with status status.
+  static redirect (url, status = 302) {
+    const relevantRealm = { settingsObject: {} }
+
+    webidl.argumentLengthCheck(arguments, 1, { header: 'Response.redirect' })
+
+    url = webidl.converters.USVString(url)
+    status = webidl.converters['unsigned short'](status)
+
+    // 1. Let parsedURL be the result of parsing url with current settings
+    // object’s API base URL.
+    // 2. If parsedURL is failure, then throw a TypeError.
+    // TODO: base-URL?
+    let parsedURL
+    try {
+      parsedURL = new URL(url, getGlobalOrigin())
+    } catch (err) {
+      throw Object.assign(new TypeError('Failed to parse URL from ' + url), {
+        cause: err
+      })
+    }
+
+    // 3. If status is not a redirect status, then throw a RangeError.
+    if (!redirectStatusSet.has(status)) {
+      throw new RangeError('Invalid status code ' + status)
+    }
+
+    // 4. Let responseObject be the result of creating a Response object,
+    // given a new response, "immutable", and this’s relevant Realm.
+    const responseObject = new Response()
+    responseObject[kRealm] = relevantRealm
+    responseObject[kHeaders][kGuard] = 'immutable'
+    responseObject[kHeaders][kRealm] = relevantRealm
+
+    // 5. Set responseObject’s response’s status to status.
+    responseObject[kState].status = status
+
+    // 6. Let value be parsedURL, serialized and isomorphic encoded.
+    const value = isomorphicEncode(URLSerializer(parsedURL))
+
+    // 7. Append `Location`/value to responseObject’s response’s header list.
+    responseObject[kState].headersList.append('location', value)
+
+    // 8. Return responseObject.
+    return responseObject
+  }
+
+  // https://fetch.spec.whatwg.org/#dom-response
+  constructor (body = null, init = {}) {
+    if (body !== null) {
+      body = webidl.converters.BodyInit(body)
+    }
+
+    init = webidl.converters.ResponseInit(init)
+
+    // TODO
+    this[kRealm] = { settingsObject: {} }
+
+    // 1. Set this’s response to a new response.
+    this[kState] = makeResponse({})
+
+    // 2. Set this’s headers to a new Headers object with this’s relevant
+    // Realm, whose header list is this’s response’s header list and guard
+    // is "response".
+    this[kHeaders] = new Headers(kConstruct)
+    this[kHeaders][kGuard] = 'response'
+    this[kHeaders][kHeadersList] = this[kState].headersList
+    this[kHeaders][kRealm] = this[kRealm]
+
+    // 3. Let bodyWithType be null.
+    let bodyWithType = null
+
+    // 4. If body is non-null, then set bodyWithType to the result of extracting body.
+    if (body != null) {
+      const [extractedBody, type] = extractBody(body)
+      bodyWithType = { body: extractedBody, type }
+    }
+
+    // 5. Perform initialize a response given this, init, and bodyWithType.
+    initializeResponse(this, init, bodyWithType)
+  }
+
+  // Returns response’s type, e.g., "cors".
+  get type () {
+    webidl.brandCheck(this, Response)
+
+    // The type getter steps are to return this’s response’s type.
+    return this[kState].type
+  }
+
+  // Returns response’s URL, if it has one; otherwise the empty string.
+  get url () {
+    webidl.brandCheck(this, Response)
+
+    const urlList = this[kState].urlList
+
+    // The url getter steps are to return the empty string if this’s
+    // response’s URL is null; otherwise this’s response’s URL,
+    // serialized with exclude fragment set to true.
+    const url = urlList[urlList.length - 1] ?? null
+
+    if (url === null) {
+      return ''
+    }
+
+    return URLSerializer(url, true)
+  }
+
+  // Returns whether response was obtained through a redirect.
+  get redirected () {
+    webidl.brandCheck(this, Response)
+
+    // The redirected getter steps are to return true if this’s response’s URL
+    // list has more than one item; otherwise false.
+    return this[kState].urlList.length > 1
+  }
+
+  // Returns response’s status.
+  get status () {
+    webidl.brandCheck(this, Response)
+
+    // The status getter steps are to return this’s response’s status.
+    return this[kState].status
+  }
+
+  // Returns whether response’s status is an ok status.
+  get ok () {
+    webidl.brandCheck(this, Response)
+
+    // The ok getter steps are to return true if this’s response’s status is an
+    // ok status; otherwise false.
+    return this[kState].status >= 200 && this[kState].status <= 299
+  }
+
+  // Returns response’s status message.
+  get statusText () {
+    webidl.brandCheck(this, Response)
+
+    // The statusText getter steps are to return this’s response’s status
+    // message.
+    return this[kState].statusText
+  }
+
+  // Returns response’s headers as Headers.
+  get headers () {
+    webidl.brandCheck(this, Response)
+
+    // The headers getter steps are to return this’s headers.
+    return this[kHeaders]
+  }
+
+  get body () {
+    webidl.brandCheck(this, Response)
+
+    return this[kState].body ? this[kState].body.stream : null
+  }
+
+  get bodyUsed () {
+    webidl.brandCheck(this, Response)
+
+    return !!this[kState].body && util.isDisturbed(this[kState].body.stream)
+  }
+
+  // Returns a clone of response.
+  clone () {
+    webidl.brandCheck(this, Response)
+
+    // 1. If this is unusable, then throw a TypeError.
+    if (this.bodyUsed || (this.body && this.body.locked)) {
+      throw webidl.errors.exception({
+        header: 'Response.clone',
+        message: 'Body has already been consumed.'
+      })
+    }
+
+    // 2. Let clonedResponse be the result of cloning this’s response.
+    const clonedResponse = cloneResponse(this[kState])
+
+    // 3. Return the result of creating a Response object, given
+    // clonedResponse, this’s headers’s guard, and this’s relevant Realm.
+    const clonedResponseObject = new Response()
+    clonedResponseObject[kState] = clonedResponse
+    clonedResponseObject[kRealm] = this[kRealm]
+    clonedResponseObject[kHeaders][kHeadersList] = clonedResponse.headersList
+    clonedResponseObject[kHeaders][kGuard] = this[kHeaders][kGuard]
+    clonedResponseObject[kHeaders][kRealm] = this[kHeaders][kRealm]
+
+    return clonedResponseObject
+  }
+}
+
+mixinBody(Response)
+
+Object.defineProperties(Response.prototype, {
+  type: kEnumerableProperty,
+  url: kEnumerableProperty,
+  status: kEnumerableProperty,
+  ok: kEnumerableProperty,
+  redirected: kEnumerableProperty,
+  statusText: kEnumerableProperty,
+  headers: kEnumerableProperty,
+  clone: kEnumerableProperty,
+  body: kEnumerableProperty,
+  bodyUsed: kEnumerableProperty,
+  [Symbol.toStringTag]: {
+    value: 'Response',
+    configurable: true
+  }
+})
+
+Object.defineProperties(Response, {
+  json: kEnumerableProperty,
+  redirect: kEnumerableProperty,
+  error: kEnumerableProperty
+})
+
+// https://fetch.spec.whatwg.org/#concept-response-clone
+function cloneResponse (response) {
+  // To clone a response response, run these steps:
+
+  // 1. If response is a filtered response, then return a new identical
+  // filtered response whose internal response is a clone of response’s
+  // internal response.
+  if (response.internalResponse) {
+    return filterResponse(
+      cloneResponse(response.internalResponse),
+      response.type
+    )
+  }
+
+  // 2. Let newResponse be a copy of response, except for its body.
+  const newResponse = makeResponse({ ...response, body: null })
+
+  // 3. If response’s body is non-null, then set newResponse’s body to the
+  // result of cloning response’s body.
+  if (response.body != null) {
+    newResponse.body = cloneBody(response.body)
+  }
+
+  // 4. Return newResponse.
+  return newResponse
+}
+
+function makeResponse (init) {
+  return {
+    aborted: false,
+    rangeRequested: false,
+    timingAllowPassed: false,
+    requestIncludesCredentials: false,
+    type: 'default',
+    status: 200,
+    timingInfo: null,
+    cacheState: '',
+    statusText: '',
+    ...init,
+    headersList: init.headersList
+      ? new HeadersList(init.headersList)
+      : new HeadersList(),
+    urlList: init.urlList ? [...init.urlList] : []
+  }
+}
+
+function makeNetworkError (reason) {
+  const isError = isErrorLike(reason)
+  return makeResponse({
+    type: 'error',
+    status: 0,
+    error: isError
+      ? reason
+      : new Error(reason ? String(reason) : reason),
+    aborted: reason && reason.name === 'AbortError'
+  })
+}
+
+function makeFilteredResponse (response, state) {
+  state = {
+    internalResponse: response,
+    ...state
+  }
+
+  return new Proxy(response, {
+    get (target, p) {
+      return p in state ? state[p] : target[p]
+    },
+    set (target, p, value) {
+      assert(!(p in state))
+      target[p] = value
+      return true
+    }
+  })
+}
+
+// https://fetch.spec.whatwg.org/#concept-filtered-response
+function filterResponse (response, type) {
+  // Set response to the following filtered response with response as its
+  // internal response, depending on request’s response tainting:
+  if (type === 'basic') {
+    // A basic filtered response is a filtered response whose type is "basic"
+    // and header list excludes any headers in internal response’s header list
+    // whose name is a forbidden response-header name.
+
+    // Note: undici does not implement forbidden response-header names
+    return makeFilteredResponse(response, {
+      type: 'basic',
+      headersList: response.headersList
+    })
+  } else if (type === 'cors') {
+    // A CORS filtered response is a filtered response whose type is "cors"
+    // and header list excludes any headers in internal response’s header
+    // list whose name is not a CORS-safelisted response-header name, given
+    // internal response’s CORS-exposed header-name list.
+
+    // Note: undici does not implement CORS-safelisted response-header names
+    return makeFilteredResponse(response, {
+      type: 'cors',
+      headersList: response.headersList
+    })
+  } else if (type === 'opaque') {
+    // An opaque filtered response is a filtered response whose type is
+    // "opaque", URL list is the empty list, status is 0, status message
+    // is the empty byte sequence, header list is empty, and body is null.
+
+    return makeFilteredResponse(response, {
+      type: 'opaque',
+      urlList: Object.freeze([]),
+      status: 0,
+      statusText: '',
+      body: null
+    })
+  } else if (type === 'opaqueredirect') {
+    // An opaque-redirect filtered response is a filtered response whose type
+    // is "opaqueredirect", status is 0, status message is the empty byte
+    // sequence, header list is empty, and body is null.
+
+    return makeFilteredResponse(response, {
+      type: 'opaqueredirect',
+      status: 0,
+      statusText: '',
+      headersList: [],
+      body: null
+    })
+  } else {
+    assert(false)
+  }
+}
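+
+// --- Editor's illustration: a hedged sketch (not part of upstream undici)
+// of how the Proxy overlay above behaves. The filtered view answers with the
+// filter state (status 0 for "opaque") while the internal response keeps its
+// real status. Never called by this bundle; the name is hypothetical.
+function exampleOpaqueFilter () {
+  const internal = makeResponse({ status: 200, urlList: [new URL('https://example.com/')] })
+  const opaque = filterResponse(internal, 'opaque')
+  return [opaque.status, opaque.internalResponse.status] // [0, 200]
+}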
+
+// https://fetch.spec.whatwg.org/#appropriate-network-error
+function makeAppropriateNetworkError (fetchParams, err = null) {
+  // 1. Assert: fetchParams is canceled.
+  assert(isCancelled(fetchParams))
+
+  // 2. Return an aborted network error if fetchParams is aborted;
+  // otherwise return a network error.
+  return isAborted(fetchParams)
+    ? makeNetworkError(Object.assign(new DOMException('The operation was aborted.', 'AbortError'), { cause: err }))
+    : makeNetworkError(Object.assign(new DOMException('Request was cancelled.'), { cause: err }))
+}
+
+// https://whatpr.org/fetch/1392.html#initialize-a-response
+function initializeResponse (response, init, body) {
+  // 1. If init["status"] is not in the range 200 to 599, inclusive, then
+  //    throw a RangeError.
+  if (init.status !== null && (init.status < 200 || init.status > 599)) {
+    throw new RangeError('init["status"] must be in the range of 200 to 599, inclusive.')
+  }
+
+  // 2. If init["statusText"] does not match the reason-phrase token production,
+  //    then throw a TypeError.
+  if ('statusText' in init && init.statusText != null) {
+    // See, https://datatracker.ietf.org/doc/html/rfc7230#section-3.1.2:
+    //   reason-phrase  = *( HTAB / SP / VCHAR / obs-text )
+    if (!isValidReasonPhrase(String(init.statusText))) {
+      throw new TypeError('Invalid statusText')
+    }
+  }
+
+  // 3. Set response’s response’s status to init["status"].
+  if ('status' in init && init.status != null) {
+    response[kState].status = init.status
+  }
+
+  // 4. Set response’s response’s status message to init["statusText"].
+  if ('statusText' in init && init.statusText != null) {
+    response[kState].statusText = init.statusText
+  }
+
+  // 5. If init["headers"] exists, then fill response’s headers with init["headers"].
+  if ('headers' in init && init.headers != null) {
+    fill(response[kHeaders], init.headers)
+  }
+
+  // 6. If body was given, then:
+  if (body) {
+    // 1. If response's status is a null body status, then throw a TypeError.
+    if (nullBodyStatus.includes(response.status)) {
+      throw webidl.errors.exception({
+        header: 'Response constructor',
+        message: 'Invalid response status code ' + response.status
+      })
+    }
+
+    // 2. Set response's body to body's body.
+    response[kState].body = body.body
+
+    // 3. If body's type is non-null and response's header list does not contain
+    //    `Content-Type`, then append (`Content-Type`, body's type) to response's header list.
+    if (body.type != null && !response[kState].headersList.contains('Content-Type')) {
+      response[kState].headersList.append('content-type', body.type)
+    }
+  }
+}
+
+webidl.converters.ReadableStream = webidl.interfaceConverter(
+  ReadableStream
+)
+
+webidl.converters.FormData = webidl.interfaceConverter(
+  FormData
+)
+
+webidl.converters.URLSearchParams = webidl.interfaceConverter(
+  URLSearchParams
+)
+
+// https://fetch.spec.whatwg.org/#typedefdef-xmlhttprequestbodyinit
+webidl.converters.XMLHttpRequestBodyInit = function (V) {
+  if (typeof V === 'string') {
+    return webidl.converters.USVString(V)
+  }
+
+  if (isBlobLike(V)) {
+    return webidl.converters.Blob(V, { strict: false })
+  }
+
+  if (types.isArrayBuffer(V) || types.isTypedArray(V) || types.isDataView(V)) {
+    return webidl.converters.BufferSource(V)
+  }
+
+  if (util.isFormDataLike(V)) {
+    return webidl.converters.FormData(V, { strict: false })
+  }
+
+  if (V instanceof URLSearchParams) {
+    return webidl.converters.URLSearchParams(V)
+  }
+
+  return webidl.converters.DOMString(V)
+}
+
+// https://fetch.spec.whatwg.org/#bodyinit
+webidl.converters.BodyInit = function (V) {
+  if (V instanceof ReadableStream) {
+    return webidl.converters.ReadableStream(V)
+  }
+
+  // Note: the spec doesn't include async iterables,
+  // this is an undici extension.
+  if (V?.[Symbol.asyncIterator]) {
+    return V
+  }
+
+  return webidl.converters.XMLHttpRequestBodyInit(V)
+}
+
+webidl.converters.ResponseInit = webidl.dictionaryConverter([
+  {
+    key: 'status',
+    converter: webidl.converters['unsigned short'],
+    defaultValue: 200
+  },
+  {
+    key: 'statusText',
+    converter: webidl.converters.ByteString,
+    defaultValue: ''
+  },
+  {
+    key: 'headers',
+    converter: webidl.converters.HeadersInit
+  }
+])
+
+module.exports = {
+  makeNetworkError,
+  makeResponse,
+  makeAppropriateNetworkError,
+  filterResponse,
+  Response,
+  cloneResponse
+}
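+
+// --- Editor's illustration: a hedged usage sketch (not part of upstream
+// undici) tying together Response.json(), initializeResponse() and clone()
+// above. Never called by this bundle; the name is hypothetical.
+async function exampleResponseJson () {
+  const res = Response.json({ ok: true }, { status: 201, statusText: 'Created' })
+  // initializeResponse() appended 'content-type: application/json' because
+  // the header list did not already contain one.
+  const copy = res.clone()
+  return [res.headers.get('content-type'), await res.json(), await copy.json()]
+}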
+
+
+/***/ }),
+
+/***/ 8323:
+/***/ ((module) => {
+
+
+
+module.exports = {
+  kUrl: Symbol('url'),
+  kHeaders: Symbol('headers'),
+  kSignal: Symbol('signal'),
+  kState: Symbol('state'),
+  kGuard: Symbol('guard'),
+  kRealm: Symbol('realm')
+}
+
+
+/***/ }),
+
+/***/ 2696:
+/***/ ((module, __unused_webpack_exports, __nccwpck_require__) => {
+
+
+
+const { redirectStatusSet, referrerPolicySet: referrerPolicyTokens, badPortsSet } = __nccwpck_require__(4135)
+const { getGlobalOrigin } = __nccwpck_require__(9963)
+const { performance } = __nccwpck_require__(2987)
+const { isBlobLike, toUSVString, ReadableStreamFrom } = __nccwpck_require__(3465)
+const assert = __nccwpck_require__(2613)
+const { isUint8Array } = __nccwpck_require__(8253)
+
+let supportedHashes = []
+
+// https://nodejs.org/api/crypto.html#determining-if-crypto-support-is-unavailable
+/** @type {import('crypto')|undefined} */
+let crypto
+
+try {
+  crypto = __nccwpck_require__(6982)
+  const possibleRelevantHashes = ['sha256', 'sha384', 'sha512']
+  supportedHashes = crypto.getHashes().filter((hash) => possibleRelevantHashes.includes(hash))
+/* c8 ignore next 3 */
+} catch {
+}
+
+function responseURL (response) {
+  // https://fetch.spec.whatwg.org/#responses
+  // A response has an associated URL. It is a pointer to the last URL
+  // in response’s URL list and null if response’s URL list is empty.
+  const urlList = response.urlList
+  const length = urlList.length
+  return length === 0 ? null : urlList[length - 1].toString()
+}
+
+// https://fetch.spec.whatwg.org/#concept-response-location-url
+function responseLocationURL (response, requestFragment) {
+  // 1. If response’s status is not a redirect status, then return null.
+  if (!redirectStatusSet.has(response.status)) {
+    return null
+  }
+
+  // 2. Let location be the result of extracting header list values given
+  // `Location` and response’s header list.
+  let location = response.headersList.get('location')
+
+  // 3. If location is a header value, then set location to the result of
+  //    parsing location with response’s URL.
+  if (location !== null && isValidHeaderValue(location)) {
+    location = new URL(location, responseURL(response))
+  }
+
+  // 4. If location is a URL whose fragment is null, then set location’s
+  // fragment to requestFragment.
+  if (location && !location.hash) {
+    location.hash = requestFragment
+  }
+
+  // 5. Return location.
+  return location
+}
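+
+// --- Editor's illustration: a hedged sketch (not part of upstream undici)
+// of responseLocationURL() above: `Location` is resolved against the
+// response's URL and the request fragment is carried over when the redirect
+// target has none. Never called by this bundle; the name is hypothetical and
+// a Map stands in for a header list since only .get('location') is used.
+function exampleResponseLocationURL () {
+  const response = {
+    status: 301,
+    urlList: [new URL('https://example.com/old')],
+    headersList: new Map([['location', '/new']])
+  }
+  const target = responseLocationURL(response, '#section')
+  return target.href // 'https://example.com/new#section'
+}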
+
+/** @returns {URL} */
+function requestCurrentURL (request) {
+  return request.urlList[request.urlList.length - 1]
+}
+
+function requestBadPort (request) {
+  // 1. Let url be request’s current URL.
+  const url = requestCurrentURL(request)
+
+  // 2. If url’s scheme is an HTTP(S) scheme and url’s port is a bad port,
+  // then return blocked.
+  if (urlIsHttpHttpsScheme(url) && badPortsSet.has(url.port)) {
+    return 'blocked'
+  }
+
+  // 3. Return allowed.
+  return 'allowed'
+}
+
+function isErrorLike (object) {
+  return object instanceof Error || (
+    object?.constructor?.name === 'Error' ||
+    object?.constructor?.name === 'DOMException'
+  )
+}
+
+// Check whether |statusText| is a ByteString and
+// matches the Reason-Phrase token production.
+// RFC 2616: https://tools.ietf.org/html/rfc2616
+// RFC 7230: https://tools.ietf.org/html/rfc7230
+// "reason-phrase = *( HTAB / SP / VCHAR / obs-text )"
+// https://github.com/chromium/chromium/blob/94.0.4604.1/third_party/blink/renderer/core/fetch/response.cc#L116
+function isValidReasonPhrase (statusText) {
+  for (let i = 0; i < statusText.length; ++i) {
+    const c = statusText.charCodeAt(i)
+    if (
+      !(
+        (
+          c === 0x09 || // HTAB
+          (c >= 0x20 && c <= 0x7e) || // SP / VCHAR
+          (c >= 0x80 && c <= 0xff)
+        ) // obs-text
+      )
+    ) {
+      return false
+    }
+  }
+  return true
+}
+
+/**
+ * @see https://tools.ietf.org/html/rfc7230#section-3.2.6
+ * @param {number} c
+ */
+function isTokenCharCode (c) {
+  switch (c) {
+    case 0x22:
+    case 0x28:
+    case 0x29:
+    case 0x2c:
+    case 0x2f:
+    case 0x3a:
+    case 0x3b:
+    case 0x3c:
+    case 0x3d:
+    case 0x3e:
+    case 0x3f:
+    case 0x40:
+    case 0x5b:
+    case 0x5c:
+    case 0x5d:
+    case 0x7b:
+    case 0x7d:
+      // DQUOTE and "(),/:;<=>?@[\]{}"
+      return false
+    default:
+      // VCHAR %x21-7E
+      return c >= 0x21 && c <= 0x7e
+  }
+}
+
+/**
+ * @param {string} characters
+ */
+function isValidHTTPToken (characters) {
+  if (characters.length === 0) {
+    return false
+  }
+  for (let i = 0; i < characters.length; ++i) {
+    if (!isTokenCharCode(characters.charCodeAt(i))) {
+      return false
+    }
+  }
+  return true
+}
+
+/**
+ * @see https://fetch.spec.whatwg.org/#header-name
+ * @param {string} potentialValue
+ */
+function isValidHeaderName (potentialValue) {
+  return isValidHTTPToken(potentialValue)
+}
+
+/**
+ * @see https://fetch.spec.whatwg.org/#header-value
+ * @param {string} potentialValue
+ */
+function isValidHeaderValue (potentialValue) {
+  // - Has no leading or trailing HTTP tab or space bytes.
+  // - Contains no 0x00 (NUL) or HTTP newline bytes.
+  if (
+    potentialValue.startsWith('\t') ||
+    potentialValue.startsWith(' ') ||
+    potentialValue.endsWith('\t') ||
+    potentialValue.endsWith(' ')
+  ) {
+    return false
+  }
+
+  if (
+    potentialValue.includes('\0') ||
+    potentialValue.includes('\r') ||
+    potentialValue.includes('\n')
+  ) {
+    return false
+  }
+
+  return true
+}
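+
+// Illustrative examples:
+//   isValidHeaderValue('text/html')   // => true
+//   isValidHeaderValue(' text/html')  // => false (leading space)
+//   isValidHeaderValue('a\r\nb')      // => false (contains CR/LF)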
+
+// https://w3c.github.io/webappsec-referrer-policy/#set-requests-referrer-policy-on-redirect
+function setRequestReferrerPolicyOnRedirect (request, actualResponse) {
+  //  Given a request request and a response actualResponse, this algorithm
+  //  updates request’s referrer policy according to the Referrer-Policy
+  //  header (if any) in actualResponse.
+
+  // 1. Let policy be the result of executing § 8.1 Parse a referrer policy
+  // from a Referrer-Policy header on actualResponse.
+
+  // 8.1 Parse a referrer policy from a Referrer-Policy header
+  // 1. Let policy-tokens be the result of extracting header list values given `Referrer-Policy` and response’s header list.
+  const { headersList } = actualResponse
+  // 2. Let policy be the empty string.
+  // 3. For each token in policy-tokens, if token is a referrer policy and token is not the empty string, then set policy to token.
+  // 4. Return policy.
+  const policyHeader = (headersList.get('referrer-policy') ?? '').split(',')
+
+  // Note: As the referrer-policy can contain multiple policies
+  // separated by comma, we need to loop through all of them
+  // and pick the right-most valid one.
+  // Ref: https://developer.mozilla.org/en-US/docs/Web/HTTP/Headers/Referrer-Policy#specify_a_fallback_policy
+  let policy = ''
+  if (policyHeader.length > 0) {
+    // The right-most policy takes precedence.
+    // The left-most policy is the fallback.
+    for (let i = policyHeader.length; i !== 0; i--) {
+      const token = policyHeader[i - 1].trim()
+      if (referrerPolicyTokens.has(token)) {
+        policy = token
+        break
+      }
+    }
+  }
+
+  // 2. If policy is not the empty string, then set request’s referrer policy to policy.
+  if (policy !== '') {
+    request.referrerPolicy = policy
+  }
+}
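+
+// Illustrative sketch: for a response carrying
+// `Referrer-Policy: no-referrer, strict-origin-when-cross-origin, bogus`,
+// the right-most recognized token wins, so request.referrerPolicy becomes
+// 'strict-origin-when-cross-origin'; if no token is recognized, the
+// request's existing policy is left untouched.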
+
+// https://fetch.spec.whatwg.org/#cross-origin-resource-policy-check
+function crossOriginResourcePolicyCheck () {
+  // TODO
+  return 'allowed'
+}
+
+// https://fetch.spec.whatwg.org/#concept-cors-check
+function corsCheck () {
+  // TODO
+  return 'success'
+}
+
+// https://fetch.spec.whatwg.org/#concept-tao-check
+function TAOCheck () {
+  // TODO
+  return 'success'
+}
+
+function appendFetchMetadata (httpRequest) {
+  //  https://w3c.github.io/webappsec-fetch-metadata/#sec-fetch-dest-header
+  //  TODO
+
+  //  https://w3c.github.io/webappsec-fetch-metadata/#sec-fetch-mode-header
+
+  //  1. Assert: r’s url is a potentially trustworthy URL.
+  //  TODO
+
+  //  2. Let header be a Structured Header whose value is a token.
+  let header = null
+
+  //  3. Set header’s value to r’s mode.
+  header = httpRequest.mode
+
+  //  4. Set a structured field value `Sec-Fetch-Mode`/header in r’s header list.
+  httpRequest.headersList.set('sec-fetch-mode', header)
+
+  //  https://w3c.github.io/webappsec-fetch-metadata/#sec-fetch-site-header
+  //  TODO
+
+  //  https://w3c.github.io/webappsec-fetch-metadata/#sec-fetch-user-header
+  //  TODO
+}
+
+// https://fetch.spec.whatwg.org/#append-a-request-origin-header
+function appendRequestOriginHeader (request) {
+  // 1. Let serializedOrigin be the result of byte-serializing a request origin with request.
+  let serializedOrigin = request.origin
+
+  // 2. If request’s response tainting is "cors" or request’s mode is "websocket", then append (`Origin`, serializedOrigin) to request’s header list.
+  if (request.responseTainting === 'cors' || request.mode === 'websocket') {
+    if (serializedOrigin) {
+      request.headersList.append('origin', serializedOrigin)
+    }
+
+  // 3. Otherwise, if request’s method is neither `GET` nor `HEAD`, then:
+  } else if (request.method !== 'GET' && request.method !== 'HEAD') {
+    // 1. Switch on request’s referrer policy:
+    switch (request.referrerPolicy) {
+      case 'no-referrer':
+        // Set serializedOrigin to `null`.
+        serializedOrigin = null
+        break
+      case 'no-referrer-when-downgrade':
+      case 'strict-origin':
+      case 'strict-origin-when-cross-origin':
+        // If request’s origin is a tuple origin, its scheme is "https", and request’s current URL’s scheme is not "https", then set serializedOrigin to `null`.
+        if (request.origin && urlHasHttpsScheme(request.origin) && !urlHasHttpsScheme(requestCurrentURL(request))) {
+          serializedOrigin = null
+        }
+        break
+      case 'same-origin':
+        // If request’s origin is not same origin with request’s current URL’s origin, then set serializedOrigin to `null`.
+        if (!sameOrigin(request, requestCurrentURL(request))) {
+          serializedOrigin = null
+        }
+        break
+      default:
+        // Do nothing.
+    }
+
+    if (serializedOrigin) {
+      // 2. Append (`Origin`, serializedOrigin) to request’s header list.
+      request.headersList.append('origin', serializedOrigin)
+    }
+  }
+}
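+
+// Illustrative sketch: a cors-tainted (or websocket) request always sends
+// its origin when one is set. Otherwise, e.g. a POST with referrer policy
+// 'strict-origin' whose origin is https://app.example but whose current
+// URL is http://api.example clears serializedOrigin (HTTPS -> HTTP
+// downgrade), so no `Origin` header is appended.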
+
+function coarsenedSharedCurrentTime (crossOriginIsolatedCapability) {
+  // TODO
+  return performance.now()
+}
+
+// https://fetch.spec.whatwg.org/#create-an-opaque-timing-info
+function createOpaqueTimingInfo (timingInfo) {
+  return {
+    startTime: timingInfo.startTime ?? 0,
+    redirectStartTime: 0,
+    redirectEndTime: 0,
+    postRedirectStartTime: timingInfo.startTime ?? 0,
+    finalServiceWorkerStartTime: 0,
+    finalNetworkResponseStartTime: 0,
+    finalNetworkRequestStartTime: 0,
+    endTime: 0,
+    encodedBodySize: 0,
+    decodedBodySize: 0,
+    finalConnectionTimingInfo: null
+  }
+}
+
+// https://html.spec.whatwg.org/multipage/origin.html#policy-container
+function makePolicyContainer () {
+  // Note: the fetch spec doesn't make use of embedder policy or CSP list
+  return {
+    referrerPolicy: 'strict-origin-when-cross-origin'
+  }
+}
+
+// https://html.spec.whatwg.org/multipage/origin.html#clone-a-policy-container
+function clonePolicyContainer (policyContainer) {
+  return {
+    referrerPolicy: policyContainer.referrerPolicy
+  }
+}
+
+// https://w3c.github.io/webappsec-referrer-policy/#determine-requests-referrer
+function determineRequestsReferrer (request) {
+  // 1. Let policy be request's referrer policy.
+  const policy = request.referrerPolicy
+
+  // Note: policy cannot (shouldn't) be null or an empty string.
+  assert(policy)
+
+  // 2. Let environment be request’s client.
+
+  let referrerSource = null
+
+  // 3. Switch on request’s referrer:
+  if (request.referrer === 'client') {
+    // Note: node isn't a browser and doesn't implement document/iframes,
+    // so we bypass this step and replace it with our own.
+
+    const globalOrigin = getGlobalOrigin()
+
+    if (!globalOrigin || globalOrigin.origin === 'null') {
+      return 'no-referrer'
+    }
+
+    // note: we need to clone it as it's mutated
+    referrerSource = new URL(globalOrigin)
+  } else if (request.referrer instanceof URL) {
+    // Let referrerSource be request’s referrer.
+    referrerSource = request.referrer
+  }
+
+  // 4. Let request’s referrerURL be the result of stripping referrerSource for
+  //    use as a referrer.
+  let referrerURL = stripURLForReferrer(referrerSource)
+
+  // 5. Let referrerOrigin be the result of stripping referrerSource for use as
+  //    a referrer, with the origin-only flag set to true.
+  const referrerOrigin = stripURLForReferrer(referrerSource, true)
+
+  // 6. If the result of serializing referrerURL is a string whose length is
+  //    greater than 4096, set referrerURL to referrerOrigin.
+  if (referrerURL.toString().length > 4096) {
+    referrerURL = referrerOrigin
+  }
+
+  const areSameOrigin = sameOrigin(request, referrerURL)
+  const isNonPotentiallyTrustWorthy = isURLPotentiallyTrustworthy(referrerURL) &&
+    !isURLPotentiallyTrustworthy(request.url)
+
+  // 8. Execute the switch statements corresponding to the value of policy:
+  switch (policy) {
+    case 'origin': return referrerOrigin != null ? referrerOrigin : stripURLForReferrer(referrerSource, true)
+    case 'unsafe-url': return referrerURL
+    case 'same-origin':
+      return areSameOrigin ? referrerOrigin : 'no-referrer'
+    case 'origin-when-cross-origin':
+      return areSameOrigin ? referrerURL : referrerOrigin
+    case 'strict-origin-when-cross-origin': {
+      const currentURL = requestCurrentURL(request)
+
+      // 1. If the origin of referrerURL and the origin of request’s current
+      //    URL are the same, then return referrerURL.
+      if (sameOrigin(referrerURL, currentURL)) {
+        return referrerURL
+      }
+
+      // 2. If referrerURL is a potentially trustworthy URL and request’s
+      //    current URL is not a potentially trustworthy URL, then return no
+      //    referrer.
+      if (isURLPotentiallyTrustworthy(referrerURL) && !isURLPotentiallyTrustworthy(currentURL)) {
+        return 'no-referrer'
+      }
+
+      // 3. Return referrerOrigin.
+      return referrerOrigin
+    }
+    case 'strict-origin': // eslint-disable-line
+      /**
+       * 1. If referrerURL is a potentially trustworthy URL and
+       * request’s current URL is not a potentially trustworthy URL,
+       * then return no referrer.
+       * 2. Return referrerOrigin
+       */
+    case 'no-referrer-when-downgrade': // eslint-disable-line
+      /**
+       * 1. If referrerURL is a potentially trustworthy URL and
+       * request’s current URL is not a potentially trustworthy URL,
+       * then return no referrer.
+       * 2. Return referrerOrigin
+       */
+
+    default: // eslint-disable-line
+      return isNonPotentiallyTrustWorthy ? 'no-referrer' : referrerOrigin
+  }
+}
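+
+// Illustrative sketch: with the default 'strict-origin-when-cross-origin'
+// policy, a request whose referrer is new URL('https://docs.example/guide?x=1')
+// keeps the full stripped referrer (query kept, credentials/fragment dropped)
+// when the current URL is same-origin, but falls back to the bare origin
+// https://docs.example/ when the current URL is another HTTPS origin.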
+
+/**
+ * @see https://w3c.github.io/webappsec-referrer-policy/#strip-url
+ * @param {URL} url
+ * @param {boolean|undefined} originOnly
+ */
+function stripURLForReferrer (url, originOnly) {
+  // 1. Assert: url is a URL.
+  assert(url instanceof URL)
+
+  // 2. If url’s scheme is a local scheme, then return no referrer.
+  if (url.protocol === 'file:' || url.protocol === 'about:' || url.protocol === 'blank:') {
+    return 'no-referrer'
+  }
+
+  // 3. Set url’s username to the empty string.
+  url.username = ''
+
+  // 4. Set url’s password to the empty string.
+  url.password = ''
+
+  // 5. Set url’s fragment to null.
+  url.hash = ''
+
+  // 6. If the origin-only flag is true, then:
+  if (originOnly) {
+    // 1. Set url’s path to « the empty string ».
+    url.pathname = ''
+
+    // 2. Set url’s query to null.
+    url.search = ''
+  }
+
+  // 7. Return url.
+  return url
+}
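+
+// Illustrative examples:
+//   stripURLForReferrer(new URL('https://user:pw@example.com/p?q=1#f'))
+//     // => URL for https://example.com/p?q=1 (credentials and fragment dropped)
+//   stripURLForReferrer(new URL('https://example.com/p?q=1'), true)
+//     // => URL for https://example.com/ (origin only)
+//   stripURLForReferrer(new URL('file:///tmp/x'))
+//     // => 'no-referrer' (local scheme)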
+
+function isURLPotentiallyTrustworthy (url) {
+  if (!(url instanceof URL)) {
+    return false
+  }
+
+  // If child of about, return true
+  if (url.href === 'about:blank' || url.href === 'about:srcdoc') {
+    return true
+  }
+
+  // If scheme is data, return true
+  if (url.protocol === 'data:') return true
+
+  // If file, return true
+  if (url.protocol === 'file:') return true
+
+  return isOriginPotentiallyTrustworthy(url.origin)
+
+  function isOriginPotentiallyTrustworthy (origin) {
+    // If origin is explicitly null, return false
+    if (origin == null || origin === 'null') return false
+
+    const originAsURL = new URL(origin)
+
+    // If secure, return true
+    if (originAsURL.protocol === 'https:' || originAsURL.protocol === 'wss:') {
+      return true
+    }
+
+    // If localhost or variants, return true
+    if (/^127(?:\.[0-9]+){0,2}\.[0-9]+$|^\[(?:0*:)*?:?0*1\]$/.test(originAsURL.hostname) ||
+     (originAsURL.hostname === 'localhost' || originAsURL.hostname.includes('localhost.')) ||
+     (originAsURL.hostname.endsWith('.localhost'))) {
+      return true
+    }
+
+    // If any other, return false
+    return false
+  }
+}
+
+/**
+ * @see https://w3c.github.io/webappsec-subresource-integrity/#does-response-match-metadatalist
+ * @param {Uint8Array} bytes
+ * @param {string} metadataList
+ */
+function bytesMatch (bytes, metadataList) {
+  // If node is not built with OpenSSL support, we cannot check
+  // a request's integrity, so allow it by default (following the spec's
+  // precedent of allowing requests when an invalid hash is given).
+  /* istanbul ignore if: only if node is built with --without-ssl */
+  if (crypto === undefined) {
+    return true
+  }
+
+  // 1. Let parsedMetadata be the result of parsing metadataList.
+  const parsedMetadata = parseMetadata(metadataList)
+
+  // 2. If parsedMetadata is no metadata, return true.
+  if (parsedMetadata === 'no metadata') {
+    return true
+  }
+
+  // 3. If response is not eligible for integrity validation, return false.
+  // TODO
+
+  // 4. If parsedMetadata is the empty set, return true.
+  if (parsedMetadata.length === 0) {
+    return true
+  }
+
+  // 5. Let metadata be the result of getting the strongest
+  //    metadata from parsedMetadata.
+  const strongest = getStrongestMetadata(parsedMetadata)
+  const metadata = filterMetadataListByAlgorithm(parsedMetadata, strongest)
+
+  // 6. For each item in metadata:
+  for (const item of metadata) {
+    // 1. Let algorithm be the alg component of item.
+    const algorithm = item.algo
+
+    // 2. Let expectedValue be the val component of item.
+    const expectedValue = item.hash
+
+    // See https://github.com/web-platform-tests/wpt/commit/e4c5cc7a5e48093220528dfdd1c4012dc3837a0e
+    // "be liberal with padding". This is annoying, and it's not even in the spec.
+
+    // 3. Let actualValue be the result of applying algorithm to bytes.
+    let actualValue = crypto.createHash(algorithm).update(bytes).digest('base64')
+
+    if (actualValue[actualValue.length - 1] === '=') {
+      if (actualValue[actualValue.length - 2] === '=') {
+        actualValue = actualValue.slice(0, -2)
+      } else {
+        actualValue = actualValue.slice(0, -1)
+      }
+    }
+
+    // 4. If actualValue is a case-sensitive match for expectedValue,
+    //    return true.
+    if (compareBase64Mixed(actualValue, expectedValue)) {
+      return true
+    }
+  }
+
+  // 7. Return false.
+  return false
+}
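+
+// Illustrative sketch (assumes node was built with crypto support):
+//   const body = Buffer.from('hello world')
+//   const digest = crypto.createHash('sha256').update(body).digest('base64')
+//   bytesMatch(body, `sha256-${digest}`)    // => true
+//   bytesMatch(body, 'sha256-AAAAAAAAAAA')  // => false (digest mismatch)
+//   bytesMatch(body, 'not-an-integrity')    // => true  (unparseable metadata is ignored)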
+
+// https://w3c.github.io/webappsec-subresource-integrity/#grammardef-hash-with-options
+// https://www.w3.org/TR/CSP2/#source-list-syntax
+// https://www.rfc-editor.org/rfc/rfc5234#appendix-B.1
+const parseHashWithOptions = /(?<algo>sha256|sha384|sha512)-((?<hash>[A-Za-z0-9+/]+|[A-Za-z0-9_-]+)={0,2}(?:\s|$)( +[!-~]*)?)?/i
+
+/**
+ * @see https://w3c.github.io/webappsec-subresource-integrity/#parse-metadata
+ * @param {string} metadata
+ */
+function parseMetadata (metadata) {
+  // 1. Let result be the empty set.
+  /** @type {{ algo: string, hash: string }[]} */
+  const result = []
+
+  // 2. Let empty be equal to true.
+  let empty = true
+
+  // 3. For each token returned by splitting metadata on spaces:
+  for (const token of metadata.split(' ')) {
+    // 1. Set empty to false.
+    empty = false
+
+    // 2. Parse token as a hash-with-options.
+    const parsedToken = parseHashWithOptions.exec(token)
+
+    // 3. If token does not parse, continue to the next token.
+    if (
+      parsedToken === null ||
+      parsedToken.groups === undefined ||
+      parsedToken.groups.algo === undefined
+    ) {
+      // Note: Chromium blocks the request at this point, but Firefox
+      // gives a warning that an invalid integrity was given. The
+      // correct behavior is to ignore these, and subsequently not
+      // check the integrity of the resource.
+      continue
+    }
+
+    // 4. Let algorithm be the hash-algo component of token.
+    const algorithm = parsedToken.groups.algo.toLowerCase()
+
+    // 5. If algorithm is a hash function recognized by the user
+    //    agent, add the parsed token to result.
+    if (supportedHashes.includes(algorithm)) {
+      result.push(parsedToken.groups)
+    }
+  }
+
+  // 4. Return no metadata if empty is true, otherwise return result.
+  if (empty === true) {
+    return 'no metadata'
+  }
+
+  return result
+}
+
+/**
+ * @param {{ algo: 'sha256' | 'sha384' | 'sha512' }[]} metadataList
+ */
+function getStrongestMetadata (metadataList) {
+  // Let algorithm be the algo component of the first item in metadataList.
+  // It may be the weakest of the set (e.g. sha256).
+  let algorithm = metadataList[0].algo
+  // If the algorithm is sha512, then it is the strongest
+  // and we can return immediately
+  if (algorithm[3] === '5') {
+    return algorithm
+  }
+
+  for (let i = 1; i < metadataList.length; ++i) {
+    const metadata = metadataList[i]
+    // If the algorithm is sha512, then it is the strongest
+    // and we can break the loop immediately
+    if (metadata.algo[3] === '5') {
+      algorithm = 'sha512'
+      break
+    // If the algorithm is sha384, then a potential sha256 or sha384 is ignored
+    } else if (algorithm[3] === '3') {
+      continue
+    // algorithm is sha256, check if algorithm is sha384 and if so, set it as
+    // the strongest
+    } else if (metadata.algo[3] === '3') {
+      algorithm = 'sha384'
+    }
+  }
+  return algorithm
+}
+
+function filterMetadataListByAlgorithm (metadataList, algorithm) {
+  if (metadataList.length === 1) {
+    return metadataList
+  }
+
+  let pos = 0
+  for (let i = 0; i < metadataList.length; ++i) {
+    if (metadataList[i].algo === algorithm) {
+      metadataList[pos++] = metadataList[i]
+    }
+  }
+
+  metadataList.length = pos
+
+  return metadataList
+}
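+
+// Illustrative example: for parsed metadata with algos
+// ['sha256', 'sha512', 'sha384'], getStrongestMetadata returns 'sha512',
+// and filterMetadataListByAlgorithm then keeps only the sha512 entries
+// before any digests are compared.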
+
+/**
+ * Compares two base64 strings, allowing for base64url
+ * in the second string.
+ *
+ * @param {string} actualValue always base64
+ * @param {string} expectedValue base64 or base64url
+ * @returns {boolean}
+ */
+function compareBase64Mixed (actualValue, expectedValue) {
+  if (actualValue.length !== expectedValue.length) {
+    return false
+  }
+  for (let i = 0; i < actualValue.length; ++i) {
+    if (actualValue[i] !== expectedValue[i]) {
+      if (
+        (actualValue[i] === '+' && expectedValue[i] === '-') ||
+        (actualValue[i] === '/' && expectedValue[i] === '_')
+      ) {
+        continue
+      }
+      return false
+    }
+  }
+
+  return true
+}
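+
+// Illustrative examples:
+//   compareBase64Mixed('a+b/c', 'a-b_c') // => true  (base64url aliases accepted)
+//   compareBase64Mixed('a+b/c', 'a+b/d') // => false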
+
+// https://w3c.github.io/webappsec-upgrade-insecure-requests/#upgrade-request
+function tryUpgradeRequestToAPotentiallyTrustworthyURL (request) {
+  // TODO
+}
+
+/**
+ * @see https://html.spec.whatwg.org/multipage/origin.html#same-origin
+ * @param {URL} A
+ * @param {URL} B
+ */
+function sameOrigin (A, B) {
+  // 1. If A and B are the same opaque origin, then return true.
+  if (A.origin === B.origin && A.origin === 'null') {
+    return true
+  }
+
+  // 2. If A and B are both tuple origins and their schemes,
+  //    hosts, and port are identical, then return true.
+  if (A.protocol === B.protocol && A.hostname === B.hostname && A.port === B.port) {
+    return true
+  }
+
+  // 3. Return false.
+  return false
+}
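+
+// Illustrative examples:
+//   sameOrigin(new URL('https://a.test/x'), new URL('https://a.test:443/y')) // => true
+//   sameOrigin(new URL('https://a.test'), new URL('http://a.test'))          // => false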
+
+function createDeferredPromise () {
+  let res
+  let rej
+  const promise = new Promise((resolve, reject) => {
+    res = resolve
+    rej = reject
+  })
+
+  return { promise, resolve: res, reject: rej }
+}
+
+function isAborted (fetchParams) {
+  return fetchParams.controller.state === 'aborted'
+}
+
+function isCancelled (fetchParams) {
+  return fetchParams.controller.state === 'aborted' ||
+    fetchParams.controller.state === 'terminated'
+}
+
+const normalizeMethodRecord = {
+  delete: 'DELETE',
+  DELETE: 'DELETE',
+  get: 'GET',
+  GET: 'GET',
+  head: 'HEAD',
+  HEAD: 'HEAD',
+  options: 'OPTIONS',
+  OPTIONS: 'OPTIONS',
+  post: 'POST',
+  POST: 'POST',
+  put: 'PUT',
+  PUT: 'PUT'
+}
+
+// Note: lookups into this record must not be able to reach Object.prototype
+// members (e.g. `Object#hasOwnProperty`), hence the null prototype below.
+Object.setPrototypeOf(normalizeMethodRecord, null)
+
+/**
+ * @see https://fetch.spec.whatwg.org/#concept-method-normalize
+ * @param {string} method
+ */
+function normalizeMethod (method) {
+  return normalizeMethodRecord[method.toLowerCase()] ?? method
+}
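+
+// Illustrative examples:
+//   normalizeMethod('get')   // => 'GET'
+//   normalizeMethod('PaTcH') // => 'PaTcH' (unknown casing/method is returned as-is)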
+
+// https://infra.spec.whatwg.org/#serialize-a-javascript-value-to-a-json-string
+function serializeJavascriptValueToJSONString (value) {
+  // 1. Let result be ? Call(%JSON.stringify%, undefined, « value »).
+  const result = JSON.stringify(value)
+
+  // 2. If result is undefined, then throw a TypeError.
+  if (result === undefined) {
+    throw new TypeError('Value is not JSON serializable')
+  }
+
+  // 3. Assert: result is a string.
+  assert(typeof result === 'string')
+
+  // 4. Return result.
+  return result
+}
+
+// https://tc39.es/ecma262/#sec-%25iteratorprototype%25-object
+const esIteratorPrototype = Object.getPrototypeOf(Object.getPrototypeOf([][Symbol.iterator]()))
+
+/**
+ * @see https://webidl.spec.whatwg.org/#dfn-iterator-prototype-object
+ * @param {() => unknown[]} iterator
+ * @param {string} name name of the instance
+ * @param {'key'|'value'|'key+value'} kind
+ */
+function makeIterator (iterator, name, kind) {
+  const object = {
+    index: 0,
+    kind,
+    target: iterator
+  }
+
+  const i = {
+    next () {
+      // 1. Let interface be the interface for which the iterator prototype object exists.
+
+      // 2. Let thisValue be the this value.
+
+      // 3. Let object be ? ToObject(thisValue).
+
+      // 4. If object is a platform object, then perform a security
+      //    check, passing:
+
+      // 5. If object is not a default iterator object for interface,
+      //    then throw a TypeError.
+      if (Object.getPrototypeOf(this) !== i) {
+        throw new TypeError(
+          `'next' called on an object that does not implement interface ${name} Iterator.`
+        )
+      }
+
+      // 6. Let index be object’s index.
+      // 7. Let kind be object’s kind.
+      // 8. Let values be object’s target's value pairs to iterate over.
+      const { index, kind, target } = object
+      const values = target()
+
+      // 9. Let len be the length of values.
+      const len = values.length
+
+      // 10. If index is greater than or equal to len, then return
+      //     CreateIterResultObject(undefined, true).
+      if (index >= len) {
+        return { value: undefined, done: true }
+      }
+
+      // 11. Let pair be the entry in values at index index.
+      const pair = values[index]
+
+      // 12. Set object’s index to index + 1.
+      object.index = index + 1
+
+      // 13. Return the iterator result for pair and kind.
+      return iteratorResult(pair, kind)
+    },
+    // The class string of an iterator prototype object for a given interface is the
+    // result of concatenating the identifier of the interface and the string " Iterator".
+    [Symbol.toStringTag]: `${name} Iterator`
+  }
+
+  // The [[Prototype]] internal slot of an iterator prototype object must be %IteratorPrototype%.
+  Object.setPrototypeOf(i, esIteratorPrototype)
+  // esIteratorPrototype needs to be the prototype of i
+  // which is the prototype of an empty object. Yes, it's confusing.
+  return Object.setPrototypeOf({}, i)
+}
+
+// https://webidl.spec.whatwg.org/#iterator-result
+function iteratorResult (pair, kind) {
+  let result
+
+  // 1. Let result be a value determined by the value of kind:
+  switch (kind) {
+    case 'key': {
+      // 1. Let idlKey be pair’s key.
+      // 2. Let key be the result of converting idlKey to an
+      //    ECMAScript value.
+      // 3. result is key.
+      result = pair[0]
+      break
+    }
+    case 'value': {
+      // 1. Let idlValue be pair’s value.
+      // 2. Let value be the result of converting idlValue to
+      //    an ECMAScript value.
+      // 3. result is value.
+      result = pair[1]
+      break
+    }
+    case 'key+value': {
+      // 1. Let idlKey be pair’s key.
+      // 2. Let idlValue be pair’s value.
+      // 3. Let key be the result of converting idlKey to an
+      //    ECMAScript value.
+      // 4. Let value be the result of converting idlValue to
+      //    an ECMAScript value.
+      // 5. Let array be ! ArrayCreate(2).
+      // 6. Call ! CreateDataProperty(array, "0", key).
+      // 7. Call ! CreateDataProperty(array, "1", value).
+      // 8. result is array.
+      result = pair
+      break
+    }
+  }
+
+  // 2. Return CreateIterResultObject(result, false).
+  return { value: result, done: false }
+}
+
+/**
+ * @see https://fetch.spec.whatwg.org/#body-fully-read
+ */
+async function fullyReadBody (body, processBody, processBodyError) {
+  // 1. If taskDestination is null, then set taskDestination to
+  //    the result of starting a new parallel queue.
+
+  // 2. Let successSteps given a byte sequence bytes be to queue a
+  //    fetch task to run processBody given bytes, with taskDestination.
+  const successSteps = processBody
+
+  // 3. Let errorSteps be to queue a fetch task to run processBodyError,
+  //    with taskDestination.
+  const errorSteps = processBodyError
+
+  // 4. Let reader be the result of getting a reader for body’s stream.
+  //    If that threw an exception, then run errorSteps with that
+  //    exception and return.
+  let reader
+
+  try {
+    reader = body.stream.getReader()
+  } catch (e) {
+    errorSteps(e)
+    return
+  }
+
+  // 5. Read all bytes from reader, given successSteps and errorSteps.
+  try {
+    const result = await readAllBytes(reader)
+    successSteps(result)
+  } catch (e) {
+    errorSteps(e)
+  }
+}
+
+/** @type {ReadableStream} */
+let ReadableStream = globalThis.ReadableStream
+
+function isReadableStreamLike (stream) {
+  if (!ReadableStream) {
+    ReadableStream = (__nccwpck_require__(3774).ReadableStream)
+  }
+
+  return stream instanceof ReadableStream || (
+    stream[Symbol.toStringTag] === 'ReadableStream' &&
+    typeof stream.tee === 'function'
+  )
+}
+
+const MAXIMUM_ARGUMENT_LENGTH = 65535
+
+/**
+ * @see https://infra.spec.whatwg.org/#isomorphic-decode
+ * @param {number[]|Uint8Array} input
+ */
+function isomorphicDecode (input) {
+  // 1. To isomorphic decode a byte sequence input, return a string whose code point
+  //    length is equal to input’s length and whose code points have the same values
+  //    as the values of input’s bytes, in the same order.
+
+  if (input.length < MAXIMUM_ARGUMENT_LENGTH) {
+    return String.fromCharCode(...input)
+  }
+
+  return input.reduce((previous, current) => previous + String.fromCharCode(current), '')
+}
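+
+// Illustrative example:
+//   isomorphicDecode(new Uint8Array([0x68, 0x69, 0xff])) // => 'hi\u00ff'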
+
+/**
+ * @param {ReadableStreamController<Uint8Array>} controller
+ */
+function readableStreamClose (controller) {
+  try {
+    controller.close()
+  } catch (err) {
+    // TODO: add comment explaining why this error occurs.
+    if (!err.message.includes('Controller is already closed')) {
+      throw err
+    }
+  }
+}
+
+/**
+ * @see https://infra.spec.whatwg.org/#isomorphic-encode
+ * @param {string} input
+ */
+function isomorphicEncode (input) {
+  // 1. Assert: input contains no code points greater than U+00FF.
+  for (let i = 0; i < input.length; i++) {
+    assert(input.charCodeAt(i) <= 0xFF)
+  }
+
+  // 2. Return a byte sequence whose length is equal to input’s code
+  //    point length and whose bytes have the same values as the
+  //    values of input’s code points, in the same order
+  return input
+}
+
+/**
+ * @see https://streams.spec.whatwg.org/#readablestreamdefaultreader-read-all-bytes
+ * @see https://streams.spec.whatwg.org/#read-loop
+ * @param {ReadableStreamDefaultReader} reader
+ */
+async function readAllBytes (reader) {
+  const bytes = []
+  let byteLength = 0
+
+  while (true) {
+    const { done, value: chunk } = await reader.read()
+
+    if (done) {
+      // 1. Call successSteps with bytes.
+      return Buffer.concat(bytes, byteLength)
+    }
+
+    // 1. If chunk is not a Uint8Array object, call failureSteps
+    //    with a TypeError and abort these steps.
+    if (!isUint8Array(chunk)) {
+      throw new TypeError('Received non-Uint8Array chunk')
+    }
+
+    // 2. Append the bytes represented by chunk to bytes.
+    bytes.push(chunk)
+    byteLength += chunk.length
+
+    // 3. Read-loop given reader, bytes, successSteps, and failureSteps.
+  }
+}
+
+/**
+ * @see https://fetch.spec.whatwg.org/#is-local
+ * @param {URL} url
+ */
+function urlIsLocal (url) {
+  assert('protocol' in url) // ensure it's a url object
+
+  const protocol = url.protocol
+
+  return protocol === 'about:' || protocol === 'blob:' || protocol === 'data:'
+}
+
+/**
+ * @param {string|URL} url
+ */
+function urlHasHttpsScheme (url) {
+  if (typeof url === 'string') {
+    return url.startsWith('https:')
+  }
+
+  return url.protocol === 'https:'
+}
+
+/**
+ * @see https://fetch.spec.whatwg.org/#http-scheme
+ * @param {URL} url
+ */
+function urlIsHttpHttpsScheme (url) {
+  assert('protocol' in url) // ensure it's a url object
+
+  const protocol = url.protocol
+
+  return protocol === 'http:' || protocol === 'https:'
+}
+
+/**
+ * Fetch supports node >= 16.8.0, but Object.hasOwn was added in v16.9.0.
+ */
+const hasOwn = Object.hasOwn || ((dict, key) => Object.prototype.hasOwnProperty.call(dict, key))
+
+module.exports = {
+  isAborted,
+  isCancelled,
+  createDeferredPromise,
+  ReadableStreamFrom,
+  toUSVString,
+  tryUpgradeRequestToAPotentiallyTrustworthyURL,
+  coarsenedSharedCurrentTime,
+  determineRequestsReferrer,
+  makePolicyContainer,
+  clonePolicyContainer,
+  appendFetchMetadata,
+  appendRequestOriginHeader,
+  TAOCheck,
+  corsCheck,
+  crossOriginResourcePolicyCheck,
+  createOpaqueTimingInfo,
+  setRequestReferrerPolicyOnRedirect,
+  isValidHTTPToken,
+  requestBadPort,
+  requestCurrentURL,
+  responseURL,
+  responseLocationURL,
+  isBlobLike,
+  isURLPotentiallyTrustworthy,
+  isValidReasonPhrase,
+  sameOrigin,
+  normalizeMethod,
+  serializeJavascriptValueToJSONString,
+  makeIterator,
+  isValidHeaderName,
+  isValidHeaderValue,
+  hasOwn,
+  isErrorLike,
+  fullyReadBody,
+  bytesMatch,
+  isReadableStreamLike,
+  readableStreamClose,
+  isomorphicEncode,
+  isomorphicDecode,
+  urlIsLocal,
+  urlHasHttpsScheme,
+  urlIsHttpHttpsScheme,
+  readAllBytes,
+  normalizeMethodRecord,
+  parseMetadata
+}
+
+
+/***/ }),
+
+/***/ 29:
+/***/ ((module, __unused_webpack_exports, __nccwpck_require__) => {
+
+
+
+const { types } = __nccwpck_require__(9023)
+const { hasOwn, toUSVString } = __nccwpck_require__(2696)
+
+/** @type {import('../../types/webidl').Webidl} */
+const webidl = {}
+webidl.converters = {}
+webidl.util = {}
+webidl.errors = {}
+
+webidl.errors.exception = function (message) {
+  return new TypeError(`${message.header}: ${message.message}`)
+}
+
+webidl.errors.conversionFailed = function (context) {
+  const plural = context.types.length === 1 ? '' : ' one of'
+  const message =
+    `${context.argument} could not be converted to` +
+    `${plural}: ${context.types.join(', ')}.`
+
+  return webidl.errors.exception({
+    header: context.prefix,
+    message
+  })
+}
+
+webidl.errors.invalidArgument = function (context) {
+  return webidl.errors.exception({
+    header: context.prefix,
+    message: `"${context.value}" is an invalid ${context.type}.`
+  })
+}
+
+// https://webidl.spec.whatwg.org/#implements
+webidl.brandCheck = function (V, I, opts = undefined) {
+  if (opts?.strict !== false && !(V instanceof I)) {
+    throw new TypeError('Illegal invocation')
+  } else {
+    return V?.[Symbol.toStringTag] === I.prototype[Symbol.toStringTag]
+  }
+}
+
+webidl.argumentLengthCheck = function ({ length }, min, ctx) {
+  if (length < min) {
+    throw webidl.errors.exception({
+      message: `${min} argument${min !== 1 ? 's' : ''} required, ` +
+               `but${length ? ' only' : ''} ${length} found.`,
+      ...ctx
+    })
+  }
+}
+
+webidl.illegalConstructor = function () {
+  throw webidl.errors.exception({
+    header: 'TypeError',
+    message: 'Illegal constructor'
+  })
+}
+
+// https://tc39.es/ecma262/#sec-ecmascript-data-types-and-values
+webidl.util.Type = function (V) {
+  switch (typeof V) {
+    case 'undefined': return 'Undefined'
+    case 'boolean': return 'Boolean'
+    case 'string': return 'String'
+    case 'symbol': return 'Symbol'
+    case 'number': return 'Number'
+    case 'bigint': return 'BigInt'
+    case 'function':
+    case 'object': {
+      if (V === null) {
+        return 'Null'
+      }
+
+      return 'Object'
+    }
+  }
+}
+
+// https://webidl.spec.whatwg.org/#abstract-opdef-converttoint
+webidl.util.ConvertToInt = function (V, bitLength, signedness, opts = {}) {
+  let upperBound
+  let lowerBound
+
+  // 1. If bitLength is 64, then:
+  if (bitLength === 64) {
+    // 1. Let upperBound be 2^53 − 1.
+    upperBound = Math.pow(2, 53) - 1
+
+    // 2. If signedness is "unsigned", then let lowerBound be 0.
+    if (signedness === 'unsigned') {
+      lowerBound = 0
+    } else {
+      // 3. Otherwise let lowerBound be −2^53 + 1.
+      lowerBound = Math.pow(-2, 53) + 1
+    }
+  } else if (signedness === 'unsigned') {
+    // 2. Otherwise, if signedness is "unsigned", then:
+
+    // 1. Let lowerBound be 0.
+    lowerBound = 0
+
+    // 2. Let upperBound be 2^bitLength − 1.
+    upperBound = Math.pow(2, bitLength) - 1
+  } else {
+    // 3. Otherwise:
+
+    // 1. Let lowerBound be −2^(bitLength − 1).
+    lowerBound = Math.pow(-2, bitLength - 1)
+
+    // 2. Let upperBound be 2^(bitLength − 1) − 1.
+    upperBound = Math.pow(2, bitLength - 1) - 1
+  }
+
+  // 4. Let x be ? ToNumber(V).
+  let x = Number(V)
+
+  // 5. If x is −0, then set x to +0.
+  if (x === 0) {
+    x = 0
+  }
+
+  // 6. If the conversion is to an IDL type associated
+  //    with the [EnforceRange] extended attribute, then:
+  if (opts.enforceRange === true) {
+    // 1. If x is NaN, +∞, or −∞, then throw a TypeError.
+    if (
+      Number.isNaN(x) ||
+      x === Number.POSITIVE_INFINITY ||
+      x === Number.NEGATIVE_INFINITY
+    ) {
+      throw webidl.errors.exception({
+        header: 'Integer conversion',
+        message: `Could not convert ${V} to an integer.`
+      })
+    }
+
+    // 2. Set x to IntegerPart(x).
+    x = webidl.util.IntegerPart(x)
+
+    // 3. If x < lowerBound or x > upperBound, then
+    //    throw a TypeError.
+    if (x < lowerBound || x > upperBound) {
+      throw webidl.errors.exception({
+        header: 'Integer conversion',
+        message: `Value must be between ${lowerBound}-${upperBound}, got ${x}.`
+      })
+    }
+
+    // 4. Return x.
+    return x
+  }
+
+  // 7. If x is not NaN and the conversion is to an IDL
+  //    type associated with the [Clamp] extended
+  //    attribute, then:
+  if (!Number.isNaN(x) && opts.clamp === true) {
+    // 1. Set x to min(max(x, lowerBound), upperBound).
+    x = Math.min(Math.max(x, lowerBound), upperBound)
+
+    // 2. Round x to the nearest integer, choosing the
+    //    even integer if it lies halfway between two,
+    //    and choosing +0 rather than −0.
+    if (Math.floor(x) % 2 === 0) {
+      x = Math.floor(x)
+    } else {
+      x = Math.ceil(x)
+    }
+
+    // 3. Return x.
+    return x
+  }
+
+  // 8. If x is NaN, +0, +∞, or −∞, then return +0.
+  if (
+    Number.isNaN(x) ||
+    (x === 0 && Object.is(0, x)) ||
+    x === Number.POSITIVE_INFINITY ||
+    x === Number.NEGATIVE_INFINITY
+  ) {
+    return 0
+  }
+
+  // 9. Set x to IntegerPart(x).
+  x = webidl.util.IntegerPart(x)
+
+  // 10. Set x to x modulo 2^bitLength.
+  x = x % Math.pow(2, bitLength)
+
+  // 11. If signedness is "signed" and x ≥ 2^(bitLength − 1),
+  //    then return x − 2^bitLength.
+  if (signedness === 'signed' && x >= Math.pow(2, bitLength - 1)) {
+    return x - Math.pow(2, bitLength)
+  }
+
+  // 12. Otherwise, return x.
+  return x
+}
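+
+// Illustrative examples (16-bit unsigned conversion):
+//   webidl.util.ConvertToInt(65536, 16, 'unsigned')                      // => 0 (wraps modulo 2^16)
+//   webidl.util.ConvertToInt(70000, 16, 'unsigned', { clamp: true })     // => 65535
+//   webidl.util.ConvertToInt(-1, 16, 'unsigned', { enforceRange: true }) // throws TypeError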
+
+// https://webidl.spec.whatwg.org/#abstract-opdef-integerpart
+webidl.util.IntegerPart = function (n) {
+  // 1. Let r be floor(abs(n)).
+  const r = Math.floor(Math.abs(n))
+
+  // 2. If n < 0, then return -1 × r.
+  if (n < 0) {
+    return -1 * r
+  }
+
+  // 3. Otherwise, return r.
+  return r
+}
+
+// https://webidl.spec.whatwg.org/#es-sequence
+webidl.sequenceConverter = function (converter) {
+  return (V) => {
+    // 1. If Type(V) is not Object, throw a TypeError.
+    if (webidl.util.Type(V) !== 'Object') {
+      throw webidl.errors.exception({
+        header: 'Sequence',
+        message: `Value of type ${webidl.util.Type(V)} is not an Object.`
+      })
+    }
+
+    // 2. Let method be ? GetMethod(V, @@iterator).
+    /** @type {Generator} */
+    const method = V?.[Symbol.iterator]?.()
+    const seq = []
+
+    // 3. If method is undefined, throw a TypeError.
+    if (
+      method === undefined ||
+      typeof method.next !== 'function'
+    ) {
+      throw webidl.errors.exception({
+        header: 'Sequence',
+        message: 'Object is not an iterator.'
+      })
+    }
+
+    // https://webidl.spec.whatwg.org/#create-sequence-from-iterable
+    while (true) {
+      const { done, value } = method.next()
+
+      if (done) {
+        break
+      }
+
+      seq.push(converter(value))
+    }
+
+    return seq
+  }
+}
+
+// https://webidl.spec.whatwg.org/#es-to-record
+webidl.recordConverter = function (keyConverter, valueConverter) {
+  return (O) => {
+    // 1. If Type(O) is not Object, throw a TypeError.
+    if (webidl.util.Type(O) !== 'Object') {
+      throw webidl.errors.exception({
+        header: 'Record',
+        message: `Value of type ${webidl.util.Type(O)} is not an Object.`
+      })
+    }
+
+    // 2. Let result be a new empty instance of record<K, V>.
+    const result = {}
+
+    if (!types.isProxy(O)) {
+      // Object.keys only returns enumerable properties
+      const keys = Object.keys(O)
+
+      for (const key of keys) {
+        // 1. Let typedKey be key converted to an IDL value of type K.
+        const typedKey = keyConverter(key)
+
+        // 2. Let value be ? Get(O, key).
+        // 3. Let typedValue be value converted to an IDL value of type V.
+        const typedValue = valueConverter(O[key])
+
+        // 4. Set result[typedKey] to typedValue.
+        result[typedKey] = typedValue
+      }
+
+      // 5. Return result.
+      return result
+    }
+
+    // 3. Let keys be ? O.[[OwnPropertyKeys]]().
+    const keys = Reflect.ownKeys(O)
+
+    // 4. For each key of keys.
+    for (const key of keys) {
+      // 1. Let desc be ? O.[[GetOwnProperty]](key).
+      const desc = Reflect.getOwnPropertyDescriptor(O, key)
+
+      // 2. If desc is not undefined and desc.[[Enumerable]] is true:
+      if (desc?.enumerable) {
+        // 1. Let typedKey be key converted to an IDL value of type K.
+        const typedKey = keyConverter(key)
+
+        // 2. Let value be ? Get(O, key).
+        // 3. Let typedValue be value converted to an IDL value of type V.
+        const typedValue = valueConverter(O[key])
+
+        // 4. Set result[typedKey] to typedValue.
+        result[typedKey] = typedValue
+      }
+    }
+
+    // 5. Return result.
+    return result
+  }
+}
+
+webidl.interfaceConverter = function (i) {
+  return (V, opts = {}) => {
+    if (opts.strict !== false && !(V instanceof i)) {
+      throw webidl.errors.exception({
+        header: i.name,
+        message: `Expected ${V} to be an instance of ${i.name}.`
+      })
+    }
+
+    return V
+  }
+}
+
+webidl.dictionaryConverter = function (converters) {
+  return (dictionary) => {
+    const type = webidl.util.Type(dictionary)
+    const dict = {}
+
+    if (type === 'Null' || type === 'Undefined') {
+      return dict
+    } else if (type !== 'Object') {
+      throw webidl.errors.exception({
+        header: 'Dictionary',
+        message: `Expected ${dictionary} to be one of: Null, Undefined, Object.`
+      })
+    }
+
+    for (const options of converters) {
+      const { key, defaultValue, required, converter } = options
+
+      if (required === true) {
+        if (!hasOwn(dictionary, key)) {
+          throw webidl.errors.exception({
+            header: 'Dictionary',
+            message: `Missing required key "${key}".`
+          })
+        }
+      }
+
+      let value = dictionary[key]
+      const hasDefault = hasOwn(options, 'defaultValue')
+
+      // Only use defaultValue if value is undefined and
+      // a defaultValue option was provided.
+      if (hasDefault && value !== null) {
+        value = value ?? defaultValue
+      }
+
+      // A key can be optional and have no default value.
+      // When this happens, do not perform a conversion,
+      // and do not assign the key a value.
+      if (required || hasDefault || value !== undefined) {
+        value = converter(value)
+
+        if (
+          options.allowedValues &&
+          !options.allowedValues.includes(value)
+        ) {
+          throw webidl.errors.exception({
+            header: 'Dictionary',
+            message: `${value} is not an accepted type. Expected one of ${options.allowedValues.join(', ')}.`
+          })
+        }
+
+        dict[key] = value
+      }
+    }
+
+    return dict
+  }
+}
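+
+// Illustrative sketch: a converter built as, for example,
+//   webidl.dictionaryConverter([
+//     { key: 'method', converter: webidl.converters.ByteString, defaultValue: 'GET' }
+//   ])
+// maps {} to { method: 'GET' }, { method: 'post' } to { method: 'post' },
+// and returns {} for null or undefined input.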
+
+webidl.nullableConverter = function (converter) {
+  return (V) => {
+    if (V === null) {
+      return V
+    }
+
+    return converter(V)
+  }
+}
+
+// https://webidl.spec.whatwg.org/#es-DOMString
+webidl.converters.DOMString = function (V, opts = {}) {
+  // 1. If V is null and the conversion is to an IDL type
+  //    associated with the [LegacyNullToEmptyString]
+  //    extended attribute, then return the DOMString value
+  //    that represents the empty string.
+  if (V === null && opts.legacyNullToEmptyString) {
+    return ''
+  }
+
+  // 2. Let x be ? ToString(V).
+  if (typeof V === 'symbol') {
+    throw new TypeError('Could not convert argument of type symbol to string.')
+  }
+
+  // 3. Return the IDL DOMString value that represents the
+  //    same sequence of code units as the one the
+  //    ECMAScript String value x represents.
+  return String(V)
+}
+
+// https://webidl.spec.whatwg.org/#es-ByteString
+webidl.converters.ByteString = function (V) {
+  // 1. Let x be ? ToString(V).
+  // Note: the DOMString converter performs ? ToString(V)
+  const x = webidl.converters.DOMString(V)
+
+  // 2. If the value of any element of x is greater than
+  //    255, then throw a TypeError.
+  for (let index = 0; index < x.length; index++) {
+    if (x.charCodeAt(index) > 255) {
+      throw new TypeError(
+        'Cannot convert argument to a ByteString because the character at ' +
+        `index ${index} has a value of ${x.charCodeAt(index)} which is greater than 255.`
+      )
+    }
+  }
+
+  // 3. Return an IDL ByteString value whose length is the
+  //    length of x, and where the value of each element is
+  //    the value of the corresponding element of x.
+  return x
+}
+
+// https://webidl.spec.whatwg.org/#es-USVString
+webidl.converters.USVString = toUSVString
+
+// https://webidl.spec.whatwg.org/#es-boolean
+webidl.converters.boolean = function (V) {
+  // 1. Let x be the result of computing ToBoolean(V).
+  const x = Boolean(V)
+
+  // 2. Return the IDL boolean value that is the one that represents
+  //    the same truth value as the ECMAScript Boolean value x.
+  return x
+}
+
+// https://webidl.spec.whatwg.org/#es-any
+webidl.converters.any = function (V) {
+  return V
+}
+
+// https://webidl.spec.whatwg.org/#es-long-long
+webidl.converters['long long'] = function (V) {
+  // 1. Let x be ? ConvertToInt(V, 64, "signed").
+  const x = webidl.util.ConvertToInt(V, 64, 'signed')
+
+  // 2. Return the IDL long long value that represents
+  //    the same numeric value as x.
+  return x
+}
+
+// https://webidl.spec.whatwg.org/#es-unsigned-long-long
+webidl.converters['unsigned long long'] = function (V) {
+  // 1. Let x be ? ConvertToInt(V, 64, "unsigned").
+  const x = webidl.util.ConvertToInt(V, 64, 'unsigned')
+
+  // 2. Return the IDL unsigned long long value that
+  //    represents the same numeric value as x.
+  return x
+}
+
+// https://webidl.spec.whatwg.org/#es-unsigned-long
+webidl.converters['unsigned long'] = function (V) {
+  // 1. Let x be ? ConvertToInt(V, 32, "unsigned").
+  const x = webidl.util.ConvertToInt(V, 32, 'unsigned')
+
+  // 2. Return the IDL unsigned long value that
+  //    represents the same numeric value as x.
+  return x
+}
+
+// https://webidl.spec.whatwg.org/#es-unsigned-short
+webidl.converters['unsigned short'] = function (V, opts) {
+  // 1. Let x be ? ConvertToInt(V, 16, "unsigned").
+  const x = webidl.util.ConvertToInt(V, 16, 'unsigned', opts)
+
+  // 2. Return the IDL unsigned short value that represents
+  //    the same numeric value as x.
+  return x
+}
+
+// https://webidl.spec.whatwg.org/#idl-ArrayBuffer
+webidl.converters.ArrayBuffer = function (V, opts = {}) {
+  // 1. If Type(V) is not Object, or V does not have an
+  //    [[ArrayBufferData]] internal slot, then throw a
+  //    TypeError.
+  // see: https://tc39.es/ecma262/#sec-properties-of-the-arraybuffer-instances
+  // see: https://tc39.es/ecma262/#sec-properties-of-the-sharedarraybuffer-instances
+  if (
+    webidl.util.Type(V) !== 'Object' ||
+    !types.isAnyArrayBuffer(V)
+  ) {
+    throw webidl.errors.conversionFailed({
+      prefix: `${V}`,
+      argument: `${V}`,
+      types: ['ArrayBuffer']
+    })
+  }
+
+  // 2. If the conversion is not to an IDL type associated
+  //    with the [AllowShared] extended attribute, and
+  //    IsSharedArrayBuffer(V) is true, then throw a
+  //    TypeError.
+  if (opts.allowShared === false && types.isSharedArrayBuffer(V)) {
+    throw webidl.errors.exception({
+      header: 'ArrayBuffer',
+      message: 'SharedArrayBuffer is not allowed.'
+    })
+  }
+
+  // 3. If the conversion is not to an IDL type associated
+  //    with the [AllowResizable] extended attribute, and
+  //    IsResizableArrayBuffer(V) is true, then throw a
+  //    TypeError.
+  // Note: resizable ArrayBuffers are currently a proposal.
+
+  // 4. Return the IDL ArrayBuffer value that is a
+  //    reference to the same object as V.
+  return V
+}
+
+webidl.converters.TypedArray = function (V, T, opts = {}) {
+  // 1. Let T be the IDL type V is being converted to.
+
+  // 2. If Type(V) is not Object, or V does not have a
+  //    [[TypedArrayName]] internal slot with a value
+  //    equal to T’s name, then throw a TypeError.
+  if (
+    webidl.util.Type(V) !== 'Object' ||
+    !types.isTypedArray(V) ||
+    V.constructor.name !== T.name
+  ) {
+    throw webidl.errors.conversionFailed({
+      prefix: `${T.name}`,
+      argument: `${V}`,
+      types: [T.name]
+    })
+  }
+
+  // 3. If the conversion is not to an IDL type associated
+  //    with the [AllowShared] extended attribute, and
+  //    IsSharedArrayBuffer(V.[[ViewedArrayBuffer]]) is
+  //    true, then throw a TypeError.
+  if (opts.allowShared === false && types.isSharedArrayBuffer(V.buffer)) {
+    throw webidl.errors.exception({
+      header: 'ArrayBuffer',
+      message: 'SharedArrayBuffer is not allowed.'
+    })
+  }
+
+  // 4. If the conversion is not to an IDL type associated
+  //    with the [AllowResizable] extended attribute, and
+  //    IsResizableArrayBuffer(V.[[ViewedArrayBuffer]]) is
+  //    true, then throw a TypeError.
+  // Note: resizable array buffers are currently a proposal
+
+  // 5. Return the IDL value of type T that is a reference
+  //    to the same object as V.
+  return V
+}
+
+webidl.converters.DataView = function (V, opts = {}) {
+  // 1. If Type(V) is not Object, or V does not have a
+  //    [[DataView]] internal slot, then throw a TypeError.
+  if (webidl.util.Type(V) !== 'Object' || !types.isDataView(V)) {
+    throw webidl.errors.exception({
+      header: 'DataView',
+      message: 'Object is not a DataView.'
+    })
+  }
+
+  // 2. If the conversion is not to an IDL type associated
+  //    with the [AllowShared] extended attribute, and
+  //    IsSharedArrayBuffer(V.[[ViewedArrayBuffer]]) is true,
+  //    then throw a TypeError.
+  if (opts.allowShared === false && types.isSharedArrayBuffer(V.buffer)) {
+    throw webidl.errors.exception({
+      header: 'ArrayBuffer',
+      message: 'SharedArrayBuffer is not allowed.'
+    })
+  }
+
+  // 3. If the conversion is not to an IDL type associated
+  //    with the [AllowResizable] extended attribute, and
+  //    IsResizableArrayBuffer(V.[[ViewedArrayBuffer]]) is
+  //    true, then throw a TypeError.
+  // Note: resizable ArrayBuffers are currently a proposal
+
+  // 4. Return the IDL DataView value that is a reference
+  //    to the same object as V.
+  return V
+}
+
+// https://webidl.spec.whatwg.org/#BufferSource
+webidl.converters.BufferSource = function (V, opts = {}) {
+  if (types.isAnyArrayBuffer(V)) {
+    return webidl.converters.ArrayBuffer(V, opts)
+  }
+
+  if (types.isTypedArray(V)) {
+    return webidl.converters.TypedArray(V, V.constructor)
+  }
+
+  if (types.isDataView(V)) {
+    return webidl.converters.DataView(V, opts)
+  }
+
+  throw new TypeError(`Could not convert ${V} to a BufferSource.`)
+}
+
+webidl.converters['sequence<ByteString>'] = webidl.sequenceConverter(
+  webidl.converters.ByteString
+)
+
+webidl.converters['sequence<sequence<ByteString>>'] = webidl.sequenceConverter(
+  webidl.converters['sequence<ByteString>']
+)
+
+webidl.converters['record<ByteString, ByteString>'] = webidl.recordConverter(
+  webidl.converters.ByteString,
+  webidl.converters.ByteString
+)
+
+module.exports = {
+  webidl
+}
+
+
+/***/ }),
+
+/***/ 2951:
+/***/ ((module) => {
+
+
+
+/**
+ * @see https://encoding.spec.whatwg.org/#concept-encoding-get
+ * @param {string|undefined} label
+ */
+function getEncoding (label) {
+  if (!label) {
+    return 'failure'
+  }
+
+  // 1. Remove any leading and trailing ASCII whitespace from label.
+  // 2. If label is an ASCII case-insensitive match for any of the
+  //    labels listed in the table below, then return the
+  //    corresponding encoding; otherwise return failure.
+  switch (label.trim().toLowerCase()) {
+    case 'unicode-1-1-utf-8':
+    case 'unicode11utf8':
+    case 'unicode20utf8':
+    case 'utf-8':
+    case 'utf8':
+    case 'x-unicode20utf8':
+      return 'UTF-8'
+    case '866':
+    case 'cp866':
+    case 'csibm866':
+    case 'ibm866':
+      return 'IBM866'
+    case 'csisolatin2':
+    case 'iso-8859-2':
+    case 'iso-ir-101':
+    case 'iso8859-2':
+    case 'iso88592':
+    case 'iso_8859-2':
+    case 'iso_8859-2:1987':
+    case 'l2':
+    case 'latin2':
+      return 'ISO-8859-2'
+    case 'csisolatin3':
+    case 'iso-8859-3':
+    case 'iso-ir-109':
+    case 'iso8859-3':
+    case 'iso88593':
+    case 'iso_8859-3':
+    case 'iso_8859-3:1988':
+    case 'l3':
+    case 'latin3':
+      return 'ISO-8859-3'
+    case 'csisolatin4':
+    case 'iso-8859-4':
+    case 'iso-ir-110':
+    case 'iso8859-4':
+    case 'iso88594':
+    case 'iso_8859-4':
+    case 'iso_8859-4:1988':
+    case 'l4':
+    case 'latin4':
+      return 'ISO-8859-4'
+    case 'csisolatincyrillic':
+    case 'cyrillic':
+    case 'iso-8859-5':
+    case 'iso-ir-144':
+    case 'iso8859-5':
+    case 'iso88595':
+    case 'iso_8859-5':
+    case 'iso_8859-5:1988':
+      return 'ISO-8859-5'
+    case 'arabic':
+    case 'asmo-708':
+    case 'csiso88596e':
+    case 'csiso88596i':
+    case 'csisolatinarabic':
+    case 'ecma-114':
+    case 'iso-8859-6':
+    case 'iso-8859-6-e':
+    case 'iso-8859-6-i':
+    case 'iso-ir-127':
+    case 'iso8859-6':
+    case 'iso88596':
+    case 'iso_8859-6':
+    case 'iso_8859-6:1987':
+      return 'ISO-8859-6'
+    case 'csisolatingreek':
+    case 'ecma-118':
+    case 'elot_928':
+    case 'greek':
+    case 'greek8':
+    case 'iso-8859-7':
+    case 'iso-ir-126':
+    case 'iso8859-7':
+    case 'iso88597':
+    case 'iso_8859-7':
+    case 'iso_8859-7:1987':
+    case 'sun_eu_greek':
+      return 'ISO-8859-7'
+    case 'csiso88598e':
+    case 'csisolatinhebrew':
+    case 'hebrew':
+    case 'iso-8859-8':
+    case 'iso-8859-8-e':
+    case 'iso-ir-138':
+    case 'iso8859-8':
+    case 'iso88598':
+    case 'iso_8859-8':
+    case 'iso_8859-8:1988':
+    case 'visual':
+      return 'ISO-8859-8'
+    case 'csiso88598i':
+    case 'iso-8859-8-i':
+    case 'logical':
+      return 'ISO-8859-8-I'
+    case 'csisolatin6':
+    case 'iso-8859-10':
+    case 'iso-ir-157':
+    case 'iso8859-10':
+    case 'iso885910':
+    case 'l6':
+    case 'latin6':
+      return 'ISO-8859-10'
+    case 'iso-8859-13':
+    case 'iso8859-13':
+    case 'iso885913':
+      return 'ISO-8859-13'
+    case 'iso-8859-14':
+    case 'iso8859-14':
+    case 'iso885914':
+      return 'ISO-8859-14'
+    case 'csisolatin9':
+    case 'iso-8859-15':
+    case 'iso8859-15':
+    case 'iso885915':
+    case 'iso_8859-15':
+    case 'l9':
+      return 'ISO-8859-15'
+    case 'iso-8859-16':
+      return 'ISO-8859-16'
+    case 'cskoi8r':
+    case 'koi':
+    case 'koi8':
+    case 'koi8-r':
+    case 'koi8_r':
+      return 'KOI8-R'
+    case 'koi8-ru':
+    case 'koi8-u':
+      return 'KOI8-U'
+    case 'csmacintosh':
+    case 'mac':
+    case 'macintosh':
+    case 'x-mac-roman':
+      return 'macintosh'
+    case 'iso-8859-11':
+    case 'iso8859-11':
+    case 'iso885911':
+    case 'tis-620':
+    case 'windows-874':
+      return 'windows-874'
+    case 'cp1250':
+    case 'windows-1250':
+    case 'x-cp1250':
+      return 'windows-1250'
+    case 'cp1251':
+    case 'windows-1251':
+    case 'x-cp1251':
+      return 'windows-1251'
+    case 'ansi_x3.4-1968':
+    case 'ascii':
+    case 'cp1252':
+    case 'cp819':
+    case 'csisolatin1':
+    case 'ibm819':
+    case 'iso-8859-1':
+    case 'iso-ir-100':
+    case 'iso8859-1':
+    case 'iso88591':
+    case 'iso_8859-1':
+    case 'iso_8859-1:1987':
+    case 'l1':
+    case 'latin1':
+    case 'us-ascii':
+    case 'windows-1252':
+    case 'x-cp1252':
+      return 'windows-1252'
+    case 'cp1253':
+    case 'windows-1253':
+    case 'x-cp1253':
+      return 'windows-1253'
+    case 'cp1254':
+    case 'csisolatin5':
+    case 'iso-8859-9':
+    case 'iso-ir-148':
+    case 'iso8859-9':
+    case 'iso88599':
+    case 'iso_8859-9':
+    case 'iso_8859-9:1989':
+    case 'l5':
+    case 'latin5':
+    case 'windows-1254':
+    case 'x-cp1254':
+      return 'windows-1254'
+    case 'cp1255':
+    case 'windows-1255':
+    case 'x-cp1255':
+      return 'windows-1255'
+    case 'cp1256':
+    case 'windows-1256':
+    case 'x-cp1256':
+      return 'windows-1256'
+    case 'cp1257':
+    case 'windows-1257':
+    case 'x-cp1257':
+      return 'windows-1257'
+    case 'cp1258':
+    case 'windows-1258':
+    case 'x-cp1258':
+      return 'windows-1258'
+    case 'x-mac-cyrillic':
+    case 'x-mac-ukrainian':
+      return 'x-mac-cyrillic'
+    case 'chinese':
+    case 'csgb2312':
+    case 'csiso58gb231280':
+    case 'gb2312':
+    case 'gb_2312':
+    case 'gb_2312-80':
+    case 'gbk':
+    case 'iso-ir-58':
+    case 'x-gbk':
+      return 'GBK'
+    case 'gb18030':
+      return 'gb18030'
+    case 'big5':
+    case 'big5-hkscs':
+    case 'cn-big5':
+    case 'csbig5':
+    case 'x-x-big5':
+      return 'Big5'
+    case 'cseucpkdfmtjapanese':
+    case 'euc-jp':
+    case 'x-euc-jp':
+      return 'EUC-JP'
+    case 'csiso2022jp':
+    case 'iso-2022-jp':
+      return 'ISO-2022-JP'
+    case 'csshiftjis':
+    case 'ms932':
+    case 'ms_kanji':
+    case 'shift-jis':
+    case 'shift_jis':
+    case 'sjis':
+    case 'windows-31j':
+    case 'x-sjis':
+      return 'Shift_JIS'
+    case 'cseuckr':
+    case 'csksc56011987':
+    case 'euc-kr':
+    case 'iso-ir-149':
+    case 'korean':
+    case 'ks_c_5601-1987':
+    case 'ks_c_5601-1989':
+    case 'ksc5601':
+    case 'ksc_5601':
+    case 'windows-949':
+      return 'EUC-KR'
+    case 'csiso2022kr':
+    case 'hz-gb-2312':
+    case 'iso-2022-cn':
+    case 'iso-2022-cn-ext':
+    case 'iso-2022-kr':
+    case 'replacement':
+      return 'replacement'
+    case 'unicodefffe':
+    case 'utf-16be':
+      return 'UTF-16BE'
+    case 'csunicode':
+    case 'iso-10646-ucs-2':
+    case 'ucs-2':
+    case 'unicode':
+    case 'unicodefeff':
+    case 'utf-16':
+    case 'utf-16le':
+      return 'UTF-16LE'
+    case 'x-user-defined':
+      return 'x-user-defined'
+    default: return 'failure'
+  }
+}
+
+module.exports = {
+  getEncoding
+}
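+
+// Illustrative sketch (kept in a comment so the bundle's behaviour is unchanged):
+// getEncoding maps a WHATWG encoding label to its canonical name and returns the
+// string 'failure' for unknown labels. The require path is hypothetical; inside
+// this bundle the module is resolved by its webpack id.
+/*
+const { getEncoding } = require('./encoding')
+
+getEncoding('latin1')        // => 'windows-1252'
+getEncoding('sjis')          // => 'Shift_JIS'
+getEncoding('made-up-label') // => 'failure'
+*/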
+
+
+/***/ }),
+
+/***/ 2171:
+/***/ ((module, __unused_webpack_exports, __nccwpck_require__) => {
+
+
+
+const {
+  staticPropertyDescriptors,
+  readOperation,
+  fireAProgressEvent
+} = __nccwpck_require__(3170)
+const {
+  kState,
+  kError,
+  kResult,
+  kEvents,
+  kAborted
+} = __nccwpck_require__(8057)
+const { webidl } = __nccwpck_require__(29)
+const { kEnumerableProperty } = __nccwpck_require__(3465)
+
+class FileReader extends EventTarget {
+  constructor () {
+    super()
+
+    this[kState] = 'empty'
+    this[kResult] = null
+    this[kError] = null
+    this[kEvents] = {
+      loadend: null,
+      error: null,
+      abort: null,
+      load: null,
+      progress: null,
+      loadstart: null
+    }
+  }
+
+  /**
+   * @see https://w3c.github.io/FileAPI/#dfn-readAsArrayBuffer
+   * @param {import('buffer').Blob} blob
+   */
+  readAsArrayBuffer (blob) {
+    webidl.brandCheck(this, FileReader)
+
+    webidl.argumentLengthCheck(arguments, 1, { header: 'FileReader.readAsArrayBuffer' })
+
+    blob = webidl.converters.Blob(blob, { strict: false })
+
+    // The readAsArrayBuffer(blob) method, when invoked,
+    // must initiate a read operation for blob with ArrayBuffer.
+    readOperation(this, blob, 'ArrayBuffer')
+  }
+
+  /**
+   * @see https://w3c.github.io/FileAPI/#readAsBinaryString
+   * @param {import('buffer').Blob} blob
+   */
+  readAsBinaryString (blob) {
+    webidl.brandCheck(this, FileReader)
+
+    webidl.argumentLengthCheck(arguments, 1, { header: 'FileReader.readAsBinaryString' })
+
+    blob = webidl.converters.Blob(blob, { strict: false })
+
+    // The readAsBinaryString(blob) method, when invoked,
+    // must initiate a read operation for blob with BinaryString.
+    readOperation(this, blob, 'BinaryString')
+  }
+
+  /**
+   * @see https://w3c.github.io/FileAPI/#dfn-readAsText
+   * @param {import('buffer').Blob} blob
+   * @param {string?} encoding
+   */
+  readAsText (blob, encoding = undefined) {
+    webidl.brandCheck(this, FileReader)
+
+    webidl.argumentLengthCheck(arguments, 1, { header: 'FileReader.readAsText' })
+
+    blob = webidl.converters.Blob(blob, { strict: false })
+
+    if (encoding !== undefined) {
+      encoding = webidl.converters.DOMString(encoding)
+    }
+
+    // The readAsText(blob, encoding) method, when invoked,
+    // must initiate a read operation for blob with Text and encoding.
+    readOperation(this, blob, 'Text', encoding)
+  }
+
+  /**
+   * @see https://w3c.github.io/FileAPI/#dfn-readAsDataURL
+   * @param {import('buffer').Blob} blob
+   */
+  readAsDataURL (blob) {
+    webidl.brandCheck(this, FileReader)
+
+    webidl.argumentLengthCheck(arguments, 1, { header: 'FileReader.readAsDataURL' })
+
+    blob = webidl.converters.Blob(blob, { strict: false })
+
+    // The readAsDataURL(blob) method, when invoked, must
+    // initiate a read operation for blob with DataURL.
+    readOperation(this, blob, 'DataURL')
+  }
+
+  /**
+   * @see https://w3c.github.io/FileAPI/#dfn-abort
+   */
+  abort () {
+    // 1. If this's state is "empty" or if this's state is
+    //    "done" set this's result to null and terminate
+    //    this algorithm.
+    if (this[kState] === 'empty' || this[kState] === 'done') {
+      this[kResult] = null
+      return
+    }
+
+    // 2. If this's state is "loading" set this's state to
+    //    "done" and set this's result to null.
+    if (this[kState] === 'loading') {
+      this[kState] = 'done'
+      this[kResult] = null
+    }
+
+    // 3. If there are any tasks from this on the file reading
+    //    task source in an affiliated task queue, then remove
+    //    those tasks from that task queue.
+    this[kAborted] = true
+
+    // 4. Terminate the algorithm for the read method being processed.
+    // TODO
+
+    // 5. Fire a progress event called abort at this.
+    fireAProgressEvent('abort', this)
+
+    // 6. If this's state is not "loading", fire a progress
+    //    event called loadend at this.
+    if (this[kState] !== 'loading') {
+      fireAProgressEvent('loadend', this)
+    }
+  }
+
+  /**
+   * @see https://w3c.github.io/FileAPI/#dom-filereader-readystate
+   */
+  get readyState () {
+    webidl.brandCheck(this, FileReader)
+
+    switch (this[kState]) {
+      case 'empty': return this.EMPTY
+      case 'loading': return this.LOADING
+      case 'done': return this.DONE
+    }
+  }
+
+  /**
+   * @see https://w3c.github.io/FileAPI/#dom-filereader-result
+   */
+  get result () {
+    webidl.brandCheck(this, FileReader)
+
+    // The result attribute’s getter, when invoked, must return
+    // this's result.
+    return this[kResult]
+  }
+
+  /**
+   * @see https://w3c.github.io/FileAPI/#dom-filereader-error
+   */
+  get error () {
+    webidl.brandCheck(this, FileReader)
+
+    // The error attribute’s getter, when invoked, must return
+    // this's error.
+    return this[kError]
+  }
+
+  get onloadend () {
+    webidl.brandCheck(this, FileReader)
+
+    return this[kEvents].loadend
+  }
+
+  set onloadend (fn) {
+    webidl.brandCheck(this, FileReader)
+
+    if (this[kEvents].loadend) {
+      this.removeEventListener('loadend', this[kEvents].loadend)
+    }
+
+    if (typeof fn === 'function') {
+      this[kEvents].loadend = fn
+      this.addEventListener('loadend', fn)
+    } else {
+      this[kEvents].loadend = null
+    }
+  }
+
+  get onerror () {
+    webidl.brandCheck(this, FileReader)
+
+    return this[kEvents].error
+  }
+
+  set onerror (fn) {
+    webidl.brandCheck(this, FileReader)
+
+    if (this[kEvents].error) {
+      this.removeEventListener('error', this[kEvents].error)
+    }
+
+    if (typeof fn === 'function') {
+      this[kEvents].error = fn
+      this.addEventListener('error', fn)
+    } else {
+      this[kEvents].error = null
+    }
+  }
+
+  get onloadstart () {
+    webidl.brandCheck(this, FileReader)
+
+    return this[kEvents].loadstart
+  }
+
+  set onloadstart (fn) {
+    webidl.brandCheck(this, FileReader)
+
+    if (this[kEvents].loadstart) {
+      this.removeEventListener('loadstart', this[kEvents].loadstart)
+    }
+
+    if (typeof fn === 'function') {
+      this[kEvents].loadstart = fn
+      this.addEventListener('loadstart', fn)
+    } else {
+      this[kEvents].loadstart = null
+    }
+  }
+
+  get onprogress () {
+    webidl.brandCheck(this, FileReader)
+
+    return this[kEvents].progress
+  }
+
+  set onprogress (fn) {
+    webidl.brandCheck(this, FileReader)
+
+    if (this[kEvents].progress) {
+      this.removeEventListener('progress', this[kEvents].progress)
+    }
+
+    if (typeof fn === 'function') {
+      this[kEvents].progress = fn
+      this.addEventListener('progress', fn)
+    } else {
+      this[kEvents].progress = null
+    }
+  }
+
+  get onload () {
+    webidl.brandCheck(this, FileReader)
+
+    return this[kEvents].load
+  }
+
+  set onload (fn) {
+    webidl.brandCheck(this, FileReader)
+
+    if (this[kEvents].load) {
+      this.removeEventListener('load', this[kEvents].load)
+    }
+
+    if (typeof fn === 'function') {
+      this[kEvents].load = fn
+      this.addEventListener('load', fn)
+    } else {
+      this[kEvents].load = null
+    }
+  }
+
+  get onabort () {
+    webidl.brandCheck(this, FileReader)
+
+    return this[kEvents].abort
+  }
+
+  set onabort (fn) {
+    webidl.brandCheck(this, FileReader)
+
+    if (this[kEvents].abort) {
+      this.removeEventListener('abort', this[kEvents].abort)
+    }
+
+    if (typeof fn === 'function') {
+      this[kEvents].abort = fn
+      this.addEventListener('abort', fn)
+    } else {
+      this[kEvents].abort = null
+    }
+  }
+}
+
+// https://w3c.github.io/FileAPI/#dom-filereader-empty
+FileReader.EMPTY = FileReader.prototype.EMPTY = 0
+// https://w3c.github.io/FileAPI/#dom-filereader-loading
+FileReader.LOADING = FileReader.prototype.LOADING = 1
+// https://w3c.github.io/FileAPI/#dom-filereader-done
+FileReader.DONE = FileReader.prototype.DONE = 2
+
+Object.defineProperties(FileReader.prototype, {
+  EMPTY: staticPropertyDescriptors,
+  LOADING: staticPropertyDescriptors,
+  DONE: staticPropertyDescriptors,
+  readAsArrayBuffer: kEnumerableProperty,
+  readAsBinaryString: kEnumerableProperty,
+  readAsText: kEnumerableProperty,
+  readAsDataURL: kEnumerableProperty,
+  abort: kEnumerableProperty,
+  readyState: kEnumerableProperty,
+  result: kEnumerableProperty,
+  error: kEnumerableProperty,
+  onloadstart: kEnumerableProperty,
+  onprogress: kEnumerableProperty,
+  onload: kEnumerableProperty,
+  onabort: kEnumerableProperty,
+  onerror: kEnumerableProperty,
+  onloadend: kEnumerableProperty,
+  [Symbol.toStringTag]: {
+    value: 'FileReader',
+    writable: false,
+    enumerable: false,
+    configurable: true
+  }
+})
+
+Object.defineProperties(FileReader, {
+  EMPTY: staticPropertyDescriptors,
+  LOADING: staticPropertyDescriptors,
+  DONE: staticPropertyDescriptors
+})
+
+module.exports = {
+  FileReader
+}
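+
+// Illustrative sketch (kept in a comment so the bundle's behaviour is unchanged):
+// reading a Blob as text with the FileReader above. The require path is
+// hypothetical; inside this bundle the module is resolved by its webpack id.
+/*
+const { Blob } = require('buffer')
+const { FileReader } = require('./filereader')
+
+const reader = new FileReader()
+reader.onload = () => {
+  console.log(reader.readyState === FileReader.DONE) // true
+  console.log(reader.result)                         // 'hello world'
+}
+reader.onerror = () => {
+  console.error(reader.error)
+}
+reader.readAsText(new Blob(['hello world']), 'utf-8')
+*/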
+
+
+/***/ }),
+
+/***/ 8389:
+/***/ ((module, __unused_webpack_exports, __nccwpck_require__) => {
+
+
+
+const { webidl } = __nccwpck_require__(29)
+
+const kState = Symbol('ProgressEvent state')
+
+/**
+ * @see https://xhr.spec.whatwg.org/#progressevent
+ */
+class ProgressEvent extends Event {
+  constructor (type, eventInitDict = {}) {
+    type = webidl.converters.DOMString(type)
+    eventInitDict = webidl.converters.ProgressEventInit(eventInitDict ?? {})
+
+    super(type, eventInitDict)
+
+    this[kState] = {
+      lengthComputable: eventInitDict.lengthComputable,
+      loaded: eventInitDict.loaded,
+      total: eventInitDict.total
+    }
+  }
+
+  get lengthComputable () {
+    webidl.brandCheck(this, ProgressEvent)
+
+    return this[kState].lengthComputable
+  }
+
+  get loaded () {
+    webidl.brandCheck(this, ProgressEvent)
+
+    return this[kState].loaded
+  }
+
+  get total () {
+    webidl.brandCheck(this, ProgressEvent)
+
+    return this[kState].total
+  }
+}
+
+webidl.converters.ProgressEventInit = webidl.dictionaryConverter([
+  {
+    key: 'lengthComputable',
+    converter: webidl.converters.boolean,
+    defaultValue: false
+  },
+  {
+    key: 'loaded',
+    converter: webidl.converters['unsigned long long'],
+    defaultValue: 0
+  },
+  {
+    key: 'total',
+    converter: webidl.converters['unsigned long long'],
+    defaultValue: 0
+  },
+  {
+    key: 'bubbles',
+    converter: webidl.converters.boolean,
+    defaultValue: false
+  },
+  {
+    key: 'cancelable',
+    converter: webidl.converters.boolean,
+    defaultValue: false
+  },
+  {
+    key: 'composed',
+    converter: webidl.converters.boolean,
+    defaultValue: false
+  }
+])
+
+module.exports = {
+  ProgressEvent
+}
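+
+// Illustrative sketch (comment only): constructing a ProgressEvent with the init
+// dictionary converter declared above; omitted members fall back to the declared
+// defaults (lengthComputable: false, loaded: 0, total: 0, non-bubbling, non-cancelable).
+/*
+const e = new ProgressEvent('progress', { lengthComputable: true, loaded: 10, total: 100 })
+console.log(e.lengthComputable, e.loaded, e.total) // true 10 100
+*/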
+
+
+/***/ }),
+
+/***/ 8057:
+/***/ ((module) => {
+
+
+
+module.exports = {
+  kState: Symbol('FileReader state'),
+  kResult: Symbol('FileReader result'),
+  kError: Symbol('FileReader error'),
+  kLastProgressEventFired: Symbol('FileReader last progress event fired timestamp'),
+  kEvents: Symbol('FileReader events'),
+  kAborted: Symbol('FileReader aborted')
+}
+
+
+/***/ }),
+
+/***/ 3170:
+/***/ ((module, __unused_webpack_exports, __nccwpck_require__) => {
+
+
+
+const {
+  kState,
+  kError,
+  kResult,
+  kAborted,
+  kLastProgressEventFired
+} = __nccwpck_require__(8057)
+const { ProgressEvent } = __nccwpck_require__(8389)
+const { getEncoding } = __nccwpck_require__(2951)
+const { DOMException } = __nccwpck_require__(4135)
+const { serializeAMimeType, parseMIMEType } = __nccwpck_require__(3911)
+const { types } = __nccwpck_require__(9023)
+const { StringDecoder } = __nccwpck_require__(3193)
+const { btoa } = __nccwpck_require__(181)
+
+/** @type {PropertyDescriptor} */
+const staticPropertyDescriptors = {
+  enumerable: true,
+  writable: false,
+  configurable: false
+}
+
+/**
+ * @see https://w3c.github.io/FileAPI/#readOperation
+ * @param {import('./filereader').FileReader} fr
+ * @param {import('buffer').Blob} blob
+ * @param {string} type
+ * @param {string?} encodingName
+ */
+function readOperation (fr, blob, type, encodingName) {
+  // 1. If fr’s state is "loading", throw an InvalidStateError
+  //    DOMException.
+  if (fr[kState] === 'loading') {
+    throw new DOMException('Invalid state', 'InvalidStateError')
+  }
+
+  // 2. Set fr’s state to "loading".
+  fr[kState] = 'loading'
+
+  // 3. Set fr’s result to null.
+  fr[kResult] = null
+
+  // 4. Set fr’s error to null.
+  fr[kError] = null
+
+  // 5. Let stream be the result of calling get stream on blob.
+  /** @type {import('stream/web').ReadableStream} */
+  const stream = blob.stream()
+
+  // 6. Let reader be the result of getting a reader from stream.
+  const reader = stream.getReader()
+
+  // 7. Let bytes be an empty byte sequence.
+  /** @type {Uint8Array[]} */
+  const bytes = []
+
+  // 8. Let chunkPromise be the result of reading a chunk from
+  //    stream with reader.
+  let chunkPromise = reader.read()
+
+  // 9. Let isFirstChunk be true.
+  let isFirstChunk = true
+
+  // 10. In parallel, while true:
+  // Note: "In parallel" just means non-blocking
+  // Note 2: readOperation itself cannot be async as double
+  // reading the body would then reject the promise, instead
+  // of throwing an error.
+  ;(async () => {
+    while (!fr[kAborted]) {
+      // 1. Wait for chunkPromise to be fulfilled or rejected.
+      try {
+        const { done, value } = await chunkPromise
+
+        // 2. If chunkPromise is fulfilled, and isFirstChunk is
+        //    true, queue a task to fire a progress event called
+        //    loadstart at fr.
+        if (isFirstChunk && !fr[kAborted]) {
+          queueMicrotask(() => {
+            fireAProgressEvent('loadstart', fr)
+          })
+        }
+
+        // 3. Set isFirstChunk to false.
+        isFirstChunk = false
+
+        // 4. If chunkPromise is fulfilled with an object whose
+        //    done property is false and whose value property is
+        //    a Uint8Array object, run these steps:
+        if (!done && types.isUint8Array(value)) {
+          // 1. Let bs be the byte sequence represented by the
+          //    Uint8Array object.
+
+          // 2. Append bs to bytes.
+          bytes.push(value)
+
+          // 3. If roughly 50ms have passed since these steps
+          //    were last invoked, queue a task to fire a
+          //    progress event called progress at fr.
+          if (
+            (
+              fr[kLastProgressEventFired] === undefined ||
+              Date.now() - fr[kLastProgressEventFired] >= 50
+            ) &&
+            !fr[kAborted]
+          ) {
+            fr[kLastProgressEventFired] = Date.now()
+            queueMicrotask(() => {
+              fireAProgressEvent('progress', fr)
+            })
+          }
+
+          // 4. Set chunkPromise to the result of reading a
+          //    chunk from stream with reader.
+          chunkPromise = reader.read()
+        } else if (done) {
+          // 5. Otherwise, if chunkPromise is fulfilled with an
+          //    object whose done property is true, queue a task
+          //    to run the following steps and abort this algorithm:
+          queueMicrotask(() => {
+            // 1. Set fr’s state to "done".
+            fr[kState] = 'done'
+
+            // 2. Let result be the result of package data given
+            //    bytes, type, blob’s type, and encodingName.
+            try {
+              const result = packageData(bytes, type, blob.type, encodingName)
+
+              // 4. Else:
+
+              if (fr[kAborted]) {
+                return
+              }
+
+              // 1. Set fr’s result to result.
+              fr[kResult] = result
+
+              // 2. Fire a progress event called load at the fr.
+              fireAProgressEvent('load', fr)
+            } catch (error) {
+              // 3. If package data threw an exception error:
+
+              // 1. Set fr’s error to error.
+              fr[kError] = error
+
+              // 2. Fire a progress event called error at fr.
+              fireAProgressEvent('error', fr)
+            }
+
+            // 5. If fr’s state is not "loading", fire a progress
+            //    event called loadend at the fr.
+            if (fr[kState] !== 'loading') {
+              fireAProgressEvent('loadend', fr)
+            }
+          })
+
+          break
+        }
+      } catch (error) {
+        if (fr[kAborted]) {
+          return
+        }
+
+        // 6. Otherwise, if chunkPromise is rejected with an
+        //    error error, queue a task to run the following
+        //    steps and abort this algorithm:
+        queueMicrotask(() => {
+          // 1. Set fr’s state to "done".
+          fr[kState] = 'done'
+
+          // 2. Set fr’s error to error.
+          fr[kError] = error
+
+          // 3. Fire a progress event called error at fr.
+          fireAProgressEvent('error', fr)
+
+          // 4. If fr’s state is not "loading", fire a progress
+          //    event called loadend at fr.
+          if (fr[kState] !== 'loading') {
+            fireAProgressEvent('loadend', fr)
+          }
+        })
+
+        break
+      }
+    }
+  })()
+}
+
+/**
+ * @see https://w3c.github.io/FileAPI/#fire-a-progress-event
+ * @see https://dom.spec.whatwg.org/#concept-event-fire
+ * @param {string} e The name of the event
+ * @param {import('./filereader').FileReader} reader
+ */
+function fireAProgressEvent (e, reader) {
+  // The progress event e does not bubble. e.bubbles must be false
+  // The progress event e is NOT cancelable. e.cancelable must be false
+  const event = new ProgressEvent(e, {
+    bubbles: false,
+    cancelable: false
+  })
+
+  reader.dispatchEvent(event)
+}
+
+/**
+ * @see https://w3c.github.io/FileAPI/#blob-package-data
+ * @param {Uint8Array[]} bytes
+ * @param {string} type
+ * @param {string?} mimeType
+ * @param {string?} encodingName
+ */
+function packageData (bytes, type, mimeType, encodingName) {
+  // 1. A Blob has an associated package data algorithm, given
+  //    bytes, a type, an optional mimeType, and an optional
+  //    encodingName, which switches on type and runs the
+  //    associated steps:
+
+  switch (type) {
+    case 'DataURL': {
+      // 1. Return bytes as a DataURL [RFC2397] subject to
+      //    the considerations below:
+      //  * Use mimeType as part of the Data URL if it is
+      //    available in keeping with the Data URL
+      //    specification [RFC2397].
+      //  * If mimeType is not available return a Data URL
+      //    without a media-type. [RFC2397].
+
+      // https://datatracker.ietf.org/doc/html/rfc2397#section-3
+      // dataurl    := "data:" [ mediatype ] [ ";base64" ] "," data
+      // mediatype  := [ type "/" subtype ] *( ";" parameter )
+      // data       := *urlchar
+      // parameter  := attribute "=" value
+      let dataURL = 'data:'
+
+      const parsed = parseMIMEType(mimeType || 'application/octet-stream')
+
+      if (parsed !== 'failure') {
+        dataURL += serializeAMimeType(parsed)
+      }
+
+      dataURL += ';base64,'
+
+      const decoder = new StringDecoder('latin1')
+
+      for (const chunk of bytes) {
+        dataURL += btoa(decoder.write(chunk))
+      }
+
+      dataURL += btoa(decoder.end())
+
+      return dataURL
+    }
+    case 'Text': {
+      // 1. Let encoding be failure
+      let encoding = 'failure'
+
+      // 2. If the encodingName is present, set encoding to the
+      //    result of getting an encoding from encodingName.
+      if (encodingName) {
+        encoding = getEncoding(encodingName)
+      }
+
+      // 3. If encoding is failure, and mimeType is present:
+      if (encoding === 'failure' && mimeType) {
+        // 1. Let type be the result of parse a MIME type
+        //    given mimeType.
+        const type = parseMIMEType(mimeType)
+
+        // 2. If type is not failure, set encoding to the result
+        //    of getting an encoding from type’s parameters["charset"].
+        if (type !== 'failure') {
+          encoding = getEncoding(type.parameters.get('charset'))
+        }
+      }
+
+      // 4. If encoding is failure, then set encoding to UTF-8.
+      if (encoding === 'failure') {
+        encoding = 'UTF-8'
+      }
+
+      // 5. Decode bytes using fallback encoding encoding, and
+      //    return the result.
+      return decode(bytes, encoding)
+    }
+    case 'ArrayBuffer': {
+      // Return a new ArrayBuffer whose contents are bytes.
+      const sequence = combineByteSequences(bytes)
+
+      return sequence.buffer
+    }
+    case 'BinaryString': {
+      // Return bytes as a binary string, in which every byte
+      //  is represented by a code unit of equal value [0..255].
+      let binaryString = ''
+
+      const decoder = new StringDecoder('latin1')
+
+      for (const chunk of bytes) {
+        binaryString += decoder.write(chunk)
+      }
+
+      binaryString += decoder.end()
+
+      return binaryString
+    }
+  }
+}
+
+/**
+ * @see https://encoding.spec.whatwg.org/#decode
+ * @param {Uint8Array[]} ioQueue
+ * @param {string} encoding
+ */
+function decode (ioQueue, encoding) {
+  const bytes = combineByteSequences(ioQueue)
+
+  // 1. Let BOMEncoding be the result of BOM sniffing ioQueue.
+  const BOMEncoding = BOMSniffing(bytes)
+
+  let slice = 0
+
+  // 2. If BOMEncoding is non-null:
+  if (BOMEncoding !== null) {
+    // 1. Set encoding to BOMEncoding.
+    encoding = BOMEncoding
+
+    // 2. Read three bytes from ioQueue, if BOMEncoding is
+    //    UTF-8; otherwise read two bytes.
+    //    (Do nothing with those bytes.)
+    slice = BOMEncoding === 'UTF-8' ? 3 : 2
+  }
+
+  // 3. Process a queue with an instance of encoding’s
+  //    decoder, ioQueue, output, and "replacement".
+
+  // 4. Return output.
+
+  const sliced = bytes.slice(slice)
+  return new TextDecoder(encoding).decode(sliced)
+}
+
+/**
+ * @see https://encoding.spec.whatwg.org/#bom-sniff
+ * @param {Uint8Array} ioQueue
+ */
+function BOMSniffing (ioQueue) {
+  // 1. Let BOM be the result of peeking 3 bytes from ioQueue,
+  //    converted to a byte sequence.
+  const [a, b, c] = ioQueue
+
+  // 2. For each of the rows in the table below, starting with
+  //    the first one and going down, if BOM starts with the
+  //    bytes given in the first column, then return the
+  //    encoding given in the cell in the second column of that
+  //    row. Otherwise, return null.
+  if (a === 0xEF && b === 0xBB && c === 0xBF) {
+    return 'UTF-8'
+  } else if (a === 0xFE && b === 0xFF) {
+    return 'UTF-16BE'
+  } else if (a === 0xFF && b === 0xFE) {
+    return 'UTF-16LE'
+  }
+
+  return null
+}
+
+/**
+ * @param {Uint8Array[]} sequences
+ */
+function combineByteSequences (sequences) {
+  const size = sequences.reduce((a, b) => {
+    return a + b.byteLength
+  }, 0)
+
+  let offset = 0
+
+  return sequences.reduce((a, b) => {
+    a.set(b, offset)
+    offset += b.byteLength
+    return a
+  }, new Uint8Array(size))
+}
+
+module.exports = {
+  staticPropertyDescriptors,
+  readOperation,
+  fireAProgressEvent
+}
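+
+// Worked example (comment only): how decode() above handles chunks that start
+// with a UTF-8 BOM. BOMSniffing() sees 0xEF 0xBB 0xBF, so the BOM encoding
+// overrides the fallback encoding, the first three bytes are skipped, and the
+// remainder is handed to TextDecoder.
+/*
+const chunks = [Uint8Array.from([0xEF, 0xBB, 0xBF, 0x68, 0x69])] // BOM + 'hi'
+// decode(chunks, 'windows-1252') // => 'hi' (decoded as UTF-8 despite the fallback)
+*/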
+
+
+/***/ }),
+
+/***/ 7882:
+/***/ ((module, __unused_webpack_exports, __nccwpck_require__) => {
+
+
+
+// We include a version number for the Dispatcher API. In case of breaking changes,
+// this version number must be increased to avoid conflicts.
+const globalDispatcher = Symbol.for('undici.globalDispatcher.1')
+const { InvalidArgumentError } = __nccwpck_require__(3862)
+const Agent = __nccwpck_require__(9072)
+
+if (getGlobalDispatcher() === undefined) {
+  setGlobalDispatcher(new Agent())
+}
+
+function setGlobalDispatcher (agent) {
+  if (!agent || typeof agent.dispatch !== 'function') {
+    throw new InvalidArgumentError('Argument agent must implement Agent')
+  }
+  Object.defineProperty(globalThis, globalDispatcher, {
+    value: agent,
+    writable: true,
+    enumerable: false,
+    configurable: false
+  })
+}
+
+function getGlobalDispatcher () {
+  return globalThis[globalDispatcher]
+}
+
+module.exports = {
+  setGlobalDispatcher,
+  getGlobalDispatcher
+}
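+
+// Illustrative sketch (comment only): installing a custom global Agent so every
+// request without an explicit dispatcher shares it. setGlobalDispatcher rejects
+// any value that does not expose a dispatch() function, as implemented above.
+/*
+const { Agent, setGlobalDispatcher, getGlobalDispatcher } = require('undici')
+
+setGlobalDispatcher(new Agent({ connections: 10 }))
+console.log(typeof getGlobalDispatcher().dispatch) // 'function'
+*/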
+
+
+/***/ }),
+
+/***/ 8975:
+/***/ ((module) => {
+
+
+
+module.exports = class DecoratorHandler {
+  constructor (handler) {
+    this.handler = handler
+  }
+
+  onConnect (...args) {
+    return this.handler.onConnect(...args)
+  }
+
+  onError (...args) {
+    return this.handler.onError(...args)
+  }
+
+  onUpgrade (...args) {
+    return this.handler.onUpgrade(...args)
+  }
+
+  onHeaders (...args) {
+    return this.handler.onHeaders(...args)
+  }
+
+  onData (...args) {
+    return this.handler.onData(...args)
+  }
+
+  onComplete (...args) {
+    return this.handler.onComplete(...args)
+  }
+
+  onBodySent (...args) {
+    return this.handler.onBodySent(...args)
+  }
+}
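+
+// Illustrative sketch (comment only): DecoratorHandler forwards every hook to the
+// wrapped handler, so a subclass only has to override the hooks it cares about.
+// LoggingHandler below is a hypothetical example, not part of undici.
+/*
+class LoggingHandler extends DecoratorHandler {
+  onHeaders (...args) {
+    console.log('status:', args[0])
+    return super.onHeaders(...args)
+  }
+}
+*/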
+
+
+/***/ }),
+
+/***/ 238:
+/***/ ((module, __unused_webpack_exports, __nccwpck_require__) => {
+
+
+
+const util = __nccwpck_require__(3465)
+const { kBodyUsed } = __nccwpck_require__(4856)
+const assert = __nccwpck_require__(2613)
+const { InvalidArgumentError } = __nccwpck_require__(3862)
+const EE = __nccwpck_require__(4434)
+
+const redirectableStatusCodes = [300, 301, 302, 303, 307, 308]
+
+const kBody = Symbol('body')
+
+class BodyAsyncIterable {
+  constructor (body) {
+    this[kBody] = body
+    this[kBodyUsed] = false
+  }
+
+  async * [Symbol.asyncIterator] () {
+    assert(!this[kBodyUsed], 'disturbed')
+    this[kBodyUsed] = true
+    yield * this[kBody]
+  }
+}
+
+class RedirectHandler {
+  constructor (dispatch, maxRedirections, opts, handler) {
+    if (maxRedirections != null && (!Number.isInteger(maxRedirections) || maxRedirections < 0)) {
+      throw new InvalidArgumentError('maxRedirections must be a positive number')
+    }
+
+    util.validateHandler(handler, opts.method, opts.upgrade)
+
+    this.dispatch = dispatch
+    this.location = null
+    this.abort = null
+    this.opts = { ...opts, maxRedirections: 0 } // opts must be a copy
+    this.maxRedirections = maxRedirections
+    this.handler = handler
+    this.history = []
+
+    if (util.isStream(this.opts.body)) {
+      // TODO (fix): Provide some way for the user to cache the file to e.g. /tmp
+      // so that it can be dispatched again?
+      // TODO (fix): Do we need 100-expect support to provide a way to do this properly?
+      if (util.bodyLength(this.opts.body) === 0) {
+        this.opts.body
+          .on('data', function () {
+            assert(false)
+          })
+      }
+
+      if (typeof this.opts.body.readableDidRead !== 'boolean') {
+        this.opts.body[kBodyUsed] = false
+        EE.prototype.on.call(this.opts.body, 'data', function () {
+          this[kBodyUsed] = true
+        })
+      }
+    } else if (this.opts.body && typeof this.opts.body.pipeTo === 'function') {
+      // TODO (fix): We can't access ReadableStream internal state
+      // to determine whether or not it has been disturbed. This is just
+      // a workaround.
+      this.opts.body = new BodyAsyncIterable(this.opts.body)
+    } else if (
+      this.opts.body &&
+      typeof this.opts.body !== 'string' &&
+      !ArrayBuffer.isView(this.opts.body) &&
+      util.isIterable(this.opts.body)
+    ) {
+      // TODO: Should we allow re-using iterable if !this.opts.idempotent
+      // or through some other flag?
+      this.opts.body = new BodyAsyncIterable(this.opts.body)
+    }
+  }
+
+  onConnect (abort) {
+    this.abort = abort
+    this.handler.onConnect(abort, { history: this.history })
+  }
+
+  onUpgrade (statusCode, headers, socket) {
+    this.handler.onUpgrade(statusCode, headers, socket)
+  }
+
+  onError (error) {
+    this.handler.onError(error)
+  }
+
+  onHeaders (statusCode, headers, resume, statusText) {
+    this.location = this.history.length >= this.maxRedirections || util.isDisturbed(this.opts.body)
+      ? null
+      : parseLocation(statusCode, headers)
+
+    if (this.opts.origin) {
+      this.history.push(new URL(this.opts.path, this.opts.origin))
+    }
+
+    if (!this.location) {
+      return this.handler.onHeaders(statusCode, headers, resume, statusText)
+    }
+
+    const { origin, pathname, search } = util.parseURL(new URL(this.location, this.opts.origin && new URL(this.opts.path, this.opts.origin)))
+    const path = search ? `${pathname}${search}` : pathname
+
+    // Remove headers referring to the original URL.
+    // By default it is Host only, unless it's a 303 (see below), which also removes all Content-* headers.
+    // https://tools.ietf.org/html/rfc7231#section-6.4
+    this.opts.headers = cleanRequestHeaders(this.opts.headers, statusCode === 303, this.opts.origin !== origin)
+    this.opts.path = path
+    this.opts.origin = origin
+    this.opts.maxRedirections = 0
+    this.opts.query = null
+
+    // https://tools.ietf.org/html/rfc7231#section-6.4.4
+    // In case of HTTP 303, replace the method with GET unless it is already HEAD
+    if (statusCode === 303 && this.opts.method !== 'HEAD') {
+      this.opts.method = 'GET'
+      this.opts.body = null
+    }
+  }
+
+  onData (chunk) {
+    if (this.location) {
+      /*
+        https://tools.ietf.org/html/rfc7231#section-6.4
+
+        TLDR: undici always ignores 3xx response bodies.
+
+        Redirection is used to serve the requested resource from another URL, so it is assumed that
+        no body is generated (and can thus be ignored), even though generating one is not prohibited.
+
+        For statuses 301, 302, 303, 307 and 308 (the latter from RFC 7238), the specs mention that the body,
+        which is optional and not mandated, usually contains just a hyperlink to the value of
+        the Location response header, so the body can be ignored safely.
+
+        For status 300, "Multiple Choices", the spec mentions both generating a Location
+        response header AND a response body with the other possible locations to follow.
+        Since the spec explicitly chooses not to specify a format for such a body and leaves it to
+        server and browser implementors, we ignore the body as there is no specified way to parse it.
+      */
+    } else {
+      return this.handler.onData(chunk)
+    }
+  }
+
+  onComplete (trailers) {
+    if (this.location) {
+      /*
+        https://tools.ietf.org/html/rfc7231#section-6.4
+
+        TLDR: undici always ignores 3xx response trailers, as they are not expected in case of redirections
+        and are not useful even if present.
+
+        See the comment on the onData method above for more details.
+      */
+
+      this.location = null
+      this.abort = null
+
+      this.dispatch(this.opts, this)
+    } else {
+      this.handler.onComplete(trailers)
+    }
+  }
+
+  onBodySent (chunk) {
+    if (this.handler.onBodySent) {
+      this.handler.onBodySent(chunk)
+    }
+  }
+}
+
+function parseLocation (statusCode, headers) {
+  if (redirectableStatusCodes.indexOf(statusCode) === -1) {
+    return null
+  }
+
+  for (let i = 0; i < headers.length; i += 2) {
+    if (headers[i].toString().toLowerCase() === 'location') {
+      return headers[i + 1]
+    }
+  }
+}
+
+// https://tools.ietf.org/html/rfc7231#section-6.4.4
+function shouldRemoveHeader (header, removeContent, unknownOrigin) {
+  if (header.length === 4) {
+    return util.headerNameToString(header) === 'host'
+  }
+  if (removeContent && util.headerNameToString(header).startsWith('content-')) {
+    return true
+  }
+  if (unknownOrigin && (header.length === 13 || header.length === 6 || header.length === 19)) {
+    const name = util.headerNameToString(header)
+    return name === 'authorization' || name === 'cookie' || name === 'proxy-authorization'
+  }
+  return false
+}
+
+// https://tools.ietf.org/html/rfc7231#section-6.4
+function cleanRequestHeaders (headers, removeContent, unknownOrigin) {
+  const ret = []
+  if (Array.isArray(headers)) {
+    for (let i = 0; i < headers.length; i += 2) {
+      if (!shouldRemoveHeader(headers[i], removeContent, unknownOrigin)) {
+        ret.push(headers[i], headers[i + 1])
+      }
+    }
+  } else if (headers && typeof headers === 'object') {
+    for (const key of Object.keys(headers)) {
+      if (!shouldRemoveHeader(key, removeContent, unknownOrigin)) {
+        ret.push(key, headers[key])
+      }
+    }
+  } else {
+    assert(headers == null, 'headers must be an object or an array')
+  }
+  return ret
+}
+
+module.exports = RedirectHandler
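+
+// Illustrative sketch (comment only, inside an async context): RedirectHandler is
+// enabled through the maxRedirections option. On redirect it rewrites path/origin
+// and, per cleanRequestHeaders above, always drops Host, drops Content-* headers
+// on a 303, and drops authorization/cookie/proxy-authorization when the origin changes.
+/*
+const { request } = require('undici')
+
+const { statusCode, headers } = await request('https://example.com/old-path', {
+  maxRedirections: 3,
+  headers: { authorization: 'Bearer <token>' }
+})
+*/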
+
+
+/***/ }),
+
+/***/ 3966:
+/***/ ((module, __unused_webpack_exports, __nccwpck_require__) => {
+
+const assert = __nccwpck_require__(2613)
+
+const { kRetryHandlerDefaultRetry } = __nccwpck_require__(4856)
+const { RequestRetryError } = __nccwpck_require__(3862)
+const { isDisturbed, parseHeaders, parseRangeHeader } = __nccwpck_require__(3465)
+
+function calculateRetryAfterHeader (retryAfter) {
+  const current = Date.now()
+  const diff = new Date(retryAfter).getTime() - current
+
+  return diff
+}
+
+class RetryHandler {
+  constructor (opts, handlers) {
+    const { retryOptions, ...dispatchOpts } = opts
+    const {
+      // Retry scoped
+      retry: retryFn,
+      maxRetries,
+      maxTimeout,
+      minTimeout,
+      timeoutFactor,
+      // Response scoped
+      methods,
+      errorCodes,
+      retryAfter,
+      statusCodes
+    } = retryOptions ?? {}
+
+    this.dispatch = handlers.dispatch
+    this.handler = handlers.handler
+    this.opts = dispatchOpts
+    this.abort = null
+    this.aborted = false
+    this.retryOpts = {
+      retry: retryFn ?? RetryHandler[kRetryHandlerDefaultRetry],
+      retryAfter: retryAfter ?? true,
+      maxTimeout: maxTimeout ?? 30 * 1000, // 30s,
+      timeout: minTimeout ?? 500, // .5s
+      timeoutFactor: timeoutFactor ?? 2,
+      maxRetries: maxRetries ?? 5,
+      // Which HTTP methods are safe to retry
+      methods: methods ?? ['GET', 'HEAD', 'OPTIONS', 'PUT', 'DELETE', 'TRACE'],
+      // Which response status codes should trigger a retry
+      statusCodes: statusCodes ?? [500, 502, 503, 504, 429],
+      // Which connection-level error codes should trigger a retry
+      errorCodes: errorCodes ?? [
+        'ECONNRESET',
+        'ECONNREFUSED',
+        'ENOTFOUND',
+        'ENETDOWN',
+        'ENETUNREACH',
+        'EHOSTDOWN',
+        'EHOSTUNREACH',
+        'EPIPE'
+      ]
+    }
+
+    this.retryCount = 0
+    this.start = 0
+    this.end = null
+    this.etag = null
+    this.resume = null
+
+    // Handle possible onConnect duplication
+    this.handler.onConnect(reason => {
+      this.aborted = true
+      if (this.abort) {
+        this.abort(reason)
+      } else {
+        this.reason = reason
+      }
+    })
+  }
+
+  onRequestSent () {
+    if (this.handler.onRequestSent) {
+      this.handler.onRequestSent()
+    }
+  }
+
+  onUpgrade (statusCode, headers, socket) {
+    if (this.handler.onUpgrade) {
+      this.handler.onUpgrade(statusCode, headers, socket)
+    }
+  }
+
+  onConnect (abort) {
+    if (this.aborted) {
+      abort(this.reason)
+    } else {
+      this.abort = abort
+    }
+  }
+
+  onBodySent (chunk) {
+    if (this.handler.onBodySent) return this.handler.onBodySent(chunk)
+  }
+
+  static [kRetryHandlerDefaultRetry] (err, { state, opts }, cb) {
+    const { statusCode, code, headers } = err
+    const { method, retryOptions } = opts
+    const {
+      maxRetries,
+      timeout,
+      maxTimeout,
+      timeoutFactor,
+      statusCodes,
+      errorCodes,
+      methods
+    } = retryOptions
+    let { counter, currentTimeout } = state
+
+    currentTimeout =
+      currentTimeout != null && currentTimeout > 0 ? currentTimeout : timeout
+
+    // Any error code that did not originate from undici and is not in the allowed retry list
+    if (
+      code &&
+      code !== 'UND_ERR_REQ_RETRY' &&
+      code !== 'UND_ERR_SOCKET' &&
+      !errorCodes.includes(code)
+    ) {
+      cb(err)
+      return
+    }
+
+    // If a set of methods is provided and the current method is not in the list
+    if (Array.isArray(methods) && !methods.includes(method)) {
+      cb(err)
+      return
+    }
+
+    // If a set of status codes is provided and the current status code is not in the list
+    if (
+      statusCode != null &&
+      Array.isArray(statusCodes) &&
+      !statusCodes.includes(statusCode)
+    ) {
+      cb(err)
+      return
+    }
+
+    // If we reached the max number of retries
+    if (counter > maxRetries) {
+      cb(err)
+      return
+    }
+
+    let retryAfterHeader = headers != null && headers['retry-after']
+    if (retryAfterHeader) {
+      retryAfterHeader = Number(retryAfterHeader)
+      retryAfterHeader = isNaN(retryAfterHeader)
+        ? calculateRetryAfterHeader(retryAfterHeader)
+        : retryAfterHeader * 1e3 // Retry-After is in seconds
+    }
+
+    const retryTimeout =
+      retryAfterHeader > 0
+        ? Math.min(retryAfterHeader, maxTimeout)
+        : Math.min(currentTimeout * timeoutFactor ** counter, maxTimeout)
+
+    state.currentTimeout = retryTimeout
+
+    setTimeout(() => cb(null), retryTimeout)
+  }
+
+  onHeaders (statusCode, rawHeaders, resume, statusMessage) {
+    const headers = parseHeaders(rawHeaders)
+
+    this.retryCount += 1
+
+    if (statusCode >= 300) {
+      this.abort(
+        new RequestRetryError('Request failed', statusCode, {
+          headers,
+          count: this.retryCount
+        })
+      )
+      return false
+    }
+
+    // Checkpoint to resume from where we left off
+    if (this.resume != null) {
+      this.resume = null
+
+      if (statusCode !== 206) {
+        return true
+      }
+
+      const contentRange = parseRangeHeader(headers['content-range'])
+      // If no content range
+      if (!contentRange) {
+        this.abort(
+          new RequestRetryError('Content-Range mismatch', statusCode, {
+            headers,
+            count: this.retryCount
+          })
+        )
+        return false
+      }
+
+      // Let's start with a weak etag check
+      if (this.etag != null && this.etag !== headers.etag) {
+        this.abort(
+          new RequestRetryError('ETag mismatch', statusCode, {
+            headers,
+            count: this.retryCount
+          })
+        )
+        return false
+      }
+
+      const { start, size, end = size } = contentRange
+
+      assert(this.start === start, 'content-range mismatch')
+      assert(this.end == null || this.end === end, 'content-range mismatch')
+
+      this.resume = resume
+      return true
+    }
+
+    if (this.end == null) {
+      if (statusCode === 206) {
+        // First time we receive 206
+        const range = parseRangeHeader(headers['content-range'])
+
+        if (range == null) {
+          return this.handler.onHeaders(
+            statusCode,
+            rawHeaders,
+            resume,
+            statusMessage
+          )
+        }
+
+        const { start, size, end = size } = range
+
+        assert(
+          start != null && Number.isFinite(start) && this.start !== start,
+          'content-range mismatch'
+        )
+        assert(Number.isFinite(start))
+        assert(
+          end != null && Number.isFinite(end) && this.end !== end,
+          'invalid content-length'
+        )
+
+        this.start = start
+        this.end = end
+      }
+
+      // We do our best to checkpoint the body for future range requests
+      if (this.end == null) {
+        const contentLength = headers['content-length']
+        this.end = contentLength != null ? Number(contentLength) : null
+      }
+
+      assert(Number.isFinite(this.start))
+      assert(
+        this.end == null || Number.isFinite(this.end),
+        'invalid content-length'
+      )
+
+      this.resume = resume
+      this.etag = headers.etag != null ? headers.etag : null
+
+      return this.handler.onHeaders(
+        statusCode,
+        rawHeaders,
+        resume,
+        statusMessage
+      )
+    }
+
+    const err = new RequestRetryError('Request failed', statusCode, {
+      headers,
+      count: this.retryCount
+    })
+
+    this.abort(err)
+
+    return false
+  }
+
+  onData (chunk) {
+    this.start += chunk.length
+
+    return this.handler.onData(chunk)
+  }
+
+  onComplete (rawTrailers) {
+    this.retryCount = 0
+    return this.handler.onComplete(rawTrailers)
+  }
+
+  onError (err) {
+    if (this.aborted || isDisturbed(this.opts.body)) {
+      return this.handler.onError(err)
+    }
+
+    this.retryOpts.retry(
+      err,
+      {
+        state: { counter: this.retryCount++, currentTimeout: this.retryAfter },
+        opts: { retryOptions: this.retryOpts, ...this.opts }
+      },
+      onRetry.bind(this)
+    )
+
+    function onRetry (err) {
+      if (err != null || this.aborted || isDisturbed(this.opts.body)) {
+        return this.handler.onError(err)
+      }
+
+      if (this.start !== 0) {
+        this.opts = {
+          ...this.opts,
+          headers: {
+            ...this.opts.headers,
+            range: `bytes=${this.start}-${this.end ?? ''}`
+          }
+        }
+      }
+
+      try {
+        this.dispatch(this.opts, this)
+      } catch (err) {
+        this.handler.onError(err)
+      }
+    }
+  }
+}
+
+module.exports = RetryHandler
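+
+// Illustrative sketch (comment only): the retryOptions shape consumed by
+// RetryHandler above, shown with the constructor's own defaults. Without a
+// Retry-After header the backoff is min(minTimeout * timeoutFactor ** attempt,
+// maxTimeout), i.e. roughly 500 ms, 1 s, 2 s, ... capped at 30 s.
+/*
+const retryOptions = {
+  maxRetries: 5,
+  minTimeout: 500,        // ms, initial timeout
+  maxTimeout: 30 * 1000,  // ms, upper bound for the backoff
+  timeoutFactor: 2,
+  retryAfter: true,       // honour the Retry-After header when present
+  methods: ['GET', 'HEAD', 'OPTIONS', 'PUT', 'DELETE', 'TRACE'],
+  statusCodes: [500, 502, 503, 504, 429],
+  errorCodes: [
+    'ECONNRESET', 'ECONNREFUSED', 'ENOTFOUND', 'ENETDOWN',
+    'ENETUNREACH', 'EHOSTDOWN', 'EHOSTUNREACH', 'EPIPE'
+  ]
+}
+*/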
+
+
+/***/ }),
+
+/***/ 2130:
+/***/ ((module, __unused_webpack_exports, __nccwpck_require__) => {
+
+
+
+const RedirectHandler = __nccwpck_require__(238)
+
+function createRedirectInterceptor ({ maxRedirections: defaultMaxRedirections }) {
+  return (dispatch) => {
+    return function Intercept (opts, handler) {
+      const { maxRedirections = defaultMaxRedirections } = opts
+
+      if (!maxRedirections) {
+        return dispatch(opts, handler)
+      }
+
+      const redirectHandler = new RedirectHandler(dispatch, maxRedirections, opts, handler)
+      opts = { ...opts, maxRedirections: 0 } // Stop sub dispatcher from also redirecting.
+      return dispatch(opts, redirectHandler)
+    }
+  }
+}
+
+module.exports = createRedirectInterceptor
+
+
+/***/ }),
+
+/***/ 307:
+/***/ ((__unused_webpack_module, exports, __nccwpck_require__) => {
+
+
+Object.defineProperty(exports, "__esModule", ({ value: true }));
+exports.SPECIAL_HEADERS = exports.HEADER_STATE = exports.MINOR = exports.MAJOR = exports.CONNECTION_TOKEN_CHARS = exports.HEADER_CHARS = exports.TOKEN = exports.STRICT_TOKEN = exports.HEX = exports.URL_CHAR = exports.STRICT_URL_CHAR = exports.USERINFO_CHARS = exports.MARK = exports.ALPHANUM = exports.NUM = exports.HEX_MAP = exports.NUM_MAP = exports.ALPHA = exports.FINISH = exports.H_METHOD_MAP = exports.METHOD_MAP = exports.METHODS_RTSP = exports.METHODS_ICE = exports.METHODS_HTTP = exports.METHODS = exports.LENIENT_FLAGS = exports.FLAGS = exports.TYPE = exports.ERROR = void 0;
+const utils_1 = __nccwpck_require__(9619);
+// C headers
+var ERROR;
+(function (ERROR) {
+    ERROR[ERROR["OK"] = 0] = "OK";
+    ERROR[ERROR["INTERNAL"] = 1] = "INTERNAL";
+    ERROR[ERROR["STRICT"] = 2] = "STRICT";
+    ERROR[ERROR["LF_EXPECTED"] = 3] = "LF_EXPECTED";
+    ERROR[ERROR["UNEXPECTED_CONTENT_LENGTH"] = 4] = "UNEXPECTED_CONTENT_LENGTH";
+    ERROR[ERROR["CLOSED_CONNECTION"] = 5] = "CLOSED_CONNECTION";
+    ERROR[ERROR["INVALID_METHOD"] = 6] = "INVALID_METHOD";
+    ERROR[ERROR["INVALID_URL"] = 7] = "INVALID_URL";
+    ERROR[ERROR["INVALID_CONSTANT"] = 8] = "INVALID_CONSTANT";
+    ERROR[ERROR["INVALID_VERSION"] = 9] = "INVALID_VERSION";
+    ERROR[ERROR["INVALID_HEADER_TOKEN"] = 10] = "INVALID_HEADER_TOKEN";
+    ERROR[ERROR["INVALID_CONTENT_LENGTH"] = 11] = "INVALID_CONTENT_LENGTH";
+    ERROR[ERROR["INVALID_CHUNK_SIZE"] = 12] = "INVALID_CHUNK_SIZE";
+    ERROR[ERROR["INVALID_STATUS"] = 13] = "INVALID_STATUS";
+    ERROR[ERROR["INVALID_EOF_STATE"] = 14] = "INVALID_EOF_STATE";
+    ERROR[ERROR["INVALID_TRANSFER_ENCODING"] = 15] = "INVALID_TRANSFER_ENCODING";
+    ERROR[ERROR["CB_MESSAGE_BEGIN"] = 16] = "CB_MESSAGE_BEGIN";
+    ERROR[ERROR["CB_HEADERS_COMPLETE"] = 17] = "CB_HEADERS_COMPLETE";
+    ERROR[ERROR["CB_MESSAGE_COMPLETE"] = 18] = "CB_MESSAGE_COMPLETE";
+    ERROR[ERROR["CB_CHUNK_HEADER"] = 19] = "CB_CHUNK_HEADER";
+    ERROR[ERROR["CB_CHUNK_COMPLETE"] = 20] = "CB_CHUNK_COMPLETE";
+    ERROR[ERROR["PAUSED"] = 21] = "PAUSED";
+    ERROR[ERROR["PAUSED_UPGRADE"] = 22] = "PAUSED_UPGRADE";
+    ERROR[ERROR["PAUSED_H2_UPGRADE"] = 23] = "PAUSED_H2_UPGRADE";
+    ERROR[ERROR["USER"] = 24] = "USER";
+})(ERROR = exports.ERROR || (exports.ERROR = {}));
+var TYPE;
+(function (TYPE) {
+    TYPE[TYPE["BOTH"] = 0] = "BOTH";
+    TYPE[TYPE["REQUEST"] = 1] = "REQUEST";
+    TYPE[TYPE["RESPONSE"] = 2] = "RESPONSE";
+})(TYPE = exports.TYPE || (exports.TYPE = {}));
+var FLAGS;
+(function (FLAGS) {
+    FLAGS[FLAGS["CONNECTION_KEEP_ALIVE"] = 1] = "CONNECTION_KEEP_ALIVE";
+    FLAGS[FLAGS["CONNECTION_CLOSE"] = 2] = "CONNECTION_CLOSE";
+    FLAGS[FLAGS["CONNECTION_UPGRADE"] = 4] = "CONNECTION_UPGRADE";
+    FLAGS[FLAGS["CHUNKED"] = 8] = "CHUNKED";
+    FLAGS[FLAGS["UPGRADE"] = 16] = "UPGRADE";
+    FLAGS[FLAGS["CONTENT_LENGTH"] = 32] = "CONTENT_LENGTH";
+    FLAGS[FLAGS["SKIPBODY"] = 64] = "SKIPBODY";
+    FLAGS[FLAGS["TRAILING"] = 128] = "TRAILING";
+    // 1 << 8 is unused
+    FLAGS[FLAGS["TRANSFER_ENCODING"] = 512] = "TRANSFER_ENCODING";
+})(FLAGS = exports.FLAGS || (exports.FLAGS = {}));
+var LENIENT_FLAGS;
+(function (LENIENT_FLAGS) {
+    LENIENT_FLAGS[LENIENT_FLAGS["HEADERS"] = 1] = "HEADERS";
+    LENIENT_FLAGS[LENIENT_FLAGS["CHUNKED_LENGTH"] = 2] = "CHUNKED_LENGTH";
+    LENIENT_FLAGS[LENIENT_FLAGS["KEEP_ALIVE"] = 4] = "KEEP_ALIVE";
+})(LENIENT_FLAGS = exports.LENIENT_FLAGS || (exports.LENIENT_FLAGS = {}));
+var METHODS;
+(function (METHODS) {
+    METHODS[METHODS["DELETE"] = 0] = "DELETE";
+    METHODS[METHODS["GET"] = 1] = "GET";
+    METHODS[METHODS["HEAD"] = 2] = "HEAD";
+    METHODS[METHODS["POST"] = 3] = "POST";
+    METHODS[METHODS["PUT"] = 4] = "PUT";
+    /* pathological */
+    METHODS[METHODS["CONNECT"] = 5] = "CONNECT";
+    METHODS[METHODS["OPTIONS"] = 6] = "OPTIONS";
+    METHODS[METHODS["TRACE"] = 7] = "TRACE";
+    /* WebDAV */
+    METHODS[METHODS["COPY"] = 8] = "COPY";
+    METHODS[METHODS["LOCK"] = 9] = "LOCK";
+    METHODS[METHODS["MKCOL"] = 10] = "MKCOL";
+    METHODS[METHODS["MOVE"] = 11] = "MOVE";
+    METHODS[METHODS["PROPFIND"] = 12] = "PROPFIND";
+    METHODS[METHODS["PROPPATCH"] = 13] = "PROPPATCH";
+    METHODS[METHODS["SEARCH"] = 14] = "SEARCH";
+    METHODS[METHODS["UNLOCK"] = 15] = "UNLOCK";
+    METHODS[METHODS["BIND"] = 16] = "BIND";
+    METHODS[METHODS["REBIND"] = 17] = "REBIND";
+    METHODS[METHODS["UNBIND"] = 18] = "UNBIND";
+    METHODS[METHODS["ACL"] = 19] = "ACL";
+    /* subversion */
+    METHODS[METHODS["REPORT"] = 20] = "REPORT";
+    METHODS[METHODS["MKACTIVITY"] = 21] = "MKACTIVITY";
+    METHODS[METHODS["CHECKOUT"] = 22] = "CHECKOUT";
+    METHODS[METHODS["MERGE"] = 23] = "MERGE";
+    /* upnp */
+    METHODS[METHODS["M-SEARCH"] = 24] = "M-SEARCH";
+    METHODS[METHODS["NOTIFY"] = 25] = "NOTIFY";
+    METHODS[METHODS["SUBSCRIBE"] = 26] = "SUBSCRIBE";
+    METHODS[METHODS["UNSUBSCRIBE"] = 27] = "UNSUBSCRIBE";
+    /* RFC-5789 */
+    METHODS[METHODS["PATCH"] = 28] = "PATCH";
+    METHODS[METHODS["PURGE"] = 29] = "PURGE";
+    /* CalDAV */
+    METHODS[METHODS["MKCALENDAR"] = 30] = "MKCALENDAR";
+    /* RFC-2068, section 19.6.1.2 */
+    METHODS[METHODS["LINK"] = 31] = "LINK";
+    METHODS[METHODS["UNLINK"] = 32] = "UNLINK";
+    /* icecast */
+    METHODS[METHODS["SOURCE"] = 33] = "SOURCE";
+    /* RFC-7540, section 11.6 */
+    METHODS[METHODS["PRI"] = 34] = "PRI";
+    /* RFC-2326 RTSP */
+    METHODS[METHODS["DESCRIBE"] = 35] = "DESCRIBE";
+    METHODS[METHODS["ANNOUNCE"] = 36] = "ANNOUNCE";
+    METHODS[METHODS["SETUP"] = 37] = "SETUP";
+    METHODS[METHODS["PLAY"] = 38] = "PLAY";
+    METHODS[METHODS["PAUSE"] = 39] = "PAUSE";
+    METHODS[METHODS["TEARDOWN"] = 40] = "TEARDOWN";
+    METHODS[METHODS["GET_PARAMETER"] = 41] = "GET_PARAMETER";
+    METHODS[METHODS["SET_PARAMETER"] = 42] = "SET_PARAMETER";
+    METHODS[METHODS["REDIRECT"] = 43] = "REDIRECT";
+    METHODS[METHODS["RECORD"] = 44] = "RECORD";
+    /* RAOP */
+    METHODS[METHODS["FLUSH"] = 45] = "FLUSH";
+})(METHODS = exports.METHODS || (exports.METHODS = {}));
+exports.METHODS_HTTP = [
+    METHODS.DELETE,
+    METHODS.GET,
+    METHODS.HEAD,
+    METHODS.POST,
+    METHODS.PUT,
+    METHODS.CONNECT,
+    METHODS.OPTIONS,
+    METHODS.TRACE,
+    METHODS.COPY,
+    METHODS.LOCK,
+    METHODS.MKCOL,
+    METHODS.MOVE,
+    METHODS.PROPFIND,
+    METHODS.PROPPATCH,
+    METHODS.SEARCH,
+    METHODS.UNLOCK,
+    METHODS.BIND,
+    METHODS.REBIND,
+    METHODS.UNBIND,
+    METHODS.ACL,
+    METHODS.REPORT,
+    METHODS.MKACTIVITY,
+    METHODS.CHECKOUT,
+    METHODS.MERGE,
+    METHODS['M-SEARCH'],
+    METHODS.NOTIFY,
+    METHODS.SUBSCRIBE,
+    METHODS.UNSUBSCRIBE,
+    METHODS.PATCH,
+    METHODS.PURGE,
+    METHODS.MKCALENDAR,
+    METHODS.LINK,
+    METHODS.UNLINK,
+    METHODS.PRI,
+    // TODO(indutny): should we allow it with HTTP?
+    METHODS.SOURCE,
+];
+exports.METHODS_ICE = [
+    METHODS.SOURCE,
+];
+exports.METHODS_RTSP = [
+    METHODS.OPTIONS,
+    METHODS.DESCRIBE,
+    METHODS.ANNOUNCE,
+    METHODS.SETUP,
+    METHODS.PLAY,
+    METHODS.PAUSE,
+    METHODS.TEARDOWN,
+    METHODS.GET_PARAMETER,
+    METHODS.SET_PARAMETER,
+    METHODS.REDIRECT,
+    METHODS.RECORD,
+    METHODS.FLUSH,
+    // For AirPlay
+    METHODS.GET,
+    METHODS.POST,
+];
+exports.METHOD_MAP = utils_1.enumToMap(METHODS);
+exports.H_METHOD_MAP = {};
+Object.keys(exports.METHOD_MAP).forEach((key) => {
+    if (/^H/.test(key)) {
+        exports.H_METHOD_MAP[key] = exports.METHOD_MAP[key];
+    }
+});
+var FINISH;
+(function (FINISH) {
+    FINISH[FINISH["SAFE"] = 0] = "SAFE";
+    FINISH[FINISH["SAFE_WITH_CB"] = 1] = "SAFE_WITH_CB";
+    FINISH[FINISH["UNSAFE"] = 2] = "UNSAFE";
+})(FINISH = exports.FINISH || (exports.FINISH = {}));
+exports.ALPHA = [];
+for (let i = 'A'.charCodeAt(0); i <= 'Z'.charCodeAt(0); i++) {
+    // Upper case
+    exports.ALPHA.push(String.fromCharCode(i));
+    // Lower case
+    exports.ALPHA.push(String.fromCharCode(i + 0x20));
+}
+exports.NUM_MAP = {
+    0: 0, 1: 1, 2: 2, 3: 3, 4: 4,
+    5: 5, 6: 6, 7: 7, 8: 8, 9: 9,
+};
+exports.HEX_MAP = {
+    0: 0, 1: 1, 2: 2, 3: 3, 4: 4,
+    5: 5, 6: 6, 7: 7, 8: 8, 9: 9,
+    A: 0XA, B: 0XB, C: 0XC, D: 0XD, E: 0XE, F: 0XF,
+    a: 0xa, b: 0xb, c: 0xc, d: 0xd, e: 0xe, f: 0xf,
+};
+exports.NUM = [
+    '0', '1', '2', '3', '4', '5', '6', '7', '8', '9',
+];
+exports.ALPHANUM = exports.ALPHA.concat(exports.NUM);
+exports.MARK = ['-', '_', '.', '!', '~', '*', '\'', '(', ')'];
+exports.USERINFO_CHARS = exports.ALPHANUM
+    .concat(exports.MARK)
+    .concat(['%', ';', ':', '&', '=', '+', '$', ',']);
+// TODO(indutny): use RFC
+exports.STRICT_URL_CHAR = [
+    '!', '"', '$', '%', '&', '\'',
+    '(', ')', '*', '+', ',', '-', '.', '/',
+    ':', ';', '<', '=', '>',
+    '@', '[', '\\', ']', '^', '_',
+    '`',
+    '{', '|', '}', '~',
+].concat(exports.ALPHANUM);
+exports.URL_CHAR = exports.STRICT_URL_CHAR
+    .concat(['\t', '\f']);
+// All characters with 0x80 bit set to 1
+for (let i = 0x80; i <= 0xff; i++) {
+    exports.URL_CHAR.push(i);
+}
+exports.HEX = exports.NUM.concat(['a', 'b', 'c', 'd', 'e', 'f', 'A', 'B', 'C', 'D', 'E', 'F']);
+/* Tokens as defined by rfc 2616. Also lowercases them.
+ *        token       = 1*<any CHAR except CTLs or separators>
+ *     separators     = "(" | ")" | "<" | ">" | "@"
+ *                    | "," | ";" | ":" | "\" | <">
+ *                    | "/" | "[" | "]" | "?" | "="
+ *                    | "{" | "}" | SP | HT
+ */
+exports.STRICT_TOKEN = [
+    '!', '#', '$', '%', '&', '\'',
+    '*', '+', '-', '.',
+    '^', '_', '`',
+    '|', '~',
+].concat(exports.ALPHANUM);
+exports.TOKEN = exports.STRICT_TOKEN.concat([' ']);
+/*
+ * Verify that a char is a valid visible (printable) US-ASCII
+ * character or %x80-FF
+ */
+exports.HEADER_CHARS = ['\t'];
+for (let i = 32; i <= 255; i++) {
+    if (i !== 127) {
+        exports.HEADER_CHARS.push(i);
+    }
+}
+// ',' = 44 (0x2C)
+exports.CONNECTION_TOKEN_CHARS = exports.HEADER_CHARS.filter((c) => c !== 44);
+exports.MAJOR = exports.NUM_MAP;
+exports.MINOR = exports.MAJOR;
+var HEADER_STATE;
+(function (HEADER_STATE) {
+    HEADER_STATE[HEADER_STATE["GENERAL"] = 0] = "GENERAL";
+    HEADER_STATE[HEADER_STATE["CONNECTION"] = 1] = "CONNECTION";
+    HEADER_STATE[HEADER_STATE["CONTENT_LENGTH"] = 2] = "CONTENT_LENGTH";
+    HEADER_STATE[HEADER_STATE["TRANSFER_ENCODING"] = 3] = "TRANSFER_ENCODING";
+    HEADER_STATE[HEADER_STATE["UPGRADE"] = 4] = "UPGRADE";
+    HEADER_STATE[HEADER_STATE["CONNECTION_KEEP_ALIVE"] = 5] = "CONNECTION_KEEP_ALIVE";
+    HEADER_STATE[HEADER_STATE["CONNECTION_CLOSE"] = 6] = "CONNECTION_CLOSE";
+    HEADER_STATE[HEADER_STATE["CONNECTION_UPGRADE"] = 7] = "CONNECTION_UPGRADE";
+    HEADER_STATE[HEADER_STATE["TRANSFER_ENCODING_CHUNKED"] = 8] = "TRANSFER_ENCODING_CHUNKED";
+})(HEADER_STATE = exports.HEADER_STATE || (exports.HEADER_STATE = {}));
+exports.SPECIAL_HEADERS = {
+    'connection': HEADER_STATE.CONNECTION,
+    'content-length': HEADER_STATE.CONTENT_LENGTH,
+    'proxy-connection': HEADER_STATE.CONNECTION,
+    'transfer-encoding': HEADER_STATE.TRANSFER_ENCODING,
+    'upgrade': HEADER_STATE.UPGRADE,
+};
+//# sourceMappingURL=constants.js.map
+
+/***/ }),
+
+/***/ 9741:
+/***/ ((module) => {
+
+module.exports = 'AGFzbQEAAAABMAhgAX8Bf2ADf39/AX9gBH9/f38Bf2AAAGADf39/AGABfwBgAn9/AGAGf39/f39/AALLAQgDZW52GHdhc21fb25faGVhZGVyc19jb21wbGV0ZQACA2VudhV3YXNtX29uX21lc3NhZ2VfYmVnaW4AAANlbnYLd2FzbV9vbl91cmwAAQNlbnYOd2FzbV9vbl9zdGF0dXMAAQNlbnYUd2FzbV9vbl9oZWFkZXJfZmllbGQAAQNlbnYUd2FzbV9vbl9oZWFkZXJfdmFsdWUAAQNlbnYMd2FzbV9vbl9ib2R5AAEDZW52GHdhc21fb25fbWVzc2FnZV9jb21wbGV0ZQAAA0ZFAwMEAAAFAAAAAAAABQEFAAUFBQAABgAAAAAGBgYGAQEBAQEBAQEBAQEBAQEBAQEBAQEBAQEBAQABAAABAQcAAAUFAwABBAUBcAESEgUDAQACBggBfwFBgNQECwfRBSIGbWVtb3J5AgALX2luaXRpYWxpemUACRlfX2luZGlyZWN0X2Z1bmN0aW9uX3RhYmxlAQALbGxodHRwX2luaXQAChhsbGh0dHBfc2hvdWxkX2tlZXBfYWxpdmUAQQxsbGh0dHBfYWxsb2MADAZtYWxsb2MARgtsbGh0dHBfZnJlZQANBGZyZWUASA9sbGh0dHBfZ2V0X3R5cGUADhVsbGh0dHBfZ2V0X2h0dHBfbWFqb3IADxVsbGh0dHBfZ2V0X2h0dHBfbWlub3IAEBFsbGh0dHBfZ2V0X21ldGhvZAARFmxsaHR0cF9nZXRfc3RhdHVzX2NvZGUAEhJsbGh0dHBfZ2V0X3VwZ3JhZGUAEwxsbGh0dHBfcmVzZXQAFA5sbGh0dHBfZXhlY3V0ZQAVFGxsaHR0cF9zZXR0aW5nc19pbml0ABYNbGxodHRwX2ZpbmlzaAAXDGxsaHR0cF9wYXVzZQAYDWxsaHR0cF9yZXN1bWUAGRtsbGh0dHBfcmVzdW1lX2FmdGVyX3VwZ3JhZGUAGhBsbGh0dHBfZ2V0X2Vycm5vABsXbGxodHRwX2dldF9lcnJvcl9yZWFzb24AHBdsbGh0dHBfc2V0X2Vycm9yX3JlYXNvbgAdFGxsaHR0cF9nZXRfZXJyb3JfcG9zAB4RbGxodHRwX2Vycm5vX25hbWUAHxJsbGh0dHBfbWV0aG9kX25hbWUAIBJsbGh0dHBfc3RhdHVzX25hbWUAIRpsbGh0dHBfc2V0X2xlbmllbnRfaGVhZGVycwAiIWxsaHR0cF9zZXRfbGVuaWVudF9jaHVua2VkX2xlbmd0aAAjHWxsaHR0cF9zZXRfbGVuaWVudF9rZWVwX2FsaXZlACQkbGxodHRwX3NldF9sZW5pZW50X3RyYW5zZmVyX2VuY29kaW5nACUYbGxodHRwX21lc3NhZ2VfbmVlZHNfZW9mAD8JFwEAQQELEQECAwQFCwYHNTk3MS8tJyspCsLgAkUCAAsIABCIgICAAAsZACAAEMKAgIAAGiAAIAI2AjggACABOgAoCxwAIAAgAC8BMiAALQAuIAAQwYCAgAAQgICAgAALKgEBf0HAABDGgICAACIBEMKAgIAAGiABQYCIgIAANgI4IAEgADoAKCABCwoAIAAQyICAgAALBwAgAC0AKAsHACAALQAqCwcAIAAtACsLBwAgAC0AKQsHACAALwEyCwcAIAAtAC4LRQEEfyAAKAIYIQEgAC0ALSECIAAtACghAyAAKAI4IQQgABDCgICAABogACAENgI4IAAgAzoAKCAAIAI6AC0gACABNgIYCxEAIAAgASABIAJqEMOAgIAACxAAIABBAEHcABDMgICAABoLZwEBf0EAIQECQCAAKAIMDQACQAJAAkACQCAALQAvDgMBAAMCCyAAKAI4IgFFDQAgASgCLCIBRQ0AIAAgARGAgICAAAAiAQ0DC0EADwsQyoCAgAAACyAAQcOWgIAANgIQQQ4hAQsgAQseAAJAIAAoAgwNACAAQdGbgIAANgIQIABBFTYCDAsLFgACQCAAKAIMQRVHDQAgAEEANgIMCwsWAAJAIAAoAgxBFkcNACAAQQA2AgwLCwcAIAAoAgwLBwAgACgCEAsJACAAIAE2AhALBwAgACgCFAsiAAJAIABBJEkNABDKgICAAAALIABBAnRBoLOAgABqKAIACyIAAkAgAEEuSQ0AEMqAgIAAAAsgAEECdEGwtICAAGooAgAL7gsBAX9B66iAgAAhAQJAAkACQAJAAkACQAJAAkACQAJAAkACQAJAAkACQAJAAkACQAJAAkACQAJAAkACQAJAAkACQAJAAkACQAJAAkACQAJAAkACQAJAAkACQAJAAkACQAJAAkACQAJAAkACQAJAAkACQAJAAkACQAJAAkACQAJAAkACQAJAAkACQAJAAkACQAJAAkACQAJAAkACQAJAAkACQAJAAkACQAJAAkACQAJAAkACQAJAAkACQAJAAkACQAJAAkACQAJAAkACQAJAAkACQAJAIABBnH9qDvQDY2IAAWFhYWFhYQIDBAVhYWFhYWFhYWFhYWFhYWFhYWFhYWFhYWFhYWFhYWFhYWFhYWFhYWFhYWFhYWFhYWFhYWFhYWFhYWFhYWFhYWFhYWFhYWFhYWFhYWFhYWFhYWFhYWFhBgcICQoLDA0OD2FhYWFhEGFhYWFhYWFhYWFhEWFhYWFhYWFhYWFhYWFhYWFhYWFhYWFhYWFhYWFhYWFhYWFhYWFhYWFhYWFhYWFhYWFhYWFhYWFhYWFhYWFhYWFhYWFhYWFhYRITFBUWFxgZGhthYWFhYWFhYWFhYWFhYWFhYWFhYWFhYWFhYWFhYWFhYWFhYWFhYWFhYWFhYWFhYWFhYWFhYWFhYWFhYWFhYWFhYWFhYWFhYWFhYWFhYWFhYWFhYWFhYWFhYWFhHB0eHyAhIiMkJSYnKCkqKywtLi8wMTIzNDU2YTc4OTphYWFhYWFhYTthYWE8YWFhYT0+P2FhYWFhYWFhQGFhQWFhYWFhYWFhYWFhYWFhYWFhYWFhYWFhYWFhYWFhYUJDREVGR0hJSktMTU5PUFFSU2FhYWFhYWFhVFVWV1hZWlthXF1hYWFhYWFhYWFhYWFhYWFhYWFhYWFhYWFhYWFhYWFeYWFhYWFhYWFhYWFhYWFhYWFhYWFhYWFhYWFhYWFhYWFhYWFhX2BhC0Hhp4CAAA8LQaShgIAADwtBy6yAgAAPC0H+sYCAAA8LQcCkgIAADwtBq6SAgAAPC0GNqICAAA8LQeKmgIAADwtBgLCAgAAPC0G5r4CAAA8LQdekgIAADwtB75+AgAAPC0Hhn4CAAA8LQfqfgIAADwtB8qCAgAAPC0Gor4CAAA8LQa6ygIAADwtBiLCAgAAPC0Hsp4CAAA8LQYKigIAADwtBjp2AgAAPC0HQroCAAA8LQcqjgIAADwtBxbKAgAAPC0HfnICAAA8LQdKcgIAADwtBxKCAgAAPC0HXoICAAA8LQaKfgIAADwtB7a6AgAAPC0GrsICAAA8LQdSlgIAADwtB
zK6AgAAPC0H6roCAAA8LQfyrgIAADwtB0rCAgAAPC0HxnYCAAA8LQbuggIAADwtB96uAgAAPC0GQsYCAAA8LQdexgIAADwtBoq2AgAAPC0HUp4CAAA8LQeCrgIAADwtBn6yAgAAPC0HrsYCAAA8LQdWfgIAADwtByrGAgAAPC0HepYCAAA8LQdSegIAADwtB9JyAgAAPC0GnsoCAAA8LQbGdgIAADwtBoJ2AgAAPC0G5sYCAAA8LQbywgIAADwtBkqGAgAAPC0GzpoCAAA8LQemsgIAADwtBrJ6AgAAPC0HUq4CAAA8LQfemgIAADwtBgKaAgAAPC0GwoYCAAA8LQf6egIAADwtBjaOAgAAPC0GJrYCAAA8LQfeigIAADwtBoLGAgAAPC0Gun4CAAA8LQcalgIAADwtB6J6AgAAPC0GTooCAAA8LQcKvgIAADwtBw52AgAAPC0GLrICAAA8LQeGdgIAADwtBja+AgAAPC0HqoYCAAA8LQbStgIAADwtB0q+AgAAPC0HfsoCAAA8LQdKygIAADwtB8LCAgAAPC0GpooCAAA8LQfmjgIAADwtBmZ6AgAAPC0G1rICAAA8LQZuwgIAADwtBkrKAgAAPC0G2q4CAAA8LQcKigIAADwtB+LKAgAAPC0GepYCAAA8LQdCigIAADwtBup6AgAAPC0GBnoCAAA8LEMqAgIAAAAtB1qGAgAAhAQsgAQsWACAAIAAtAC1B/gFxIAFBAEdyOgAtCxkAIAAgAC0ALUH9AXEgAUEAR0EBdHI6AC0LGQAgACAALQAtQfsBcSABQQBHQQJ0cjoALQsZACAAIAAtAC1B9wFxIAFBAEdBA3RyOgAtCy4BAn9BACEDAkAgACgCOCIERQ0AIAQoAgAiBEUNACAAIAQRgICAgAAAIQMLIAMLSQECf0EAIQMCQCAAKAI4IgRFDQAgBCgCBCIERQ0AIAAgASACIAFrIAQRgYCAgAAAIgNBf0cNACAAQcaRgIAANgIQQRghAwsgAwsuAQJ/QQAhAwJAIAAoAjgiBEUNACAEKAIwIgRFDQAgACAEEYCAgIAAACEDCyADC0kBAn9BACEDAkAgACgCOCIERQ0AIAQoAggiBEUNACAAIAEgAiABayAEEYGAgIAAACIDQX9HDQAgAEH2ioCAADYCEEEYIQMLIAMLLgECf0EAIQMCQCAAKAI4IgRFDQAgBCgCNCIERQ0AIAAgBBGAgICAAAAhAwsgAwtJAQJ/QQAhAwJAIAAoAjgiBEUNACAEKAIMIgRFDQAgACABIAIgAWsgBBGBgICAAAAiA0F/Rw0AIABB7ZqAgAA2AhBBGCEDCyADCy4BAn9BACEDAkAgACgCOCIERQ0AIAQoAjgiBEUNACAAIAQRgICAgAAAIQMLIAMLSQECf0EAIQMCQCAAKAI4IgRFDQAgBCgCECIERQ0AIAAgASACIAFrIAQRgYCAgAAAIgNBf0cNACAAQZWQgIAANgIQQRghAwsgAwsuAQJ/QQAhAwJAIAAoAjgiBEUNACAEKAI8IgRFDQAgACAEEYCAgIAAACEDCyADC0kBAn9BACEDAkAgACgCOCIERQ0AIAQoAhQiBEUNACAAIAEgAiABayAEEYGAgIAAACIDQX9HDQAgAEGqm4CAADYCEEEYIQMLIAMLLgECf0EAIQMCQCAAKAI4IgRFDQAgBCgCQCIERQ0AIAAgBBGAgICAAAAhAwsgAwtJAQJ/QQAhAwJAIAAoAjgiBEUNACAEKAIYIgRFDQAgACABIAIgAWsgBBGBgICAAAAiA0F/Rw0AIABB7ZOAgAA2AhBBGCEDCyADCy4BAn9BACEDAkAgACgCOCIERQ0AIAQoAkQiBEUNACAAIAQRgICAgAAAIQMLIAMLLgECf0EAIQMCQCAAKAI4IgRFDQAgBCgCJCIERQ0AIAAgBBGAgICAAAAhAwsgAwsuAQJ/QQAhAwJAIAAoAjgiBEUNACAEKAIsIgRFDQAgACAEEYCAgIAAACEDCyADC0kBAn9BACEDAkAgACgCOCIERQ0AIAQoAigiBEUNACAAIAEgAiABayAEEYGAgIAAACIDQX9HDQAgAEH2iICAADYCEEEYIQMLIAMLLgECf0EAIQMCQCAAKAI4IgRFDQAgBCgCUCIERQ0AIAAgBBGAgICAAAAhAwsgAwtJAQJ/QQAhAwJAIAAoAjgiBEUNACAEKAIcIgRFDQAgACABIAIgAWsgBBGBgICAAAAiA0F/Rw0AIABBwpmAgAA2AhBBGCEDCyADCy4BAn9BACEDAkAgACgCOCIERQ0AIAQoAkgiBEUNACAAIAQRgICAgAAAIQMLIAMLSQECf0EAIQMCQCAAKAI4IgRFDQAgBCgCICIERQ0AIAAgASACIAFrIAQRgYCAgAAAIgNBf0cNACAAQZSUgIAANgIQQRghAwsgAwsuAQJ/QQAhAwJAIAAoAjgiBEUNACAEKAJMIgRFDQAgACAEEYCAgIAAACEDCyADCy4BAn9BACEDAkAgACgCOCIERQ0AIAQoAlQiBEUNACAAIAQRgICAgAAAIQMLIAMLLgECf0EAIQMCQCAAKAI4IgRFDQAgBCgCWCIERQ0AIAAgBBGAgICAAAAhAwsgAwtFAQF/AkACQCAALwEwQRRxQRRHDQBBASEDIAAtAChBAUYNASAALwEyQeUARiEDDAELIAAtAClBBUYhAwsgACADOgAuQQAL/gEBA39BASEDAkAgAC8BMCIEQQhxDQAgACkDIEIAUiEDCwJAAkAgAC0ALkUNAEEBIQUgAC0AKUEFRg0BQQEhBSAEQcAAcUUgA3FBAUcNAQtBACEFIARBwABxDQBBAiEFIARB//8DcSIDQQhxDQACQCADQYAEcUUNAAJAIAAtAChBAUcNACAALQAtQQpxDQBBBQ8LQQQPCwJAIANBIHENAAJAIAAtAChBAUYNACAALwEyQf//A3EiAEGcf2pB5ABJDQAgAEHMAUYNACAAQbACRg0AQQQhBSAEQShxRQ0CIANBiARxQYAERg0CC0EADwtBAEEDIAApAyBQGyEFCyAFC2IBAn9BACEBAkAgAC0AKEEBRg0AIAAvATJB//8DcSICQZx/akHkAEkNACACQcwBRg0AIAJBsAJGDQAgAC8BMCIAQcAAcQ0AQQEhASAAQYgEcUGABEYNACAAQShxRSEBCyABC6cBAQN/AkACQAJAIAAtACpFDQAgAC0AK0UNAEEAIQMgAC8BMCIEQQJxRQ0BDAILQQAhAyAALwEwIgRBAXFFDQELQQEhAyAALQAoQQFGDQAgAC8BMkH//wNxIgVBnH9qQeQASQ0AIAVBzAFGDQAgBUGwAkYNACAEQcAAcQ0AQQAhAyAEQYgEcUGABEYNACAEQShxQQBHIQMLIABBADsBMCAAQQA6AC8gAwuZAQECfwJAAkACQCAALQAqRQ0AIAAtACtFDQBBACEBIAAvATAiAkECcUUNAQwCC0EAIQEgAC8BMCICQQFxRQ0BC0EBIQEgAC0AKEEBRg0AIAAvATJB//8DcSIAQZx/akHkAEkNACAAQcwBRg0
AIABBsAJGDQAgAkHAAHENAEEAIQEgAkGIBHFBgARGDQAgAkEocUEARyEBCyABC1kAIABBGGpCADcDACAAQgA3AwAgAEE4akIANwMAIABBMGpCADcDACAAQShqQgA3AwAgAEEgakIANwMAIABBEGpCADcDACAAQQhqQgA3AwAgAEHdATYCHEEAC3sBAX8CQCAAKAIMIgMNAAJAIAAoAgRFDQAgACABNgIECwJAIAAgASACEMSAgIAAIgMNACAAKAIMDwsgACADNgIcQQAhAyAAKAIEIgFFDQAgACABIAIgACgCCBGBgICAAAAiAUUNACAAIAI2AhQgACABNgIMIAEhAwsgAwvk8wEDDn8DfgR/I4CAgIAAQRBrIgMkgICAgAAgASEEIAEhBSABIQYgASEHIAEhCCABIQkgASEKIAEhCyABIQwgASENIAEhDiABIQ8CQAJAAkACQAJAAkACQAJAAkACQAJAAkACQAJAAkACQAJAAkACQAJAAkACQAJAAkACQAJAAkACQAJAAkACQAJAAkACQAJAAkACQAJAAkACQAJAAkACQAJAAkACQAJAAkACQAJAAkACQAJAAkACQAJAAkACQAJAAkACQAJAAkACQAJAAkACQAJAAkACQAJAAkACQAJAAkACQAJAAkACQAJAAkACQAJAAkACQAJAAkACQAJAAkACQAJAAkACQAJAAkACQAJAAkACQAJAAkACQAJAAkACQAJAAkACQAJAAkACQAJAAkACQAJAAkACQAJAAkACQAJAAkACQAJAAkACQAJAAkACQAJAAkACQAJAAkACQAJAAkACQAJAAkACQAJAAkACQAJAAkACQAJAAkACQAJAAkACQAJAAkACQAJAAkACQAJAAkACQAJAAkACQAJAAkACQAJAAkACQAJAAkACQAJAAkACQAJAAkACQAJAAkACQAJAAkACQAJAAkACQAJAAkACQAJAAkACQAJAAkACQAJAAkACQAJAAkACQAJAAkACQAJAAkACQAJAAkACQAJAAkACQAJAAkACQAJAAkACQAJAAkAgACgCHCIQQX9qDt0B2gEB2QECAwQFBgcICQoLDA0O2AEPENcBERLWARMUFRYXGBkaG+AB3wEcHR7VAR8gISIjJCXUASYnKCkqKyzTAdIBLS7RAdABLzAxMjM0NTY3ODk6Ozw9Pj9AQUJDREVG2wFHSElKzwHOAUvNAUzMAU1OT1BRUlNUVVZXWFlaW1xdXl9gYWJjZGVmZ2hpamtsbW5vcHFyc3R1dnd4eXp7fH1+f4ABgQGCAYMBhAGFAYYBhwGIAYkBigGLAYwBjQGOAY8BkAGRAZIBkwGUAZUBlgGXAZgBmQGaAZsBnAGdAZ4BnwGgAaEBogGjAaQBpQGmAacBqAGpAaoBqwGsAa0BrgGvAbABsQGyAbMBtAG1AbYBtwHLAcoBuAHJAbkByAG6AbsBvAG9Ab4BvwHAAcEBwgHDAcQBxQHGAQDcAQtBACEQDMYBC0EOIRAMxQELQQ0hEAzEAQtBDyEQDMMBC0EQIRAMwgELQRMhEAzBAQtBFCEQDMABC0EVIRAMvwELQRYhEAy+AQtBFyEQDL0BC0EYIRAMvAELQRkhEAy7AQtBGiEQDLoBC0EbIRAMuQELQRwhEAy4AQtBCCEQDLcBC0EdIRAMtgELQSAhEAy1AQtBHyEQDLQBC0EHIRAMswELQSEhEAyyAQtBIiEQDLEBC0EeIRAMsAELQSMhEAyvAQtBEiEQDK4BC0ERIRAMrQELQSQhEAysAQtBJSEQDKsBC0EmIRAMqgELQSchEAypAQtBwwEhEAyoAQtBKSEQDKcBC0ErIRAMpgELQSwhEAylAQtBLSEQDKQBC0EuIRAMowELQS8hEAyiAQtBxAEhEAyhAQtBMCEQDKABC0E0IRAMnwELQQwhEAyeAQtBMSEQDJ0BC0EyIRAMnAELQTMhEAybAQtBOSEQDJoBC0E1IRAMmQELQcUBIRAMmAELQQshEAyXAQtBOiEQDJYBC0E2IRAMlQELQQohEAyUAQtBNyEQDJMBC0E4IRAMkgELQTwhEAyRAQtBOyEQDJABC0E9IRAMjwELQQkhEAyOAQtBKCEQDI0BC0E+IRAMjAELQT8hEAyLAQtBwAAhEAyKAQtBwQAhEAyJAQtBwgAhEAyIAQtBwwAhEAyHAQtBxAAhEAyGAQtBxQAhEAyFAQtBxgAhEAyEAQtBKiEQDIMBC0HHACEQDIIBC0HIACEQDIEBC0HJACEQDIABC0HKACEQDH8LQcsAIRAMfgtBzQAhEAx9C0HMACEQDHwLQc4AIRAMewtBzwAhEAx6C0HQACEQDHkLQdEAIRAMeAtB0gAhEAx3C0HTACEQDHYLQdQAIRAMdQtB1gAhEAx0C0HVACEQDHMLQQYhEAxyC0HXACEQDHELQQUhEAxwC0HYACEQDG8LQQQhEAxuC0HZACEQDG0LQdoAIRAMbAtB2wAhEAxrC0HcACEQDGoLQQMhEAxpC0HdACEQDGgLQd4AIRAMZwtB3wAhEAxmC0HhACEQDGULQeAAIRAMZAtB4gAhEAxjC0HjACEQDGILQQIhEAxhC0HkACEQDGALQeUAIRAMXwtB5gAhEAxeC0HnACEQDF0LQegAIRAMXAtB6QAhEAxbC0HqACEQDFoLQesAIRAMWQtB7AAhEAxYC0HtACEQDFcLQe4AIRAMVgtB7wAhEAxVC0HwACEQDFQLQfEAIRAMUwtB8gAhEAxSC0HzACEQDFELQfQAIRAMUAtB9QAhEAxPC0H2ACEQDE4LQfcAIRAMTQtB+AAhEAxMC0H5ACEQDEsLQfoAIRAMSgtB+wAhEAxJC0H8ACEQDEgLQf0AIRAMRwtB/gAhEAxGC0H/ACEQDEULQYABIRAMRAtBgQEhEAxDC0GCASEQDEILQYMBIRAMQQtBhAEhEAxAC0GFASEQDD8LQYYBIRAMPgtBhwEhEAw9C0GIASEQDDwLQYkBIRAMOwtBigEhEAw6C0GLASEQDDkLQYwBIRAMOAtBjQEhEAw3C0GOASEQDDYLQY8BIRAMNQtBkAEhEAw0C0GRASEQDDMLQZIBIRAMMgtBkwEhEAwxC0GUASEQDDALQZUBIRAMLwtBlgEhEAwuC0GXASEQDC0LQZgBIRAMLAtBmQEhEAwrC0GaASEQDCoLQZsBIRAMKQtBnAEhEAwoC0GdASEQDCcLQZ4BIRAMJgtBnwEhEAwlC0GgASEQDCQLQaEBIRAMIwtBogEhEAwiC0GjASEQDCELQaQBIRAMIAtBpQEhEAwfC0GmASEQDB4LQacBIRAMHQtBqAEhEAwcC0GpASEQDBsLQaoBIRAMGgtBqwEhEAwZC0GsASEQDBgLQa0BIRAMFwtBrgEhEAwWC0EBIRAMFQtBrwEhEAwUC0GwASEQDBMLQbEBIRAMEgtBswEhEAwRC0GyASEQDBALQbQBIRAMDwtBtQEhEAwOC0G2ASEQDA0LQbcBIRAMDAtBuAEhEAwLC0G5ASEQDAoLQboBIRAMCQtBuwEhEAwIC0HGASEQDAcLQbwBIRAMBg
tBvQEhEAwFC0G+ASEQDAQLQb8BIRAMAwtBwAEhEAwCC0HCASEQDAELQcEBIRALA0ACQAJAAkACQAJAAkACQAJAAkACQAJAAkACQAJAAkACQAJAAkACQAJAAkACQAJAAkACQAJAAkACQAJAAkACQAJAAkACQAJAAkACQAJAAkACQAJAAkACQAJAAkACQAJAAkACQAJAAkACQAJAAkACQAJAAkACQAJAAkACQAJAAkACQAJAAkACQAJAAkACQAJAAkACQAJAAkACQAJAAkACQAJAAkACQAJAAkACQAJAAkACQAJAAkACQAJAAkACQAJAAkACQAJAAkACQAJAAkACQAJAAkACQAJAAkACQAJAAkACQAJAAkACQAJAAkACQAJAAkACQAJAAkACQAJAAkACQAJAAkACQAJAAkACQAJAAkACQAJAAkACQAJAAkACQAJAAkACQAJAAkACQAJAAkACQAJAAkACQAJAAkACQAJAAkACQAJAAkACQAJAAkACQAJAAkACQAJAAkACQAJAAkACQAJAAkACQAJAAkACQAJAAkACQAJAAkACQAJAAkACQAJAAkACQAJAAkACQAJAAkACQAJAAkACQAJAAkACQAJAAkACQAJAAkACQAJAAkACQAJAAkACQAJAAkACQAJAAkACQAJAAkACQAJAAkACQAJAAkACQAJAAkACQAJAAkACQAJAAkACQAJAAkACQAJAAkACQAJAAkACQAJAAkACQAJAAkACQAJAAkACQAJAAkACQAJAAkACQAJAAkACQAJAAkACQAJAAkACQAJAAkACQAJAAkACQAJAAkACQAJAAkACQAJAAkACQAJAAkACQAJAAkACQAJAAkACQAJAAkACQAJAAkACQAJAAkACQAJAAkACQAJAAkACQAJAAkACQAJAAkACQAJAAkACQAJAAkACQAJAAkACQAJAAkACQAJAAkACQAJAAkACQAJAAkACQAJAAkACQAJAAkACQAJAAkACQAJAAkACQAJAAkACQAJAAkACQAJAAkACQAJAAkACQAJAAkACQAJAAkACQAJAAkACQAJAAkACQAJAAkACQAJAAkACQAJAAkACQAJAAkACQAJAAkACQAJAAkACQAJAAkACQAJAAkACQAJAAkACQAJAAkACQAJAAkACQAJAAkACQAJAAkACQAJAAkACQAJAAkACQAJAAkACQAJAAkACQAJAAkACQAJAAkACQAJAAkACQAJAAkACQAJAAkACQAJAAkACQAJAAkACQAJAAkACQAJAAkACQAJAAkACQAJAAkACQAJAAkACQAJAAkACQAJAAkACQAJAAkACQAJAAkACQAJAAkACQAJAAkACQAJAAkACQAJAAkACQCAQDscBAAECAwQFBgcICQoLDA0ODxAREhMUFRYXGBkaGxweHyAhIyUoP0BBREVGR0hJSktMTU9QUVJT3gNXWVtcXWBiZWZnaGlqa2xtb3BxcnN0dXZ3eHl6e3x9foABggGFAYYBhwGJAYsBjAGNAY4BjwGQAZEBlAGVAZYBlwGYAZkBmgGbAZwBnQGeAZ8BoAGhAaIBowGkAaUBpgGnAagBqQGqAasBrAGtAa4BrwGwAbEBsgGzAbQBtQG2AbcBuAG5AboBuwG8Ab0BvgG/AcABwQHCAcMBxAHFAcYBxwHIAckBygHLAcwBzQHOAc8B0AHRAdIB0wHUAdUB1gHXAdgB2QHaAdsB3AHdAd4B4AHhAeIB4wHkAeUB5gHnAegB6QHqAesB7AHtAe4B7wHwAfEB8gHzAZkCpAKwAv4C/gILIAEiBCACRw3zAUHdASEQDP8DCyABIhAgAkcN3QFBwwEhEAz+AwsgASIBIAJHDZABQfcAIRAM/QMLIAEiASACRw2GAUHvACEQDPwDCyABIgEgAkcNf0HqACEQDPsDCyABIgEgAkcNe0HoACEQDPoDCyABIgEgAkcNeEHmACEQDPkDCyABIgEgAkcNGkEYIRAM+AMLIAEiASACRw0UQRIhEAz3AwsgASIBIAJHDVlBxQAhEAz2AwsgASIBIAJHDUpBPyEQDPUDCyABIgEgAkcNSEE8IRAM9AMLIAEiASACRw1BQTEhEAzzAwsgAC0ALkEBRg3rAwyHAgsgACABIgEgAhDAgICAAEEBRw3mASAAQgA3AyAM5wELIAAgASIBIAIQtICAgAAiEA3nASABIQEM9QILAkAgASIBIAJHDQBBBiEQDPADCyAAIAFBAWoiASACELuAgIAAIhAN6AEgASEBDDELIABCADcDIEESIRAM1QMLIAEiECACRw0rQR0hEAztAwsCQCABIgEgAkYNACABQQFqIQFBECEQDNQDC0EHIRAM7AMLIABCACAAKQMgIhEgAiABIhBrrSISfSITIBMgEVYbNwMgIBEgElYiFEUN5QFBCCEQDOsDCwJAIAEiASACRg0AIABBiYCAgAA2AgggACABNgIEIAEhAUEUIRAM0gMLQQkhEAzqAwsgASEBIAApAyBQDeQBIAEhAQzyAgsCQCABIgEgAkcNAEELIRAM6QMLIAAgAUEBaiIBIAIQtoCAgAAiEA3lASABIQEM8gILIAAgASIBIAIQuICAgAAiEA3lASABIQEM8gILIAAgASIBIAIQuICAgAAiEA3mASABIQEMDQsgACABIgEgAhC6gICAACIQDecBIAEhAQzwAgsCQCABIgEgAkcNAEEPIRAM5QMLIAEtAAAiEEE7Rg0IIBBBDUcN6AEgAUEBaiEBDO8CCyAAIAEiASACELqAgIAAIhAN6AEgASEBDPICCwNAAkAgAS0AAEHwtYCAAGotAAAiEEEBRg0AIBBBAkcN6wEgACgCBCEQIABBADYCBCAAIBAgAUEBaiIBELmAgIAAIhAN6gEgASEBDPQCCyABQQFqIgEgAkcNAAtBEiEQDOIDCyAAIAEiASACELqAgIAAIhAN6QEgASEBDAoLIAEiASACRw0GQRshEAzgAwsCQCABIgEgAkcNAEEWIRAM4AMLIABBioCAgAA2AgggACABNgIEIAAgASACELiAgIAAIhAN6gEgASEBQSAhEAzGAwsCQCABIgEgAkYNAANAAkAgAS0AAEHwt4CAAGotAAAiEEECRg0AAkAgEEF/ag4E5QHsAQDrAewBCyABQQFqIQFBCCEQDMgDCyABQQFqIgEgAkcNAAtBFSEQDN8DC0EVIRAM3gMLA0ACQCABLQAAQfC5gIAAai0AACIQQQJGDQAgEEF/ag4E3gHsAeAB6wHsAQsgAUEBaiIBIAJHDQALQRghEAzdAwsCQCABIgEgAkYNACAAQYuAgIAANgIIIAAgATYCBCABIQFBByEQDMQDC0EZIRAM3AMLIAFBAWohAQwCCwJAIAEiFCACRw0AQRohEAzbAwsgFCEBAkAgFC0AAEFzag4U3QLuAu4C7gLuAu4C7gLuAu4C7gLuAu4C7gLuAu4C7gLuAu4C7gIA7gILQQAhECAAQQA2AhwgAEGvi4CAADYCECAAQQI2AgwgACAUQQFqNgIUDNoDCwJAIAEtAAAiEEE7Rg0AIBBBDUcN6AEgA
UEBaiEBDOUCCyABQQFqIQELQSIhEAy/AwsCQCABIhAgAkcNAEEcIRAM2AMLQgAhESAQIQEgEC0AAEFQag435wHmAQECAwQFBgcIAAAAAAAAAAkKCwwNDgAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAADxAREhMUAAtBHiEQDL0DC0ICIREM5QELQgMhEQzkAQtCBCERDOMBC0IFIREM4gELQgYhEQzhAQtCByERDOABC0IIIREM3wELQgkhEQzeAQtCCiERDN0BC0ILIREM3AELQgwhEQzbAQtCDSERDNoBC0IOIREM2QELQg8hEQzYAQtCCiERDNcBC0ILIREM1gELQgwhEQzVAQtCDSERDNQBC0IOIREM0wELQg8hEQzSAQtCACERAkACQAJAAkACQAJAAkACQAJAAkACQAJAAkACQAJAAkACQAJAAkACQCAQLQAAQVBqDjflAeQBAAECAwQFBgfmAeYB5gHmAeYB5gHmAQgJCgsMDeYB5gHmAeYB5gHmAeYB5gHmAeYB5gHmAeYB5gHmAeYB5gHmAeYB5gHmAeYB5gHmAeYB5gEODxAREhPmAQtCAiERDOQBC0IDIREM4wELQgQhEQziAQtCBSERDOEBC0IGIREM4AELQgchEQzfAQtCCCERDN4BC0IJIREM3QELQgohEQzcAQtCCyERDNsBC0IMIREM2gELQg0hEQzZAQtCDiERDNgBC0IPIREM1wELQgohEQzWAQtCCyERDNUBC0IMIREM1AELQg0hEQzTAQtCDiERDNIBC0IPIREM0QELIABCACAAKQMgIhEgAiABIhBrrSISfSITIBMgEVYbNwMgIBEgElYiFEUN0gFBHyEQDMADCwJAIAEiASACRg0AIABBiYCAgAA2AgggACABNgIEIAEhAUEkIRAMpwMLQSAhEAy/AwsgACABIhAgAhC+gICAAEF/ag4FtgEAxQIB0QHSAQtBESEQDKQDCyAAQQE6AC8gECEBDLsDCyABIgEgAkcN0gFBJCEQDLsDCyABIg0gAkcNHkHGACEQDLoDCyAAIAEiASACELKAgIAAIhAN1AEgASEBDLUBCyABIhAgAkcNJkHQACEQDLgDCwJAIAEiASACRw0AQSghEAy4AwsgAEEANgIEIABBjICAgAA2AgggACABIAEQsYCAgAAiEA3TASABIQEM2AELAkAgASIQIAJHDQBBKSEQDLcDCyAQLQAAIgFBIEYNFCABQQlHDdMBIBBBAWohAQwVCwJAIAEiASACRg0AIAFBAWohAQwXC0EqIRAMtQMLAkAgASIQIAJHDQBBKyEQDLUDCwJAIBAtAAAiAUEJRg0AIAFBIEcN1QELIAAtACxBCEYN0wEgECEBDJEDCwJAIAEiASACRw0AQSwhEAy0AwsgAS0AAEEKRw3VASABQQFqIQEMyQILIAEiDiACRw3VAUEvIRAMsgMLA0ACQCABLQAAIhBBIEYNAAJAIBBBdmoOBADcAdwBANoBCyABIQEM4AELIAFBAWoiASACRw0AC0ExIRAMsQMLQTIhECABIhQgAkYNsAMgAiAUayAAKAIAIgFqIRUgFCABa0EDaiEWAkADQCAULQAAIhdBIHIgFyAXQb9/akH/AXFBGkkbQf8BcSABQfC7gIAAai0AAEcNAQJAIAFBA0cNAEEGIQEMlgMLIAFBAWohASAUQQFqIhQgAkcNAAsgACAVNgIADLEDCyAAQQA2AgAgFCEBDNkBC0EzIRAgASIUIAJGDa8DIAIgFGsgACgCACIBaiEVIBQgAWtBCGohFgJAA0AgFC0AACIXQSByIBcgF0G/f2pB/wFxQRpJG0H/AXEgAUH0u4CAAGotAABHDQECQCABQQhHDQBBBSEBDJUDCyABQQFqIQEgFEEBaiIUIAJHDQALIAAgFTYCAAywAwsgAEEANgIAIBQhAQzYAQtBNCEQIAEiFCACRg2uAyACIBRrIAAoAgAiAWohFSAUIAFrQQVqIRYCQANAIBQtAAAiF0EgciAXIBdBv39qQf8BcUEaSRtB/wFxIAFB0MKAgABqLQAARw0BAkAgAUEFRw0AQQchAQyUAwsgAUEBaiEBIBRBAWoiFCACRw0ACyAAIBU2AgAMrwMLIABBADYCACAUIQEM1wELAkAgASIBIAJGDQADQAJAIAEtAABBgL6AgABqLQAAIhBBAUYNACAQQQJGDQogASEBDN0BCyABQQFqIgEgAkcNAAtBMCEQDK4DC0EwIRAMrQMLAkAgASIBIAJGDQADQAJAIAEtAAAiEEEgRg0AIBBBdmoOBNkB2gHaAdkB2gELIAFBAWoiASACRw0AC0E4IRAMrQMLQTghEAysAwsDQAJAIAEtAAAiEEEgRg0AIBBBCUcNAwsgAUEBaiIBIAJHDQALQTwhEAyrAwsDQAJAIAEtAAAiEEEgRg0AAkACQCAQQXZqDgTaAQEB2gEACyAQQSxGDdsBCyABIQEMBAsgAUEBaiIBIAJHDQALQT8hEAyqAwsgASEBDNsBC0HAACEQIAEiFCACRg2oAyACIBRrIAAoAgAiAWohFiAUIAFrQQZqIRcCQANAIBQtAABBIHIgAUGAwICAAGotAABHDQEgAUEGRg2OAyABQQFqIQEgFEEBaiIUIAJHDQALIAAgFjYCAAypAwsgAEEANgIAIBQhAQtBNiEQDI4DCwJAIAEiDyACRw0AQcEAIRAMpwMLIABBjICAgAA2AgggACAPNgIEIA8hASAALQAsQX9qDgTNAdUB1wHZAYcDCyABQQFqIQEMzAELAkAgASIBIAJGDQADQAJAIAEtAAAiEEEgciAQIBBBv39qQf8BcUEaSRtB/wFxIhBBCUYNACAQQSBGDQACQAJAAkACQCAQQZ1/ag4TAAMDAwMDAwMBAwMDAwMDAwMDAgMLIAFBAWohAUExIRAMkQMLIAFBAWohAUEyIRAMkAMLIAFBAWohAUEzIRAMjwMLIAEhAQzQAQsgAUEBaiIBIAJHDQALQTUhEAylAwtBNSEQDKQDCwJAIAEiASACRg0AA0ACQCABLQAAQYC8gIAAai0AAEEBRg0AIAEhAQzTAQsgAUEBaiIBIAJHDQALQT0hEAykAwtBPSEQDKMDCyAAIAEiASACELCAgIAAIhAN1gEgASEBDAELIBBBAWohAQtBPCEQDIcDCwJAIAEiASACRw0AQcIAIRAMoAMLAkADQAJAIAEtAABBd2oOGAAC/gL+AoQD/gL+Av4C/gL+Av4C/gL+Av4C/gL+Av4C/gL+Av4C/gL+Av4CAP4CCyABQQFqIgEgAkcNAAtBwgAhEAygAwsgAUEBaiEBIAAtAC1BAXFFDb0BIAEhAQtBLCEQDIUDCyABIgEgAkcN0wFBxAAhEAydAwsDQAJAIAEtAABBkMCAgABqLQAAQQFGDQAgASEBDLcCCyABQQFqIgEgAkcNAAtBxQAhEAycAwsgDS0AACIQQSBGDbMBIBBBOkcNgQMgACgCBCEBIABBADYCBCAAIAEgDRCvgICAACIBDdABIA1BAWohAQyzAgtBxwAhECABIg0gAkYN
mgMgAiANayAAKAIAIgFqIRYgDSABa0EFaiEXA0AgDS0AACIUQSByIBQgFEG/f2pB/wFxQRpJG0H/AXEgAUGQwoCAAGotAABHDYADIAFBBUYN9AIgAUEBaiEBIA1BAWoiDSACRw0ACyAAIBY2AgAMmgMLQcgAIRAgASINIAJGDZkDIAIgDWsgACgCACIBaiEWIA0gAWtBCWohFwNAIA0tAAAiFEEgciAUIBRBv39qQf8BcUEaSRtB/wFxIAFBlsKAgABqLQAARw3/AgJAIAFBCUcNAEECIQEM9QILIAFBAWohASANQQFqIg0gAkcNAAsgACAWNgIADJkDCwJAIAEiDSACRw0AQckAIRAMmQMLAkACQCANLQAAIgFBIHIgASABQb9/akH/AXFBGkkbQf8BcUGSf2oOBwCAA4ADgAOAA4ADAYADCyANQQFqIQFBPiEQDIADCyANQQFqIQFBPyEQDP8CC0HKACEQIAEiDSACRg2XAyACIA1rIAAoAgAiAWohFiANIAFrQQFqIRcDQCANLQAAIhRBIHIgFCAUQb9/akH/AXFBGkkbQf8BcSABQaDCgIAAai0AAEcN/QIgAUEBRg3wAiABQQFqIQEgDUEBaiINIAJHDQALIAAgFjYCAAyXAwtBywAhECABIg0gAkYNlgMgAiANayAAKAIAIgFqIRYgDSABa0EOaiEXA0AgDS0AACIUQSByIBQgFEG/f2pB/wFxQRpJG0H/AXEgAUGiwoCAAGotAABHDfwCIAFBDkYN8AIgAUEBaiEBIA1BAWoiDSACRw0ACyAAIBY2AgAMlgMLQcwAIRAgASINIAJGDZUDIAIgDWsgACgCACIBaiEWIA0gAWtBD2ohFwNAIA0tAAAiFEEgciAUIBRBv39qQf8BcUEaSRtB/wFxIAFBwMKAgABqLQAARw37AgJAIAFBD0cNAEEDIQEM8QILIAFBAWohASANQQFqIg0gAkcNAAsgACAWNgIADJUDC0HNACEQIAEiDSACRg2UAyACIA1rIAAoAgAiAWohFiANIAFrQQVqIRcDQCANLQAAIhRBIHIgFCAUQb9/akH/AXFBGkkbQf8BcSABQdDCgIAAai0AAEcN+gICQCABQQVHDQBBBCEBDPACCyABQQFqIQEgDUEBaiINIAJHDQALIAAgFjYCAAyUAwsCQCABIg0gAkcNAEHOACEQDJQDCwJAAkACQAJAIA0tAAAiAUEgciABIAFBv39qQf8BcUEaSRtB/wFxQZ1/ag4TAP0C/QL9Av0C/QL9Av0C/QL9Av0C/QL9AgH9Av0C/QICA/0CCyANQQFqIQFBwQAhEAz9AgsgDUEBaiEBQcIAIRAM/AILIA1BAWohAUHDACEQDPsCCyANQQFqIQFBxAAhEAz6AgsCQCABIgEgAkYNACAAQY2AgIAANgIIIAAgATYCBCABIQFBxQAhEAz6AgtBzwAhEAySAwsgECEBAkACQCAQLQAAQXZqDgQBqAKoAgCoAgsgEEEBaiEBC0EnIRAM+AILAkAgASIBIAJHDQBB0QAhEAyRAwsCQCABLQAAQSBGDQAgASEBDI0BCyABQQFqIQEgAC0ALUEBcUUNxwEgASEBDIwBCyABIhcgAkcNyAFB0gAhEAyPAwtB0wAhECABIhQgAkYNjgMgAiAUayAAKAIAIgFqIRYgFCABa0EBaiEXA0AgFC0AACABQdbCgIAAai0AAEcNzAEgAUEBRg3HASABQQFqIQEgFEEBaiIUIAJHDQALIAAgFjYCAAyOAwsCQCABIgEgAkcNAEHVACEQDI4DCyABLQAAQQpHDcwBIAFBAWohAQzHAQsCQCABIgEgAkcNAEHWACEQDI0DCwJAAkAgAS0AAEF2ag4EAM0BzQEBzQELIAFBAWohAQzHAQsgAUEBaiEBQcoAIRAM8wILIAAgASIBIAIQroCAgAAiEA3LASABIQFBzQAhEAzyAgsgAC0AKUEiRg2FAwymAgsCQCABIgEgAkcNAEHbACEQDIoDC0EAIRRBASEXQQEhFkEAIRACQAJAAkACQAJAAkACQAJAAkAgAS0AAEFQag4K1AHTAQABAgMEBQYI1QELQQIhEAwGC0EDIRAMBQtBBCEQDAQLQQUhEAwDC0EGIRAMAgtBByEQDAELQQghEAtBACEXQQAhFkEAIRQMzAELQQkhEEEBIRRBACEXQQAhFgzLAQsCQCABIgEgAkcNAEHdACEQDIkDCyABLQAAQS5HDcwBIAFBAWohAQymAgsgASIBIAJHDcwBQd8AIRAMhwMLAkAgASIBIAJGDQAgAEGOgICAADYCCCAAIAE2AgQgASEBQdAAIRAM7gILQeAAIRAMhgMLQeEAIRAgASIBIAJGDYUDIAIgAWsgACgCACIUaiEWIAEgFGtBA2ohFwNAIAEtAAAgFEHiwoCAAGotAABHDc0BIBRBA0YNzAEgFEEBaiEUIAFBAWoiASACRw0ACyAAIBY2AgAMhQMLQeIAIRAgASIBIAJGDYQDIAIgAWsgACgCACIUaiEWIAEgFGtBAmohFwNAIAEtAAAgFEHmwoCAAGotAABHDcwBIBRBAkYNzgEgFEEBaiEUIAFBAWoiASACRw0ACyAAIBY2AgAMhAMLQeMAIRAgASIBIAJGDYMDIAIgAWsgACgCACIUaiEWIAEgFGtBA2ohFwNAIAEtAAAgFEHpwoCAAGotAABHDcsBIBRBA0YNzgEgFEEBaiEUIAFBAWoiASACRw0ACyAAIBY2AgAMgwMLAkAgASIBIAJHDQBB5QAhEAyDAwsgACABQQFqIgEgAhCogICAACIQDc0BIAEhAUHWACEQDOkCCwJAIAEiASACRg0AA0ACQCABLQAAIhBBIEYNAAJAAkACQCAQQbh/ag4LAAHPAc8BzwHPAc8BzwHPAc8BAs8BCyABQQFqIQFB0gAhEAztAgsgAUEBaiEBQdMAIRAM7AILIAFBAWohAUHUACEQDOsCCyABQQFqIgEgAkcNAAtB5AAhEAyCAwtB5AAhEAyBAwsDQAJAIAEtAABB8MKAgABqLQAAIhBBAUYNACAQQX5qDgPPAdAB0QHSAQsgAUEBaiIBIAJHDQALQeYAIRAMgAMLAkAgASIBIAJGDQAgAUEBaiEBDAMLQecAIRAM/wILA0ACQCABLQAAQfDEgIAAai0AACIQQQFGDQACQCAQQX5qDgTSAdMB1AEA1QELIAEhAUHXACEQDOcCCyABQQFqIgEgAkcNAAtB6AAhEAz+AgsCQCABIgEgAkcNAEHpACEQDP4CCwJAIAEtAAAiEEF2ag4augHVAdUBvAHVAdUB1QHVAdUB1QHVAdUB1QHVAdUB1QHVAdUB1QHVAdUB1QHKAdUB1QEA0wELIAFBAWohAQtBBiEQDOMCCwNAAkAgAS0AAEHwxoCAAGotAABBAUYNACABIQEMngILIAFBAWoiASACRw0AC0HqACEQDPsCCwJAIAEiASACRg0AIAFBAWohAQwDC0HrACEQDPoCCwJAIAEiASACRw0AQewAIRAM+gILIAFBAWohAQwBCwJAIAEiASACRw0
AQe0AIRAM+QILIAFBAWohAQtBBCEQDN4CCwJAIAEiFCACRw0AQe4AIRAM9wILIBQhAQJAAkACQCAULQAAQfDIgIAAai0AAEF/ag4H1AHVAdYBAJwCAQLXAQsgFEEBaiEBDAoLIBRBAWohAQzNAQtBACEQIABBADYCHCAAQZuSgIAANgIQIABBBzYCDCAAIBRBAWo2AhQM9gILAkADQAJAIAEtAABB8MiAgABqLQAAIhBBBEYNAAJAAkAgEEF/ag4H0gHTAdQB2QEABAHZAQsgASEBQdoAIRAM4AILIAFBAWohAUHcACEQDN8CCyABQQFqIgEgAkcNAAtB7wAhEAz2AgsgAUEBaiEBDMsBCwJAIAEiFCACRw0AQfAAIRAM9QILIBQtAABBL0cN1AEgFEEBaiEBDAYLAkAgASIUIAJHDQBB8QAhEAz0AgsCQCAULQAAIgFBL0cNACAUQQFqIQFB3QAhEAzbAgsgAUF2aiIEQRZLDdMBQQEgBHRBiYCAAnFFDdMBDMoCCwJAIAEiASACRg0AIAFBAWohAUHeACEQDNoCC0HyACEQDPICCwJAIAEiFCACRw0AQfQAIRAM8gILIBQhAQJAIBQtAABB8MyAgABqLQAAQX9qDgPJApQCANQBC0HhACEQDNgCCwJAIAEiFCACRg0AA0ACQCAULQAAQfDKgIAAai0AACIBQQNGDQACQCABQX9qDgLLAgDVAQsgFCEBQd8AIRAM2gILIBRBAWoiFCACRw0AC0HzACEQDPECC0HzACEQDPACCwJAIAEiASACRg0AIABBj4CAgAA2AgggACABNgIEIAEhAUHgACEQDNcCC0H1ACEQDO8CCwJAIAEiASACRw0AQfYAIRAM7wILIABBj4CAgAA2AgggACABNgIEIAEhAQtBAyEQDNQCCwNAIAEtAABBIEcNwwIgAUEBaiIBIAJHDQALQfcAIRAM7AILAkAgASIBIAJHDQBB+AAhEAzsAgsgAS0AAEEgRw3OASABQQFqIQEM7wELIAAgASIBIAIQrICAgAAiEA3OASABIQEMjgILAkAgASIEIAJHDQBB+gAhEAzqAgsgBC0AAEHMAEcN0QEgBEEBaiEBQRMhEAzPAQsCQCABIgQgAkcNAEH7ACEQDOkCCyACIARrIAAoAgAiAWohFCAEIAFrQQVqIRADQCAELQAAIAFB8M6AgABqLQAARw3QASABQQVGDc4BIAFBAWohASAEQQFqIgQgAkcNAAsgACAUNgIAQfsAIRAM6AILAkAgASIEIAJHDQBB/AAhEAzoAgsCQAJAIAQtAABBvX9qDgwA0QHRAdEB0QHRAdEB0QHRAdEB0QEB0QELIARBAWohAUHmACEQDM8CCyAEQQFqIQFB5wAhEAzOAgsCQCABIgQgAkcNAEH9ACEQDOcCCyACIARrIAAoAgAiAWohFCAEIAFrQQJqIRACQANAIAQtAAAgAUHtz4CAAGotAABHDc8BIAFBAkYNASABQQFqIQEgBEEBaiIEIAJHDQALIAAgFDYCAEH9ACEQDOcCCyAAQQA2AgAgEEEBaiEBQRAhEAzMAQsCQCABIgQgAkcNAEH+ACEQDOYCCyACIARrIAAoAgAiAWohFCAEIAFrQQVqIRACQANAIAQtAAAgAUH2zoCAAGotAABHDc4BIAFBBUYNASABQQFqIQEgBEEBaiIEIAJHDQALIAAgFDYCAEH+ACEQDOYCCyAAQQA2AgAgEEEBaiEBQRYhEAzLAQsCQCABIgQgAkcNAEH/ACEQDOUCCyACIARrIAAoAgAiAWohFCAEIAFrQQNqIRACQANAIAQtAAAgAUH8zoCAAGotAABHDc0BIAFBA0YNASABQQFqIQEgBEEBaiIEIAJHDQALIAAgFDYCAEH/ACEQDOUCCyAAQQA2AgAgEEEBaiEBQQUhEAzKAQsCQCABIgQgAkcNAEGAASEQDOQCCyAELQAAQdkARw3LASAEQQFqIQFBCCEQDMkBCwJAIAEiBCACRw0AQYEBIRAM4wILAkACQCAELQAAQbJ/ag4DAMwBAcwBCyAEQQFqIQFB6wAhEAzKAgsgBEEBaiEBQewAIRAMyQILAkAgASIEIAJHDQBBggEhEAziAgsCQAJAIAQtAABBuH9qDggAywHLAcsBywHLAcsBAcsBCyAEQQFqIQFB6gAhEAzJAgsgBEEBaiEBQe0AIRAMyAILAkAgASIEIAJHDQBBgwEhEAzhAgsgAiAEayAAKAIAIgFqIRAgBCABa0ECaiEUAkADQCAELQAAIAFBgM+AgABqLQAARw3JASABQQJGDQEgAUEBaiEBIARBAWoiBCACRw0ACyAAIBA2AgBBgwEhEAzhAgtBACEQIABBADYCACAUQQFqIQEMxgELAkAgASIEIAJHDQBBhAEhEAzgAgsgAiAEayAAKAIAIgFqIRQgBCABa0EEaiEQAkADQCAELQAAIAFBg8+AgABqLQAARw3IASABQQRGDQEgAUEBaiEBIARBAWoiBCACRw0ACyAAIBQ2AgBBhAEhEAzgAgsgAEEANgIAIBBBAWohAUEjIRAMxQELAkAgASIEIAJHDQBBhQEhEAzfAgsCQAJAIAQtAABBtH9qDggAyAHIAcgByAHIAcgBAcgBCyAEQQFqIQFB7wAhEAzGAgsgBEEBaiEBQfAAIRAMxQILAkAgASIEIAJHDQBBhgEhEAzeAgsgBC0AAEHFAEcNxQEgBEEBaiEBDIMCCwJAIAEiBCACRw0AQYcBIRAM3QILIAIgBGsgACgCACIBaiEUIAQgAWtBA2ohEAJAA0AgBC0AACABQYjPgIAAai0AAEcNxQEgAUEDRg0BIAFBAWohASAEQQFqIgQgAkcNAAsgACAUNgIAQYcBIRAM3QILIABBADYCACAQQQFqIQFBLSEQDMIBCwJAIAEiBCACRw0AQYgBIRAM3AILIAIgBGsgACgCACIBaiEUIAQgAWtBCGohEAJAA0AgBC0AACABQdDPgIAAai0AAEcNxAEgAUEIRg0BIAFBAWohASAEQQFqIgQgAkcNAAsgACAUNgIAQYgBIRAM3AILIABBADYCACAQQQFqIQFBKSEQDMEBCwJAIAEiASACRw0AQYkBIRAM2wILQQEhECABLQAAQd8ARw3AASABQQFqIQEMgQILAkAgASIEIAJHDQBBigEhEAzaAgsgAiAEayAAKAIAIgFqIRQgBCABa0EBaiEQA0AgBC0AACABQYzPgIAAai0AAEcNwQEgAUEBRg2vAiABQQFqIQEgBEEBaiIEIAJHDQALIAAgFDYCAEGKASEQDNkCCwJAIAEiBCACRw0AQYsBIRAM2QILIAIgBGsgACgCACIBaiEUIAQgAWtBAmohEAJAA0AgBC0AACABQY7PgIAAai0AAEcNwQEgAUECRg0BIAFBAWohASAEQQFqIgQgAkcNAAsgACAUNgIAQYsBIRAM2QILIABBADYCACAQQQFqIQFBAiEQDL4BCwJAIAEiBCACRw0AQYwBIRAM2AILIAIgBGsgACgCACIBaiEUIAQgAWtBAWohEAJAA0AgBC
0AACABQfDPgIAAai0AAEcNwAEgAUEBRg0BIAFBAWohASAEQQFqIgQgAkcNAAsgACAUNgIAQYwBIRAM2AILIABBADYCACAQQQFqIQFBHyEQDL0BCwJAIAEiBCACRw0AQY0BIRAM1wILIAIgBGsgACgCACIBaiEUIAQgAWtBAWohEAJAA0AgBC0AACABQfLPgIAAai0AAEcNvwEgAUEBRg0BIAFBAWohASAEQQFqIgQgAkcNAAsgACAUNgIAQY0BIRAM1wILIABBADYCACAQQQFqIQFBCSEQDLwBCwJAIAEiBCACRw0AQY4BIRAM1gILAkACQCAELQAAQbd/ag4HAL8BvwG/Ab8BvwEBvwELIARBAWohAUH4ACEQDL0CCyAEQQFqIQFB+QAhEAy8AgsCQCABIgQgAkcNAEGPASEQDNUCCyACIARrIAAoAgAiAWohFCAEIAFrQQVqIRACQANAIAQtAAAgAUGRz4CAAGotAABHDb0BIAFBBUYNASABQQFqIQEgBEEBaiIEIAJHDQALIAAgFDYCAEGPASEQDNUCCyAAQQA2AgAgEEEBaiEBQRghEAy6AQsCQCABIgQgAkcNAEGQASEQDNQCCyACIARrIAAoAgAiAWohFCAEIAFrQQJqIRACQANAIAQtAAAgAUGXz4CAAGotAABHDbwBIAFBAkYNASABQQFqIQEgBEEBaiIEIAJHDQALIAAgFDYCAEGQASEQDNQCCyAAQQA2AgAgEEEBaiEBQRchEAy5AQsCQCABIgQgAkcNAEGRASEQDNMCCyACIARrIAAoAgAiAWohFCAEIAFrQQZqIRACQANAIAQtAAAgAUGaz4CAAGotAABHDbsBIAFBBkYNASABQQFqIQEgBEEBaiIEIAJHDQALIAAgFDYCAEGRASEQDNMCCyAAQQA2AgAgEEEBaiEBQRUhEAy4AQsCQCABIgQgAkcNAEGSASEQDNICCyACIARrIAAoAgAiAWohFCAEIAFrQQVqIRACQANAIAQtAAAgAUGhz4CAAGotAABHDboBIAFBBUYNASABQQFqIQEgBEEBaiIEIAJHDQALIAAgFDYCAEGSASEQDNICCyAAQQA2AgAgEEEBaiEBQR4hEAy3AQsCQCABIgQgAkcNAEGTASEQDNECCyAELQAAQcwARw24ASAEQQFqIQFBCiEQDLYBCwJAIAQgAkcNAEGUASEQDNACCwJAAkAgBC0AAEG/f2oODwC5AbkBuQG5AbkBuQG5AbkBuQG5AbkBuQG5AQG5AQsgBEEBaiEBQf4AIRAMtwILIARBAWohAUH/ACEQDLYCCwJAIAQgAkcNAEGVASEQDM8CCwJAAkAgBC0AAEG/f2oOAwC4AQG4AQsgBEEBaiEBQf0AIRAMtgILIARBAWohBEGAASEQDLUCCwJAIAQgAkcNAEGWASEQDM4CCyACIARrIAAoAgAiAWohFCAEIAFrQQFqIRACQANAIAQtAAAgAUGnz4CAAGotAABHDbYBIAFBAUYNASABQQFqIQEgBEEBaiIEIAJHDQALIAAgFDYCAEGWASEQDM4CCyAAQQA2AgAgEEEBaiEBQQshEAyzAQsCQCAEIAJHDQBBlwEhEAzNAgsCQAJAAkACQCAELQAAQVNqDiMAuAG4AbgBuAG4AbgBuAG4AbgBuAG4AbgBuAG4AbgBuAG4AbgBuAG4AbgBuAG4AQG4AbgBuAG4AbgBArgBuAG4AQO4AQsgBEEBaiEBQfsAIRAMtgILIARBAWohAUH8ACEQDLUCCyAEQQFqIQRBgQEhEAy0AgsgBEEBaiEEQYIBIRAMswILAkAgBCACRw0AQZgBIRAMzAILIAIgBGsgACgCACIBaiEUIAQgAWtBBGohEAJAA0AgBC0AACABQanPgIAAai0AAEcNtAEgAUEERg0BIAFBAWohASAEQQFqIgQgAkcNAAsgACAUNgIAQZgBIRAMzAILIABBADYCACAQQQFqIQFBGSEQDLEBCwJAIAQgAkcNAEGZASEQDMsCCyACIARrIAAoAgAiAWohFCAEIAFrQQVqIRACQANAIAQtAAAgAUGuz4CAAGotAABHDbMBIAFBBUYNASABQQFqIQEgBEEBaiIEIAJHDQALIAAgFDYCAEGZASEQDMsCCyAAQQA2AgAgEEEBaiEBQQYhEAywAQsCQCAEIAJHDQBBmgEhEAzKAgsgAiAEayAAKAIAIgFqIRQgBCABa0EBaiEQAkADQCAELQAAIAFBtM+AgABqLQAARw2yASABQQFGDQEgAUEBaiEBIARBAWoiBCACRw0ACyAAIBQ2AgBBmgEhEAzKAgsgAEEANgIAIBBBAWohAUEcIRAMrwELAkAgBCACRw0AQZsBIRAMyQILIAIgBGsgACgCACIBaiEUIAQgAWtBAWohEAJAA0AgBC0AACABQbbPgIAAai0AAEcNsQEgAUEBRg0BIAFBAWohASAEQQFqIgQgAkcNAAsgACAUNgIAQZsBIRAMyQILIABBADYCACAQQQFqIQFBJyEQDK4BCwJAIAQgAkcNAEGcASEQDMgCCwJAAkAgBC0AAEGsf2oOAgABsQELIARBAWohBEGGASEQDK8CCyAEQQFqIQRBhwEhEAyuAgsCQCAEIAJHDQBBnQEhEAzHAgsgAiAEayAAKAIAIgFqIRQgBCABa0EBaiEQAkADQCAELQAAIAFBuM+AgABqLQAARw2vASABQQFGDQEgAUEBaiEBIARBAWoiBCACRw0ACyAAIBQ2AgBBnQEhEAzHAgsgAEEANgIAIBBBAWohAUEmIRAMrAELAkAgBCACRw0AQZ4BIRAMxgILIAIgBGsgACgCACIBaiEUIAQgAWtBAWohEAJAA0AgBC0AACABQbrPgIAAai0AAEcNrgEgAUEBRg0BIAFBAWohASAEQQFqIgQgAkcNAAsgACAUNgIAQZ4BIRAMxgILIABBADYCACAQQQFqIQFBAyEQDKsBCwJAIAQgAkcNAEGfASEQDMUCCyACIARrIAAoAgAiAWohFCAEIAFrQQJqIRACQANAIAQtAAAgAUHtz4CAAGotAABHDa0BIAFBAkYNASABQQFqIQEgBEEBaiIEIAJHDQALIAAgFDYCAEGfASEQDMUCCyAAQQA2AgAgEEEBaiEBQQwhEAyqAQsCQCAEIAJHDQBBoAEhEAzEAgsgAiAEayAAKAIAIgFqIRQgBCABa0EDaiEQAkADQCAELQAAIAFBvM+AgABqLQAARw2sASABQQNGDQEgAUEBaiEBIARBAWoiBCACRw0ACyAAIBQ2AgBBoAEhEAzEAgsgAEEANgIAIBBBAWohAUENIRAMqQELAkAgBCACRw0AQaEBIRAMwwILAkACQCAELQAAQbp/ag4LAKwBrAGsAawBrAGsAawBrAGsAQGsAQsgBEEBaiEEQYsBIRAMqgILIARBAWohBEGMASEQDKkCCwJAIAQgAkcNAEGiASEQDMICCyAELQAAQdAARw2pASAEQQFqIQQM6QELAkAgBCACRw0AQaMBIRAMwQILAkACQCAELQAAQbd/ag4HAaoBqgGqAaoBqgEAq
gELIARBAWohBEGOASEQDKgCCyAEQQFqIQFBIiEQDKYBCwJAIAQgAkcNAEGkASEQDMACCyACIARrIAAoAgAiAWohFCAEIAFrQQFqIRACQANAIAQtAAAgAUHAz4CAAGotAABHDagBIAFBAUYNASABQQFqIQEgBEEBaiIEIAJHDQALIAAgFDYCAEGkASEQDMACCyAAQQA2AgAgEEEBaiEBQR0hEAylAQsCQCAEIAJHDQBBpQEhEAy/AgsCQAJAIAQtAABBrn9qDgMAqAEBqAELIARBAWohBEGQASEQDKYCCyAEQQFqIQFBBCEQDKQBCwJAIAQgAkcNAEGmASEQDL4CCwJAAkACQAJAAkAgBC0AAEG/f2oOFQCqAaoBqgGqAaoBqgGqAaoBqgGqAQGqAaoBAqoBqgEDqgGqAQSqAQsgBEEBaiEEQYgBIRAMqAILIARBAWohBEGJASEQDKcCCyAEQQFqIQRBigEhEAymAgsgBEEBaiEEQY8BIRAMpQILIARBAWohBEGRASEQDKQCCwJAIAQgAkcNAEGnASEQDL0CCyACIARrIAAoAgAiAWohFCAEIAFrQQJqIRACQANAIAQtAAAgAUHtz4CAAGotAABHDaUBIAFBAkYNASABQQFqIQEgBEEBaiIEIAJHDQALIAAgFDYCAEGnASEQDL0CCyAAQQA2AgAgEEEBaiEBQREhEAyiAQsCQCAEIAJHDQBBqAEhEAy8AgsgAiAEayAAKAIAIgFqIRQgBCABa0ECaiEQAkADQCAELQAAIAFBws+AgABqLQAARw2kASABQQJGDQEgAUEBaiEBIARBAWoiBCACRw0ACyAAIBQ2AgBBqAEhEAy8AgsgAEEANgIAIBBBAWohAUEsIRAMoQELAkAgBCACRw0AQakBIRAMuwILIAIgBGsgACgCACIBaiEUIAQgAWtBBGohEAJAA0AgBC0AACABQcXPgIAAai0AAEcNowEgAUEERg0BIAFBAWohASAEQQFqIgQgAkcNAAsgACAUNgIAQakBIRAMuwILIABBADYCACAQQQFqIQFBKyEQDKABCwJAIAQgAkcNAEGqASEQDLoCCyACIARrIAAoAgAiAWohFCAEIAFrQQJqIRACQANAIAQtAAAgAUHKz4CAAGotAABHDaIBIAFBAkYNASABQQFqIQEgBEEBaiIEIAJHDQALIAAgFDYCAEGqASEQDLoCCyAAQQA2AgAgEEEBaiEBQRQhEAyfAQsCQCAEIAJHDQBBqwEhEAy5AgsCQAJAAkACQCAELQAAQb5/ag4PAAECpAGkAaQBpAGkAaQBpAGkAaQBpAGkAQOkAQsgBEEBaiEEQZMBIRAMogILIARBAWohBEGUASEQDKECCyAEQQFqIQRBlQEhEAygAgsgBEEBaiEEQZYBIRAMnwILAkAgBCACRw0AQawBIRAMuAILIAQtAABBxQBHDZ8BIARBAWohBAzgAQsCQCAEIAJHDQBBrQEhEAy3AgsgAiAEayAAKAIAIgFqIRQgBCABa0ECaiEQAkADQCAELQAAIAFBzc+AgABqLQAARw2fASABQQJGDQEgAUEBaiEBIARBAWoiBCACRw0ACyAAIBQ2AgBBrQEhEAy3AgsgAEEANgIAIBBBAWohAUEOIRAMnAELAkAgBCACRw0AQa4BIRAMtgILIAQtAABB0ABHDZ0BIARBAWohAUElIRAMmwELAkAgBCACRw0AQa8BIRAMtQILIAIgBGsgACgCACIBaiEUIAQgAWtBCGohEAJAA0AgBC0AACABQdDPgIAAai0AAEcNnQEgAUEIRg0BIAFBAWohASAEQQFqIgQgAkcNAAsgACAUNgIAQa8BIRAMtQILIABBADYCACAQQQFqIQFBKiEQDJoBCwJAIAQgAkcNAEGwASEQDLQCCwJAAkAgBC0AAEGrf2oOCwCdAZ0BnQGdAZ0BnQGdAZ0BnQEBnQELIARBAWohBEGaASEQDJsCCyAEQQFqIQRBmwEhEAyaAgsCQCAEIAJHDQBBsQEhEAyzAgsCQAJAIAQtAABBv39qDhQAnAGcAZwBnAGcAZwBnAGcAZwBnAGcAZwBnAGcAZwBnAGcAZwBAZwBCyAEQQFqIQRBmQEhEAyaAgsgBEEBaiEEQZwBIRAMmQILAkAgBCACRw0AQbIBIRAMsgILIAIgBGsgACgCACIBaiEUIAQgAWtBA2ohEAJAA0AgBC0AACABQdnPgIAAai0AAEcNmgEgAUEDRg0BIAFBAWohASAEQQFqIgQgAkcNAAsgACAUNgIAQbIBIRAMsgILIABBADYCACAQQQFqIQFBISEQDJcBCwJAIAQgAkcNAEGzASEQDLECCyACIARrIAAoAgAiAWohFCAEIAFrQQZqIRACQANAIAQtAAAgAUHdz4CAAGotAABHDZkBIAFBBkYNASABQQFqIQEgBEEBaiIEIAJHDQALIAAgFDYCAEGzASEQDLECCyAAQQA2AgAgEEEBaiEBQRohEAyWAQsCQCAEIAJHDQBBtAEhEAywAgsCQAJAAkAgBC0AAEG7f2oOEQCaAZoBmgGaAZoBmgGaAZoBmgEBmgGaAZoBmgGaAQKaAQsgBEEBaiEEQZ0BIRAMmAILIARBAWohBEGeASEQDJcCCyAEQQFqIQRBnwEhEAyWAgsCQCAEIAJHDQBBtQEhEAyvAgsgAiAEayAAKAIAIgFqIRQgBCABa0EFaiEQAkADQCAELQAAIAFB5M+AgABqLQAARw2XASABQQVGDQEgAUEBaiEBIARBAWoiBCACRw0ACyAAIBQ2AgBBtQEhEAyvAgsgAEEANgIAIBBBAWohAUEoIRAMlAELAkAgBCACRw0AQbYBIRAMrgILIAIgBGsgACgCACIBaiEUIAQgAWtBAmohEAJAA0AgBC0AACABQerPgIAAai0AAEcNlgEgAUECRg0BIAFBAWohASAEQQFqIgQgAkcNAAsgACAUNgIAQbYBIRAMrgILIABBADYCACAQQQFqIQFBByEQDJMBCwJAIAQgAkcNAEG3ASEQDK0CCwJAAkAgBC0AAEG7f2oODgCWAZYBlgGWAZYBlgGWAZYBlgGWAZYBlgEBlgELIARBAWohBEGhASEQDJQCCyAEQQFqIQRBogEhEAyTAgsCQCAEIAJHDQBBuAEhEAysAgsgAiAEayAAKAIAIgFqIRQgBCABa0ECaiEQAkADQCAELQAAIAFB7c+AgABqLQAARw2UASABQQJGDQEgAUEBaiEBIARBAWoiBCACRw0ACyAAIBQ2AgBBuAEhEAysAgsgAEEANgIAIBBBAWohAUESIRAMkQELAkAgBCACRw0AQbkBIRAMqwILIAIgBGsgACgCACIBaiEUIAQgAWtBAWohEAJAA0AgBC0AACABQfDPgIAAai0AAEcNkwEgAUEBRg0BIAFBAWohASAEQQFqIgQgAkcNAAsgACAUNgIAQbkBIRAMqwILIABBADYCACAQQQFqIQFBICEQDJABCwJAIAQgAkcNAEG6ASEQDKoCCyACIARrIAAoAgAiAWohFCAEIAFrQQFqIRACQANAIAQtAAAg
AUHyz4CAAGotAABHDZIBIAFBAUYNASABQQFqIQEgBEEBaiIEIAJHDQALIAAgFDYCAEG6ASEQDKoCCyAAQQA2AgAgEEEBaiEBQQ8hEAyPAQsCQCAEIAJHDQBBuwEhEAypAgsCQAJAIAQtAABBt39qDgcAkgGSAZIBkgGSAQGSAQsgBEEBaiEEQaUBIRAMkAILIARBAWohBEGmASEQDI8CCwJAIAQgAkcNAEG8ASEQDKgCCyACIARrIAAoAgAiAWohFCAEIAFrQQdqIRACQANAIAQtAAAgAUH0z4CAAGotAABHDZABIAFBB0YNASABQQFqIQEgBEEBaiIEIAJHDQALIAAgFDYCAEG8ASEQDKgCCyAAQQA2AgAgEEEBaiEBQRshEAyNAQsCQCAEIAJHDQBBvQEhEAynAgsCQAJAAkAgBC0AAEG+f2oOEgCRAZEBkQGRAZEBkQGRAZEBkQEBkQGRAZEBkQGRAZEBApEBCyAEQQFqIQRBpAEhEAyPAgsgBEEBaiEEQacBIRAMjgILIARBAWohBEGoASEQDI0CCwJAIAQgAkcNAEG+ASEQDKYCCyAELQAAQc4ARw2NASAEQQFqIQQMzwELAkAgBCACRw0AQb8BIRAMpQILAkACQAJAAkACQAJAAkACQAJAAkACQAJAAkACQAJAAkAgBC0AAEG/f2oOFQABAgOcAQQFBpwBnAGcAQcICQoLnAEMDQ4PnAELIARBAWohAUHoACEQDJoCCyAEQQFqIQFB6QAhEAyZAgsgBEEBaiEBQe4AIRAMmAILIARBAWohAUHyACEQDJcCCyAEQQFqIQFB8wAhEAyWAgsgBEEBaiEBQfYAIRAMlQILIARBAWohAUH3ACEQDJQCCyAEQQFqIQFB+gAhEAyTAgsgBEEBaiEEQYMBIRAMkgILIARBAWohBEGEASEQDJECCyAEQQFqIQRBhQEhEAyQAgsgBEEBaiEEQZIBIRAMjwILIARBAWohBEGYASEQDI4CCyAEQQFqIQRBoAEhEAyNAgsgBEEBaiEEQaMBIRAMjAILIARBAWohBEGqASEQDIsCCwJAIAQgAkYNACAAQZCAgIAANgIIIAAgBDYCBEGrASEQDIsCC0HAASEQDKMCCyAAIAUgAhCqgICAACIBDYsBIAUhAQxcCwJAIAYgAkYNACAGQQFqIQUMjQELQcIBIRAMoQILA0ACQCAQLQAAQXZqDgSMAQAAjwEACyAQQQFqIhAgAkcNAAtBwwEhEAygAgsCQCAHIAJGDQAgAEGRgICAADYCCCAAIAc2AgQgByEBQQEhEAyHAgtBxAEhEAyfAgsCQCAHIAJHDQBBxQEhEAyfAgsCQAJAIActAABBdmoOBAHOAc4BAM4BCyAHQQFqIQYMjQELIAdBAWohBQyJAQsCQCAHIAJHDQBBxgEhEAyeAgsCQAJAIActAABBdmoOFwGPAY8BAY8BjwGPAY8BjwGPAY8BjwGPAY8BjwGPAY8BjwGPAY8BjwGPAQCPAQsgB0EBaiEHC0GwASEQDIQCCwJAIAggAkcNAEHIASEQDJ0CCyAILQAAQSBHDY0BIABBADsBMiAIQQFqIQFBswEhEAyDAgsgASEXAkADQCAXIgcgAkYNASAHLQAAQVBqQf8BcSIQQQpPDcwBAkAgAC8BMiIUQZkzSw0AIAAgFEEKbCIUOwEyIBBB//8DcyAUQf7/A3FJDQAgB0EBaiEXIAAgFCAQaiIQOwEyIBBB//8DcUHoB0kNAQsLQQAhECAAQQA2AhwgAEHBiYCAADYCECAAQQ02AgwgACAHQQFqNgIUDJwCC0HHASEQDJsCCyAAIAggAhCugICAACIQRQ3KASAQQRVHDYwBIABByAE2AhwgACAINgIUIABByZeAgAA2AhAgAEEVNgIMQQAhEAyaAgsCQCAJIAJHDQBBzAEhEAyaAgtBACEUQQEhF0EBIRZBACEQAkACQAJAAkACQAJAAkACQAJAIAktAABBUGoOCpYBlQEAAQIDBAUGCJcBC0ECIRAMBgtBAyEQDAULQQQhEAwEC0EFIRAMAwtBBiEQDAILQQchEAwBC0EIIRALQQAhF0EAIRZBACEUDI4BC0EJIRBBASEUQQAhF0EAIRYMjQELAkAgCiACRw0AQc4BIRAMmQILIAotAABBLkcNjgEgCkEBaiEJDMoBCyALIAJHDY4BQdABIRAMlwILAkAgCyACRg0AIABBjoCAgAA2AgggACALNgIEQbcBIRAM/gELQdEBIRAMlgILAkAgBCACRw0AQdIBIRAMlgILIAIgBGsgACgCACIQaiEUIAQgEGtBBGohCwNAIAQtAAAgEEH8z4CAAGotAABHDY4BIBBBBEYN6QEgEEEBaiEQIARBAWoiBCACRw0ACyAAIBQ2AgBB0gEhEAyVAgsgACAMIAIQrICAgAAiAQ2NASAMIQEMuAELAkAgBCACRw0AQdQBIRAMlAILIAIgBGsgACgCACIQaiEUIAQgEGtBAWohDANAIAQtAAAgEEGB0ICAAGotAABHDY8BIBBBAUYNjgEgEEEBaiEQIARBAWoiBCACRw0ACyAAIBQ2AgBB1AEhEAyTAgsCQCAEIAJHDQBB1gEhEAyTAgsgAiAEayAAKAIAIhBqIRQgBCAQa0ECaiELA0AgBC0AACAQQYPQgIAAai0AAEcNjgEgEEECRg2QASAQQQFqIRAgBEEBaiIEIAJHDQALIAAgFDYCAEHWASEQDJICCwJAIAQgAkcNAEHXASEQDJICCwJAAkAgBC0AAEG7f2oOEACPAY8BjwGPAY8BjwGPAY8BjwGPAY8BjwGPAY8BAY8BCyAEQQFqIQRBuwEhEAz5AQsgBEEBaiEEQbwBIRAM+AELAkAgBCACRw0AQdgBIRAMkQILIAQtAABByABHDYwBIARBAWohBAzEAQsCQCAEIAJGDQAgAEGQgICAADYCCCAAIAQ2AgRBvgEhEAz3AQtB2QEhEAyPAgsCQCAEIAJHDQBB2gEhEAyPAgsgBC0AAEHIAEYNwwEgAEEBOgAoDLkBCyAAQQI6AC8gACAEIAIQpoCAgAAiEA2NAUHCASEQDPQBCyAALQAoQX9qDgK3AbkBuAELA0ACQCAELQAAQXZqDgQAjgGOAQCOAQsgBEEBaiIEIAJHDQALQd0BIRAMiwILIABBADoALyAALQAtQQRxRQ2EAgsgAEEAOgAvIABBAToANCABIQEMjAELIBBBFUYN2gEgAEEANgIcIAAgATYCFCAAQaeOgIAANgIQIABBEjYCDEEAIRAMiAILAkAgACAQIAIQtICAgAAiBA0AIBAhAQyBAgsCQCAEQRVHDQAgAEEDNgIcIAAgEDYCFCAAQbCYgIAANgIQIABBFTYCDEEAIRAMiAILIABBADYCHCAAIBA2AhQgAEGnjoCAADYCECAAQRI2AgxBACEQDIcCCyAQQRVGDdYBIABBADYCHCAAIAE2AhQgAEHajYCAADYCECAAQRQ2AgxBACEQDIYCCyAAKAIEIRcgAEEANgIEIBAgEadqIhYhASAAIBcgECAWIBQbIhAQtYCAgAA
iFEUNjQEgAEEHNgIcIAAgEDYCFCAAIBQ2AgxBACEQDIUCCyAAIAAvATBBgAFyOwEwIAEhAQtBKiEQDOoBCyAQQRVGDdEBIABBADYCHCAAIAE2AhQgAEGDjICAADYCECAAQRM2AgxBACEQDIICCyAQQRVGDc8BIABBADYCHCAAIAE2AhQgAEGaj4CAADYCECAAQSI2AgxBACEQDIECCyAAKAIEIRAgAEEANgIEAkAgACAQIAEQt4CAgAAiEA0AIAFBAWohAQyNAQsgAEEMNgIcIAAgEDYCDCAAIAFBAWo2AhRBACEQDIACCyAQQRVGDcwBIABBADYCHCAAIAE2AhQgAEGaj4CAADYCECAAQSI2AgxBACEQDP8BCyAAKAIEIRAgAEEANgIEAkAgACAQIAEQt4CAgAAiEA0AIAFBAWohAQyMAQsgAEENNgIcIAAgEDYCDCAAIAFBAWo2AhRBACEQDP4BCyAQQRVGDckBIABBADYCHCAAIAE2AhQgAEHGjICAADYCECAAQSM2AgxBACEQDP0BCyAAKAIEIRAgAEEANgIEAkAgACAQIAEQuYCAgAAiEA0AIAFBAWohAQyLAQsgAEEONgIcIAAgEDYCDCAAIAFBAWo2AhRBACEQDPwBCyAAQQA2AhwgACABNgIUIABBwJWAgAA2AhAgAEECNgIMQQAhEAz7AQsgEEEVRg3FASAAQQA2AhwgACABNgIUIABBxoyAgAA2AhAgAEEjNgIMQQAhEAz6AQsgAEEQNgIcIAAgATYCFCAAIBA2AgxBACEQDPkBCyAAKAIEIQQgAEEANgIEAkAgACAEIAEQuYCAgAAiBA0AIAFBAWohAQzxAQsgAEERNgIcIAAgBDYCDCAAIAFBAWo2AhRBACEQDPgBCyAQQRVGDcEBIABBADYCHCAAIAE2AhQgAEHGjICAADYCECAAQSM2AgxBACEQDPcBCyAAKAIEIRAgAEEANgIEAkAgACAQIAEQuYCAgAAiEA0AIAFBAWohAQyIAQsgAEETNgIcIAAgEDYCDCAAIAFBAWo2AhRBACEQDPYBCyAAKAIEIQQgAEEANgIEAkAgACAEIAEQuYCAgAAiBA0AIAFBAWohAQztAQsgAEEUNgIcIAAgBDYCDCAAIAFBAWo2AhRBACEQDPUBCyAQQRVGDb0BIABBADYCHCAAIAE2AhQgAEGaj4CAADYCECAAQSI2AgxBACEQDPQBCyAAKAIEIRAgAEEANgIEAkAgACAQIAEQt4CAgAAiEA0AIAFBAWohAQyGAQsgAEEWNgIcIAAgEDYCDCAAIAFBAWo2AhRBACEQDPMBCyAAKAIEIQQgAEEANgIEAkAgACAEIAEQt4CAgAAiBA0AIAFBAWohAQzpAQsgAEEXNgIcIAAgBDYCDCAAIAFBAWo2AhRBACEQDPIBCyAAQQA2AhwgACABNgIUIABBzZOAgAA2AhAgAEEMNgIMQQAhEAzxAQtCASERCyAQQQFqIQECQCAAKQMgIhJC//////////8PVg0AIAAgEkIEhiARhDcDICABIQEMhAELIABBADYCHCAAIAE2AhQgAEGtiYCAADYCECAAQQw2AgxBACEQDO8BCyAAQQA2AhwgACAQNgIUIABBzZOAgAA2AhAgAEEMNgIMQQAhEAzuAQsgACgCBCEXIABBADYCBCAQIBGnaiIWIQEgACAXIBAgFiAUGyIQELWAgIAAIhRFDXMgAEEFNgIcIAAgEDYCFCAAIBQ2AgxBACEQDO0BCyAAQQA2AhwgACAQNgIUIABBqpyAgAA2AhAgAEEPNgIMQQAhEAzsAQsgACAQIAIQtICAgAAiAQ0BIBAhAQtBDiEQDNEBCwJAIAFBFUcNACAAQQI2AhwgACAQNgIUIABBsJiAgAA2AhAgAEEVNgIMQQAhEAzqAQsgAEEANgIcIAAgEDYCFCAAQaeOgIAANgIQIABBEjYCDEEAIRAM6QELIAFBAWohEAJAIAAvATAiAUGAAXFFDQACQCAAIBAgAhC7gICAACIBDQAgECEBDHALIAFBFUcNugEgAEEFNgIcIAAgEDYCFCAAQfmXgIAANgIQIABBFTYCDEEAIRAM6QELAkAgAUGgBHFBoARHDQAgAC0ALUECcQ0AIABBADYCHCAAIBA2AhQgAEGWk4CAADYCECAAQQQ2AgxBACEQDOkBCyAAIBAgAhC9gICAABogECEBAkACQAJAAkACQCAAIBAgAhCzgICAAA4WAgEABAQEBAQEBAQEBAQEBAQEBAQEAwQLIABBAToALgsgACAALwEwQcAAcjsBMCAQIQELQSYhEAzRAQsgAEEjNgIcIAAgEDYCFCAAQaWWgIAANgIQIABBFTYCDEEAIRAM6QELIABBADYCHCAAIBA2AhQgAEHVi4CAADYCECAAQRE2AgxBACEQDOgBCyAALQAtQQFxRQ0BQcMBIRAMzgELAkAgDSACRg0AA0ACQCANLQAAQSBGDQAgDSEBDMQBCyANQQFqIg0gAkcNAAtBJSEQDOcBC0ElIRAM5gELIAAoAgQhBCAAQQA2AgQgACAEIA0Qr4CAgAAiBEUNrQEgAEEmNgIcIAAgBDYCDCAAIA1BAWo2AhRBACEQDOUBCyAQQRVGDasBIABBADYCHCAAIAE2AhQgAEH9jYCAADYCECAAQR02AgxBACEQDOQBCyAAQSc2AhwgACABNgIUIAAgEDYCDEEAIRAM4wELIBAhAUEBIRQCQAJAAkACQAJAAkACQCAALQAsQX5qDgcGBQUDAQIABQsgACAALwEwQQhyOwEwDAMLQQIhFAwBC0EEIRQLIABBAToALCAAIAAvATAgFHI7ATALIBAhAQtBKyEQDMoBCyAAQQA2AhwgACAQNgIUIABBq5KAgAA2AhAgAEELNgIMQQAhEAziAQsgAEEANgIcIAAgATYCFCAAQeGPgIAANgIQIABBCjYCDEEAIRAM4QELIABBADoALCAQIQEMvQELIBAhAUEBIRQCQAJAAkACQAJAIAAtACxBe2oOBAMBAgAFCyAAIAAvATBBCHI7ATAMAwtBAiEUDAELQQQhFAsgAEEBOgAsIAAgAC8BMCAUcjsBMAsgECEBC0EpIRAMxQELIABBADYCHCAAIAE2AhQgAEHwlICAADYCECAAQQM2AgxBACEQDN0BCwJAIA4tAABBDUcNACAAKAIEIQEgAEEANgIEAkAgACABIA4QsYCAgAAiAQ0AIA5BAWohAQx1CyAAQSw2AhwgACABNgIMIAAgDkEBajYCFEEAIRAM3QELIAAtAC1BAXFFDQFBxAEhEAzDAQsCQCAOIAJHDQBBLSEQDNwBCwJAAkADQAJAIA4tAABBdmoOBAIAAAMACyAOQQFqIg4gAkcNAAtBLSEQDN0BCyAAKAIEIQEgAEEANgIEAkAgACABIA4QsYCAgAAiAQ0AIA4hAQx0CyAAQSw2AhwgACAONgIUIAAgATYCDEEAIRAM3AELIAAoAgQhASAAQQA2AgQCQCAAIAEgDhCxgICAACIBDQAgDkEBaiEBDHMLIABBLDYCHCAAIAE2AgwgAC
AOQQFqNgIUQQAhEAzbAQsgACgCBCEEIABBADYCBCAAIAQgDhCxgICAACIEDaABIA4hAQzOAQsgEEEsRw0BIAFBAWohEEEBIQECQAJAAkACQAJAIAAtACxBe2oOBAMBAgQACyAQIQEMBAtBAiEBDAELQQQhAQsgAEEBOgAsIAAgAC8BMCABcjsBMCAQIQEMAQsgACAALwEwQQhyOwEwIBAhAQtBOSEQDL8BCyAAQQA6ACwgASEBC0E0IRAMvQELIAAgAC8BMEEgcjsBMCABIQEMAgsgACgCBCEEIABBADYCBAJAIAAgBCABELGAgIAAIgQNACABIQEMxwELIABBNzYCHCAAIAE2AhQgACAENgIMQQAhEAzUAQsgAEEIOgAsIAEhAQtBMCEQDLkBCwJAIAAtAChBAUYNACABIQEMBAsgAC0ALUEIcUUNkwEgASEBDAMLIAAtADBBIHENlAFBxQEhEAy3AQsCQCAPIAJGDQACQANAAkAgDy0AAEFQaiIBQf8BcUEKSQ0AIA8hAUE1IRAMugELIAApAyAiEUKZs+bMmbPmzBlWDQEgACARQgp+IhE3AyAgESABrUL/AYMiEkJ/hVYNASAAIBEgEnw3AyAgD0EBaiIPIAJHDQALQTkhEAzRAQsgACgCBCECIABBADYCBCAAIAIgD0EBaiIEELGAgIAAIgINlQEgBCEBDMMBC0E5IRAMzwELAkAgAC8BMCIBQQhxRQ0AIAAtAChBAUcNACAALQAtQQhxRQ2QAQsgACABQff7A3FBgARyOwEwIA8hAQtBNyEQDLQBCyAAIAAvATBBEHI7ATAMqwELIBBBFUYNiwEgAEEANgIcIAAgATYCFCAAQfCOgIAANgIQIABBHDYCDEEAIRAMywELIABBwwA2AhwgACABNgIMIAAgDUEBajYCFEEAIRAMygELAkAgAS0AAEE6Rw0AIAAoAgQhECAAQQA2AgQCQCAAIBAgARCvgICAACIQDQAgAUEBaiEBDGMLIABBwwA2AhwgACAQNgIMIAAgAUEBajYCFEEAIRAMygELIABBADYCHCAAIAE2AhQgAEGxkYCAADYCECAAQQo2AgxBACEQDMkBCyAAQQA2AhwgACABNgIUIABBoJmAgAA2AhAgAEEeNgIMQQAhEAzIAQsgAEEANgIACyAAQYASOwEqIAAgF0EBaiIBIAIQqICAgAAiEA0BIAEhAQtBxwAhEAysAQsgEEEVRw2DASAAQdEANgIcIAAgATYCFCAAQeOXgIAANgIQIABBFTYCDEEAIRAMxAELIAAoAgQhECAAQQA2AgQCQCAAIBAgARCngICAACIQDQAgASEBDF4LIABB0gA2AhwgACABNgIUIAAgEDYCDEEAIRAMwwELIABBADYCHCAAIBQ2AhQgAEHBqICAADYCECAAQQc2AgwgAEEANgIAQQAhEAzCAQsgACgCBCEQIABBADYCBAJAIAAgECABEKeAgIAAIhANACABIQEMXQsgAEHTADYCHCAAIAE2AhQgACAQNgIMQQAhEAzBAQtBACEQIABBADYCHCAAIAE2AhQgAEGAkYCAADYCECAAQQk2AgwMwAELIBBBFUYNfSAAQQA2AhwgACABNgIUIABBlI2AgAA2AhAgAEEhNgIMQQAhEAy/AQtBASEWQQAhF0EAIRRBASEQCyAAIBA6ACsgAUEBaiEBAkACQCAALQAtQRBxDQACQAJAAkAgAC0AKg4DAQACBAsgFkUNAwwCCyAUDQEMAgsgF0UNAQsgACgCBCEQIABBADYCBAJAIAAgECABEK2AgIAAIhANACABIQEMXAsgAEHYADYCHCAAIAE2AhQgACAQNgIMQQAhEAy+AQsgACgCBCEEIABBADYCBAJAIAAgBCABEK2AgIAAIgQNACABIQEMrQELIABB2QA2AhwgACABNgIUIAAgBDYCDEEAIRAMvQELIAAoAgQhBCAAQQA2AgQCQCAAIAQgARCtgICAACIEDQAgASEBDKsBCyAAQdoANgIcIAAgATYCFCAAIAQ2AgxBACEQDLwBCyAAKAIEIQQgAEEANgIEAkAgACAEIAEQrYCAgAAiBA0AIAEhAQypAQsgAEHcADYCHCAAIAE2AhQgACAENgIMQQAhEAy7AQsCQCABLQAAQVBqIhBB/wFxQQpPDQAgACAQOgAqIAFBAWohAUHPACEQDKIBCyAAKAIEIQQgAEEANgIEAkAgACAEIAEQrYCAgAAiBA0AIAEhAQynAQsgAEHeADYCHCAAIAE2AhQgACAENgIMQQAhEAy6AQsgAEEANgIAIBdBAWohAQJAIAAtAClBI08NACABIQEMWQsgAEEANgIcIAAgATYCFCAAQdOJgIAANgIQIABBCDYCDEEAIRAMuQELIABBADYCAAtBACEQIABBADYCHCAAIAE2AhQgAEGQs4CAADYCECAAQQg2AgwMtwELIABBADYCACAXQQFqIQECQCAALQApQSFHDQAgASEBDFYLIABBADYCHCAAIAE2AhQgAEGbioCAADYCECAAQQg2AgxBACEQDLYBCyAAQQA2AgAgF0EBaiEBAkAgAC0AKSIQQV1qQQtPDQAgASEBDFULAkAgEEEGSw0AQQEgEHRBygBxRQ0AIAEhAQxVC0EAIRAgAEEANgIcIAAgATYCFCAAQfeJgIAANgIQIABBCDYCDAy1AQsgEEEVRg1xIABBADYCHCAAIAE2AhQgAEG5jYCAADYCECAAQRo2AgxBACEQDLQBCyAAKAIEIRAgAEEANgIEAkAgACAQIAEQp4CAgAAiEA0AIAEhAQxUCyAAQeUANgIcIAAgATYCFCAAIBA2AgxBACEQDLMBCyAAKAIEIRAgAEEANgIEAkAgACAQIAEQp4CAgAAiEA0AIAEhAQxNCyAAQdIANgIcIAAgATYCFCAAIBA2AgxBACEQDLIBCyAAKAIEIRAgAEEANgIEAkAgACAQIAEQp4CAgAAiEA0AIAEhAQxNCyAAQdMANgIcIAAgATYCFCAAIBA2AgxBACEQDLEBCyAAKAIEIRAgAEEANgIEAkAgACAQIAEQp4CAgAAiEA0AIAEhAQxRCyAAQeUANgIcIAAgATYCFCAAIBA2AgxBACEQDLABCyAAQQA2AhwgACABNgIUIABBxoqAgAA2AhAgAEEHNgIMQQAhEAyvAQsgACgCBCEQIABBADYCBAJAIAAgECABEKeAgIAAIhANACABIQEMSQsgAEHSADYCHCAAIAE2AhQgACAQNgIMQQAhEAyuAQsgACgCBCEQIABBADYCBAJAIAAgECABEKeAgIAAIhANACABIQEMSQsgAEHTADYCHCAAIAE2AhQgACAQNgIMQQAhEAytAQsgACgCBCEQIABBADYCBAJAIAAgECABEKeAgIAAIhANACABIQEMTQsgAEHlADYCHCAAIAE2AhQgACAQNgIMQQAhEAysAQsgAEEANgIcIAAgATYCFCAAQdyIgIAANgIQIABBBzYCDEEAIRAMqwELIBBBP0cNASABQQFqIQELQQUhEAyQAQtBACEQIABBADYCHCAAIAE2A
hQgAEH9koCAADYCECAAQQc2AgwMqAELIAAoAgQhECAAQQA2AgQCQCAAIBAgARCngICAACIQDQAgASEBDEILIABB0gA2AhwgACABNgIUIAAgEDYCDEEAIRAMpwELIAAoAgQhECAAQQA2AgQCQCAAIBAgARCngICAACIQDQAgASEBDEILIABB0wA2AhwgACABNgIUIAAgEDYCDEEAIRAMpgELIAAoAgQhECAAQQA2AgQCQCAAIBAgARCngICAACIQDQAgASEBDEYLIABB5QA2AhwgACABNgIUIAAgEDYCDEEAIRAMpQELIAAoAgQhASAAQQA2AgQCQCAAIAEgFBCngICAACIBDQAgFCEBDD8LIABB0gA2AhwgACAUNgIUIAAgATYCDEEAIRAMpAELIAAoAgQhASAAQQA2AgQCQCAAIAEgFBCngICAACIBDQAgFCEBDD8LIABB0wA2AhwgACAUNgIUIAAgATYCDEEAIRAMowELIAAoAgQhASAAQQA2AgQCQCAAIAEgFBCngICAACIBDQAgFCEBDEMLIABB5QA2AhwgACAUNgIUIAAgATYCDEEAIRAMogELIABBADYCHCAAIBQ2AhQgAEHDj4CAADYCECAAQQc2AgxBACEQDKEBCyAAQQA2AhwgACABNgIUIABBw4+AgAA2AhAgAEEHNgIMQQAhEAygAQtBACEQIABBADYCHCAAIBQ2AhQgAEGMnICAADYCECAAQQc2AgwMnwELIABBADYCHCAAIBQ2AhQgAEGMnICAADYCECAAQQc2AgxBACEQDJ4BCyAAQQA2AhwgACAUNgIUIABB/pGAgAA2AhAgAEEHNgIMQQAhEAydAQsgAEEANgIcIAAgATYCFCAAQY6bgIAANgIQIABBBjYCDEEAIRAMnAELIBBBFUYNVyAAQQA2AhwgACABNgIUIABBzI6AgAA2AhAgAEEgNgIMQQAhEAybAQsgAEEANgIAIBBBAWohAUEkIRALIAAgEDoAKSAAKAIEIRAgAEEANgIEIAAgECABEKuAgIAAIhANVCABIQEMPgsgAEEANgIAC0EAIRAgAEEANgIcIAAgBDYCFCAAQfGbgIAANgIQIABBBjYCDAyXAQsgAUEVRg1QIABBADYCHCAAIAU2AhQgAEHwjICAADYCECAAQRs2AgxBACEQDJYBCyAAKAIEIQUgAEEANgIEIAAgBSAQEKmAgIAAIgUNASAQQQFqIQULQa0BIRAMewsgAEHBATYCHCAAIAU2AgwgACAQQQFqNgIUQQAhEAyTAQsgACgCBCEGIABBADYCBCAAIAYgEBCpgICAACIGDQEgEEEBaiEGC0GuASEQDHgLIABBwgE2AhwgACAGNgIMIAAgEEEBajYCFEEAIRAMkAELIABBADYCHCAAIAc2AhQgAEGXi4CAADYCECAAQQ02AgxBACEQDI8BCyAAQQA2AhwgACAINgIUIABB45CAgAA2AhAgAEEJNgIMQQAhEAyOAQsgAEEANgIcIAAgCDYCFCAAQZSNgIAANgIQIABBITYCDEEAIRAMjQELQQEhFkEAIRdBACEUQQEhEAsgACAQOgArIAlBAWohCAJAAkAgAC0ALUEQcQ0AAkACQAJAIAAtACoOAwEAAgQLIBZFDQMMAgsgFA0BDAILIBdFDQELIAAoAgQhECAAQQA2AgQgACAQIAgQrYCAgAAiEEUNPSAAQckBNgIcIAAgCDYCFCAAIBA2AgxBACEQDIwBCyAAKAIEIQQgAEEANgIEIAAgBCAIEK2AgIAAIgRFDXYgAEHKATYCHCAAIAg2AhQgACAENgIMQQAhEAyLAQsgACgCBCEEIABBADYCBCAAIAQgCRCtgICAACIERQ10IABBywE2AhwgACAJNgIUIAAgBDYCDEEAIRAMigELIAAoAgQhBCAAQQA2AgQgACAEIAoQrYCAgAAiBEUNciAAQc0BNgIcIAAgCjYCFCAAIAQ2AgxBACEQDIkBCwJAIAstAABBUGoiEEH/AXFBCk8NACAAIBA6ACogC0EBaiEKQbYBIRAMcAsgACgCBCEEIABBADYCBCAAIAQgCxCtgICAACIERQ1wIABBzwE2AhwgACALNgIUIAAgBDYCDEEAIRAMiAELIABBADYCHCAAIAQ2AhQgAEGQs4CAADYCECAAQQg2AgwgAEEANgIAQQAhEAyHAQsgAUEVRg0/IABBADYCHCAAIAw2AhQgAEHMjoCAADYCECAAQSA2AgxBACEQDIYBCyAAQYEEOwEoIAAoAgQhECAAQgA3AwAgACAQIAxBAWoiDBCrgICAACIQRQ04IABB0wE2AhwgACAMNgIUIAAgEDYCDEEAIRAMhQELIABBADYCAAtBACEQIABBADYCHCAAIAQ2AhQgAEHYm4CAADYCECAAQQg2AgwMgwELIAAoAgQhECAAQgA3AwAgACAQIAtBAWoiCxCrgICAACIQDQFBxgEhEAxpCyAAQQI6ACgMVQsgAEHVATYCHCAAIAs2AhQgACAQNgIMQQAhEAyAAQsgEEEVRg03IABBADYCHCAAIAQ2AhQgAEGkjICAADYCECAAQRA2AgxBACEQDH8LIAAtADRBAUcNNCAAIAQgAhC8gICAACIQRQ00IBBBFUcNNSAAQdwBNgIcIAAgBDYCFCAAQdWWgIAANgIQIABBFTYCDEEAIRAMfgtBACEQIABBADYCHCAAQa+LgIAANgIQIABBAjYCDCAAIBRBAWo2AhQMfQtBACEQDGMLQQIhEAxiC0ENIRAMYQtBDyEQDGALQSUhEAxfC0ETIRAMXgtBFSEQDF0LQRYhEAxcC0EXIRAMWwtBGCEQDFoLQRkhEAxZC0EaIRAMWAtBGyEQDFcLQRwhEAxWC0EdIRAMVQtBHyEQDFQLQSEhEAxTC0EjIRAMUgtBxgAhEAxRC0EuIRAMUAtBLyEQDE8LQTshEAxOC0E9IRAMTQtByAAhEAxMC0HJACEQDEsLQcsAIRAMSgtBzAAhEAxJC0HOACEQDEgLQdEAIRAMRwtB1QAhEAxGC0HYACEQDEULQdkAIRAMRAtB2wAhEAxDC0HkACEQDEILQeUAIRAMQQtB8QAhEAxAC0H0ACEQDD8LQY0BIRAMPgtBlwEhEAw9C0GpASEQDDwLQawBIRAMOwtBwAEhEAw6C0G5ASEQDDkLQa8BIRAMOAtBsQEhEAw3C0GyASEQDDYLQbQBIRAMNQtBtQEhEAw0C0G6ASEQDDMLQb0BIRAMMgtBvwEhEAwxC0HBASEQDDALIABBADYCHCAAIAQ2AhQgAEHpi4CAADYCECAAQR82AgxBACEQDEgLIABB2wE2AhwgACAENgIUIABB+paAgAA2AhAgAEEVNgIMQQAhEAxHCyAAQfgANgIcIAAgDDYCFCAAQcqYgIAANgIQIABBFTYCDEEAIRAMRgsgAEHRADYCHCAAIAU2AhQgAEGwl4CAADYCECAAQRU2AgxBACEQDEULIABB+QA2AhwgACABNgIUIAAgEDYCDEEAIRAMRAsgAEH4ADYCHCAAIAE2AhQgAEHKmICAADYCECAAQRU2
AgxBACEQDEMLIABB5AA2AhwgACABNgIUIABB45eAgAA2AhAgAEEVNgIMQQAhEAxCCyAAQdcANgIcIAAgATYCFCAAQcmXgIAANgIQIABBFTYCDEEAIRAMQQsgAEEANgIcIAAgATYCFCAAQbmNgIAANgIQIABBGjYCDEEAIRAMQAsgAEHCADYCHCAAIAE2AhQgAEHjmICAADYCECAAQRU2AgxBACEQDD8LIABBADYCBCAAIA8gDxCxgICAACIERQ0BIABBOjYCHCAAIAQ2AgwgACAPQQFqNgIUQQAhEAw+CyAAKAIEIQQgAEEANgIEAkAgACAEIAEQsYCAgAAiBEUNACAAQTs2AhwgACAENgIMIAAgAUEBajYCFEEAIRAMPgsgAUEBaiEBDC0LIA9BAWohAQwtCyAAQQA2AhwgACAPNgIUIABB5JKAgAA2AhAgAEEENgIMQQAhEAw7CyAAQTY2AhwgACAENgIUIAAgAjYCDEEAIRAMOgsgAEEuNgIcIAAgDjYCFCAAIAQ2AgxBACEQDDkLIABB0AA2AhwgACABNgIUIABBkZiAgAA2AhAgAEEVNgIMQQAhEAw4CyANQQFqIQEMLAsgAEEVNgIcIAAgATYCFCAAQYKZgIAANgIQIABBFTYCDEEAIRAMNgsgAEEbNgIcIAAgATYCFCAAQZGXgIAANgIQIABBFTYCDEEAIRAMNQsgAEEPNgIcIAAgATYCFCAAQZGXgIAANgIQIABBFTYCDEEAIRAMNAsgAEELNgIcIAAgATYCFCAAQZGXgIAANgIQIABBFTYCDEEAIRAMMwsgAEEaNgIcIAAgATYCFCAAQYKZgIAANgIQIABBFTYCDEEAIRAMMgsgAEELNgIcIAAgATYCFCAAQYKZgIAANgIQIABBFTYCDEEAIRAMMQsgAEEKNgIcIAAgATYCFCAAQeSWgIAANgIQIABBFTYCDEEAIRAMMAsgAEEeNgIcIAAgATYCFCAAQfmXgIAANgIQIABBFTYCDEEAIRAMLwsgAEEANgIcIAAgEDYCFCAAQdqNgIAANgIQIABBFDYCDEEAIRAMLgsgAEEENgIcIAAgATYCFCAAQbCYgIAANgIQIABBFTYCDEEAIRAMLQsgAEEANgIAIAtBAWohCwtBuAEhEAwSCyAAQQA2AgAgEEEBaiEBQfUAIRAMEQsgASEBAkAgAC0AKUEFRw0AQeMAIRAMEQtB4gAhEAwQC0EAIRAgAEEANgIcIABB5JGAgAA2AhAgAEEHNgIMIAAgFEEBajYCFAwoCyAAQQA2AgAgF0EBaiEBQcAAIRAMDgtBASEBCyAAIAE6ACwgAEEANgIAIBdBAWohAQtBKCEQDAsLIAEhAQtBOCEQDAkLAkAgASIPIAJGDQADQAJAIA8tAABBgL6AgABqLQAAIgFBAUYNACABQQJHDQMgD0EBaiEBDAQLIA9BAWoiDyACRw0AC0E+IRAMIgtBPiEQDCELIABBADoALCAPIQEMAQtBCyEQDAYLQTohEAwFCyABQQFqIQFBLSEQDAQLIAAgAToALCAAQQA2AgAgFkEBaiEBQQwhEAwDCyAAQQA2AgAgF0EBaiEBQQohEAwCCyAAQQA2AgALIABBADoALCANIQFBCSEQDAALC0EAIRAgAEEANgIcIAAgCzYCFCAAQc2QgIAANgIQIABBCTYCDAwXC0EAIRAgAEEANgIcIAAgCjYCFCAAQemKgIAANgIQIABBCTYCDAwWC0EAIRAgAEEANgIcIAAgCTYCFCAAQbeQgIAANgIQIABBCTYCDAwVC0EAIRAgAEEANgIcIAAgCDYCFCAAQZyRgIAANgIQIABBCTYCDAwUC0EAIRAgAEEANgIcIAAgATYCFCAAQc2QgIAANgIQIABBCTYCDAwTC0EAIRAgAEEANgIcIAAgATYCFCAAQemKgIAANgIQIABBCTYCDAwSC0EAIRAgAEEANgIcIAAgATYCFCAAQbeQgIAANgIQIABBCTYCDAwRC0EAIRAgAEEANgIcIAAgATYCFCAAQZyRgIAANgIQIABBCTYCDAwQC0EAIRAgAEEANgIcIAAgATYCFCAAQZeVgIAANgIQIABBDzYCDAwPC0EAIRAgAEEANgIcIAAgATYCFCAAQZeVgIAANgIQIABBDzYCDAwOC0EAIRAgAEEANgIcIAAgATYCFCAAQcCSgIAANgIQIABBCzYCDAwNC0EAIRAgAEEANgIcIAAgATYCFCAAQZWJgIAANgIQIABBCzYCDAwMC0EAIRAgAEEANgIcIAAgATYCFCAAQeGPgIAANgIQIABBCjYCDAwLC0EAIRAgAEEANgIcIAAgATYCFCAAQfuPgIAANgIQIABBCjYCDAwKC0EAIRAgAEEANgIcIAAgATYCFCAAQfGZgIAANgIQIABBAjYCDAwJC0EAIRAgAEEANgIcIAAgATYCFCAAQcSUgIAANgIQIABBAjYCDAwIC0EAIRAgAEEANgIcIAAgATYCFCAAQfKVgIAANgIQIABBAjYCDAwHCyAAQQI2AhwgACABNgIUIABBnJqAgAA2AhAgAEEWNgIMQQAhEAwGC0EBIRAMBQtB1AAhECABIgQgAkYNBCADQQhqIAAgBCACQdjCgIAAQQoQxYCAgAAgAygCDCEEIAMoAggOAwEEAgALEMqAgIAAAAsgAEEANgIcIABBtZqAgAA2AhAgAEEXNgIMIAAgBEEBajYCFEEAIRAMAgsgAEEANgIcIAAgBDYCFCAAQcqagIAANgIQIABBCTYCDEEAIRAMAQsCQCABIgQgAkcNAEEiIRAMAQsgAEGJgICAADYCCCAAIAQ2AgRBISEQCyADQRBqJICAgIAAIBALrwEBAn8gASgCACEGAkACQCACIANGDQAgBCAGaiEEIAYgA2ogAmshByACIAZBf3MgBWoiBmohBQNAAkAgAi0AACAELQAARg0AQQIhBAwDCwJAIAYNAEEAIQQgBSECDAMLIAZBf2ohBiAEQQFqIQQgAkEBaiICIANHDQALIAchBiADIQILIABBATYCACABIAY2AgAgACACNgIEDwsgAUEANgIAIAAgBDYCACAAIAI2AgQLCgAgABDHgICAAAvyNgELfyOAgICAAEEQayIBJICAgIAAAkBBACgCoNCAgAANAEEAEMuAgIAAQYDUhIAAayICQdkASQ0AQQAhAwJAQQAoAuDTgIAAIgQNAEEAQn83AuzTgIAAQQBCgICEgICAwAA3AuTTgIAAQQAgAUEIakFwcUHYqtWqBXMiBDYC4NOAgABBAEEANgL004CAAEEAQQA2AsTTgIAAC0EAIAI2AszTgIAAQQBBgNSEgAA2AsjTgIAAQQBBgNSEgAA2ApjQgIAAQQAgBDYCrNCAgABBAEF/NgKo0ICAAANAIANBxNCAgABqIANBuNCAgABqIgQ2AgAgBCADQbDQgIAAaiIFNgIAIANBvNCAgABqIAU2AgAgA0HM0ICAAGogA0HA0ICAAGoiBTYCACAFIAQ2AgAgA0HU0ICAAGogA0HI0ICAAGoiBDYCACAEIAU2AgAgA0HQ0IC
AAGogBDYCACADQSBqIgNBgAJHDQALQYDUhIAAQXhBgNSEgABrQQ9xQQBBgNSEgABBCGpBD3EbIgNqIgRBBGogAkFIaiIFIANrIgNBAXI2AgBBAEEAKALw04CAADYCpNCAgABBACADNgKU0ICAAEEAIAQ2AqDQgIAAQYDUhIAAIAVqQTg2AgQLAkACQAJAAkACQAJAAkACQAJAAkACQAJAIABB7AFLDQACQEEAKAKI0ICAACIGQRAgAEETakFwcSAAQQtJGyICQQN2IgR2IgNBA3FFDQACQAJAIANBAXEgBHJBAXMiBUEDdCIEQbDQgIAAaiIDIARBuNCAgABqKAIAIgQoAggiAkcNAEEAIAZBfiAFd3E2AojQgIAADAELIAMgAjYCCCACIAM2AgwLIARBCGohAyAEIAVBA3QiBUEDcjYCBCAEIAVqIgQgBCgCBEEBcjYCBAwMCyACQQAoApDQgIAAIgdNDQECQCADRQ0AAkACQCADIAR0QQIgBHQiA0EAIANrcnEiA0EAIANrcUF/aiIDIANBDHZBEHEiA3YiBEEFdkEIcSIFIANyIAQgBXYiA0ECdkEEcSIEciADIAR2IgNBAXZBAnEiBHIgAyAEdiIDQQF2QQFxIgRyIAMgBHZqIgRBA3QiA0Gw0ICAAGoiBSADQbjQgIAAaigCACIDKAIIIgBHDQBBACAGQX4gBHdxIgY2AojQgIAADAELIAUgADYCCCAAIAU2AgwLIAMgAkEDcjYCBCADIARBA3QiBGogBCACayIFNgIAIAMgAmoiACAFQQFyNgIEAkAgB0UNACAHQXhxQbDQgIAAaiECQQAoApzQgIAAIQQCQAJAIAZBASAHQQN2dCIIcQ0AQQAgBiAIcjYCiNCAgAAgAiEIDAELIAIoAgghCAsgCCAENgIMIAIgBDYCCCAEIAI2AgwgBCAINgIICyADQQhqIQNBACAANgKc0ICAAEEAIAU2ApDQgIAADAwLQQAoAozQgIAAIglFDQEgCUEAIAlrcUF/aiIDIANBDHZBEHEiA3YiBEEFdkEIcSIFIANyIAQgBXYiA0ECdkEEcSIEciADIAR2IgNBAXZBAnEiBHIgAyAEdiIDQQF2QQFxIgRyIAMgBHZqQQJ0QbjSgIAAaigCACIAKAIEQXhxIAJrIQQgACEFAkADQAJAIAUoAhAiAw0AIAVBFGooAgAiA0UNAgsgAygCBEF4cSACayIFIAQgBSAESSIFGyEEIAMgACAFGyEAIAMhBQwACwsgACgCGCEKAkAgACgCDCIIIABGDQAgACgCCCIDQQAoApjQgIAASRogCCADNgIIIAMgCDYCDAwLCwJAIABBFGoiBSgCACIDDQAgACgCECIDRQ0DIABBEGohBQsDQCAFIQsgAyIIQRRqIgUoAgAiAw0AIAhBEGohBSAIKAIQIgMNAAsgC0EANgIADAoLQX8hAiAAQb9/Sw0AIABBE2oiA0FwcSECQQAoAozQgIAAIgdFDQBBACELAkAgAkGAAkkNAEEfIQsgAkH///8HSw0AIANBCHYiAyADQYD+P2pBEHZBCHEiA3QiBCAEQYDgH2pBEHZBBHEiBHQiBSAFQYCAD2pBEHZBAnEiBXRBD3YgAyAEciAFcmsiA0EBdCACIANBFWp2QQFxckEcaiELC0EAIAJrIQQCQAJAAkACQCALQQJ0QbjSgIAAaigCACIFDQBBACEDQQAhCAwBC0EAIQMgAkEAQRkgC0EBdmsgC0EfRht0IQBBACEIA0ACQCAFKAIEQXhxIAJrIgYgBE8NACAGIQQgBSEIIAYNAEEAIQQgBSEIIAUhAwwDCyADIAVBFGooAgAiBiAGIAUgAEEddkEEcWpBEGooAgAiBUYbIAMgBhshAyAAQQF0IQAgBQ0ACwsCQCADIAhyDQBBACEIQQIgC3QiA0EAIANrciAHcSIDRQ0DIANBACADa3FBf2oiAyADQQx2QRBxIgN2IgVBBXZBCHEiACADciAFIAB2IgNBAnZBBHEiBXIgAyAFdiIDQQF2QQJxIgVyIAMgBXYiA0EBdkEBcSIFciADIAV2akECdEG40oCAAGooAgAhAwsgA0UNAQsDQCADKAIEQXhxIAJrIgYgBEkhAAJAIAMoAhAiBQ0AIANBFGooAgAhBQsgBiAEIAAbIQQgAyAIIAAbIQggBSEDIAUNAAsLIAhFDQAgBEEAKAKQ0ICAACACa08NACAIKAIYIQsCQCAIKAIMIgAgCEYNACAIKAIIIgNBACgCmNCAgABJGiAAIAM2AgggAyAANgIMDAkLAkAgCEEUaiIFKAIAIgMNACAIKAIQIgNFDQMgCEEQaiEFCwNAIAUhBiADIgBBFGoiBSgCACIDDQAgAEEQaiEFIAAoAhAiAw0ACyAGQQA2AgAMCAsCQEEAKAKQ0ICAACIDIAJJDQBBACgCnNCAgAAhBAJAAkAgAyACayIFQRBJDQAgBCACaiIAIAVBAXI2AgRBACAFNgKQ0ICAAEEAIAA2ApzQgIAAIAQgA2ogBTYCACAEIAJBA3I2AgQMAQsgBCADQQNyNgIEIAQgA2oiAyADKAIEQQFyNgIEQQBBADYCnNCAgABBAEEANgKQ0ICAAAsgBEEIaiEDDAoLAkBBACgClNCAgAAiACACTQ0AQQAoAqDQgIAAIgMgAmoiBCAAIAJrIgVBAXI2AgRBACAFNgKU0ICAAEEAIAQ2AqDQgIAAIAMgAkEDcjYCBCADQQhqIQMMCgsCQAJAQQAoAuDTgIAARQ0AQQAoAujTgIAAIQQMAQtBAEJ/NwLs04CAAEEAQoCAhICAgMAANwLk04CAAEEAIAFBDGpBcHFB2KrVqgVzNgLg04CAAEEAQQA2AvTTgIAAQQBBADYCxNOAgABBgIAEIQQLQQAhAwJAIAQgAkHHAGoiB2oiBkEAIARrIgtxIgggAksNAEEAQTA2AvjTgIAADAoLAkBBACgCwNOAgAAiA0UNAAJAQQAoArjTgIAAIgQgCGoiBSAETQ0AIAUgA00NAQtBACEDQQBBMDYC+NOAgAAMCgtBAC0AxNOAgABBBHENBAJAAkACQEEAKAKg0ICAACIERQ0AQcjTgIAAIQMDQAJAIAMoAgAiBSAESw0AIAUgAygCBGogBEsNAwsgAygCCCIDDQALC0EAEMuAgIAAIgBBf0YNBSAIIQYCQEEAKALk04CAACIDQX9qIgQgAHFFDQAgCCAAayAEIABqQQAgA2txaiEGCyAGIAJNDQUgBkH+////B0sNBQJAQQAoAsDTgIAAIgNFDQBBACgCuNOAgAAiBCAGaiIFIARNDQYgBSADSw0GCyAGEMuAgIAAIgMgAEcNAQwHCyAGIABrIAtxIgZB/v///wdLDQQgBhDLgICAACIAIAMoAgAgAygCBGpGDQMgACEDCwJAIANBf0YNACACQcgAaiAGTQ0AAkAgByAGa0EAKALo04CAACIEakEAIARrcSIEQf7///8HTQ0AIAMhAAwHCwJAIAQQy4CAgABBf0YNACAEIAZqIQYgAyEADAcLQQAgBmsQy4CAgAAaDAQLIA
MhACADQX9HDQUMAwtBACEIDAcLQQAhAAwFCyAAQX9HDQILQQBBACgCxNOAgABBBHI2AsTTgIAACyAIQf7///8HSw0BIAgQy4CAgAAhAEEAEMuAgIAAIQMgAEF/Rg0BIANBf0YNASAAIANPDQEgAyAAayIGIAJBOGpNDQELQQBBACgCuNOAgAAgBmoiAzYCuNOAgAACQCADQQAoArzTgIAATQ0AQQAgAzYCvNOAgAALAkACQAJAAkBBACgCoNCAgAAiBEUNAEHI04CAACEDA0AgACADKAIAIgUgAygCBCIIakYNAiADKAIIIgMNAAwDCwsCQAJAQQAoApjQgIAAIgNFDQAgACADTw0BC0EAIAA2ApjQgIAAC0EAIQNBACAGNgLM04CAAEEAIAA2AsjTgIAAQQBBfzYCqNCAgABBAEEAKALg04CAADYCrNCAgABBAEEANgLU04CAAANAIANBxNCAgABqIANBuNCAgABqIgQ2AgAgBCADQbDQgIAAaiIFNgIAIANBvNCAgABqIAU2AgAgA0HM0ICAAGogA0HA0ICAAGoiBTYCACAFIAQ2AgAgA0HU0ICAAGogA0HI0ICAAGoiBDYCACAEIAU2AgAgA0HQ0ICAAGogBDYCACADQSBqIgNBgAJHDQALIABBeCAAa0EPcUEAIABBCGpBD3EbIgNqIgQgBkFIaiIFIANrIgNBAXI2AgRBAEEAKALw04CAADYCpNCAgABBACADNgKU0ICAAEEAIAQ2AqDQgIAAIAAgBWpBODYCBAwCCyADLQAMQQhxDQAgBCAFSQ0AIAQgAE8NACAEQXggBGtBD3FBACAEQQhqQQ9xGyIFaiIAQQAoApTQgIAAIAZqIgsgBWsiBUEBcjYCBCADIAggBmo2AgRBAEEAKALw04CAADYCpNCAgABBACAFNgKU0ICAAEEAIAA2AqDQgIAAIAQgC2pBODYCBAwBCwJAIABBACgCmNCAgAAiCE8NAEEAIAA2ApjQgIAAIAAhCAsgACAGaiEFQcjTgIAAIQMCQAJAAkACQAJAAkACQANAIAMoAgAgBUYNASADKAIIIgMNAAwCCwsgAy0ADEEIcUUNAQtByNOAgAAhAwNAAkAgAygCACIFIARLDQAgBSADKAIEaiIFIARLDQMLIAMoAgghAwwACwsgAyAANgIAIAMgAygCBCAGajYCBCAAQXggAGtBD3FBACAAQQhqQQ9xG2oiCyACQQNyNgIEIAVBeCAFa0EPcUEAIAVBCGpBD3EbaiIGIAsgAmoiAmshAwJAIAYgBEcNAEEAIAI2AqDQgIAAQQBBACgClNCAgAAgA2oiAzYClNCAgAAgAiADQQFyNgIEDAMLAkAgBkEAKAKc0ICAAEcNAEEAIAI2ApzQgIAAQQBBACgCkNCAgAAgA2oiAzYCkNCAgAAgAiADQQFyNgIEIAIgA2ogAzYCAAwDCwJAIAYoAgQiBEEDcUEBRw0AIARBeHEhBwJAAkAgBEH/AUsNACAGKAIIIgUgBEEDdiIIQQN0QbDQgIAAaiIARhoCQCAGKAIMIgQgBUcNAEEAQQAoAojQgIAAQX4gCHdxNgKI0ICAAAwCCyAEIABGGiAEIAU2AgggBSAENgIMDAELIAYoAhghCQJAAkAgBigCDCIAIAZGDQAgBigCCCIEIAhJGiAAIAQ2AgggBCAANgIMDAELAkAgBkEUaiIEKAIAIgUNACAGQRBqIgQoAgAiBQ0AQQAhAAwBCwNAIAQhCCAFIgBBFGoiBCgCACIFDQAgAEEQaiEEIAAoAhAiBQ0ACyAIQQA2AgALIAlFDQACQAJAIAYgBigCHCIFQQJ0QbjSgIAAaiIEKAIARw0AIAQgADYCACAADQFBAEEAKAKM0ICAAEF+IAV3cTYCjNCAgAAMAgsgCUEQQRQgCSgCECAGRhtqIAA2AgAgAEUNAQsgACAJNgIYAkAgBigCECIERQ0AIAAgBDYCECAEIAA2AhgLIAYoAhQiBEUNACAAQRRqIAQ2AgAgBCAANgIYCyAHIANqIQMgBiAHaiIGKAIEIQQLIAYgBEF+cTYCBCACIANqIAM2AgAgAiADQQFyNgIEAkAgA0H/AUsNACADQXhxQbDQgIAAaiEEAkACQEEAKAKI0ICAACIFQQEgA0EDdnQiA3ENAEEAIAUgA3I2AojQgIAAIAQhAwwBCyAEKAIIIQMLIAMgAjYCDCAEIAI2AgggAiAENgIMIAIgAzYCCAwDC0EfIQQCQCADQf///wdLDQAgA0EIdiIEIARBgP4/akEQdkEIcSIEdCIFIAVBgOAfakEQdkEEcSIFdCIAIABBgIAPakEQdkECcSIAdEEPdiAEIAVyIAByayIEQQF0IAMgBEEVanZBAXFyQRxqIQQLIAIgBDYCHCACQgA3AhAgBEECdEG40oCAAGohBQJAQQAoAozQgIAAIgBBASAEdCIIcQ0AIAUgAjYCAEEAIAAgCHI2AozQgIAAIAIgBTYCGCACIAI2AgggAiACNgIMDAMLIANBAEEZIARBAXZrIARBH0YbdCEEIAUoAgAhAANAIAAiBSgCBEF4cSADRg0CIARBHXYhACAEQQF0IQQgBSAAQQRxakEQaiIIKAIAIgANAAsgCCACNgIAIAIgBTYCGCACIAI2AgwgAiACNgIIDAILIABBeCAAa0EPcUEAIABBCGpBD3EbIgNqIgsgBkFIaiIIIANrIgNBAXI2AgQgACAIakE4NgIEIAQgBUE3IAVrQQ9xQQAgBUFJakEPcRtqQUFqIgggCCAEQRBqSRsiCEEjNgIEQQBBACgC8NOAgAA2AqTQgIAAQQAgAzYClNCAgABBACALNgKg0ICAACAIQRBqQQApAtDTgIAANwIAIAhBACkCyNOAgAA3AghBACAIQQhqNgLQ04CAAEEAIAY2AszTgIAAQQAgADYCyNOAgABBAEEANgLU04CAACAIQSRqIQMDQCADQQc2AgAgA0EEaiIDIAVJDQALIAggBEYNAyAIIAgoAgRBfnE2AgQgCCAIIARrIgA2AgAgBCAAQQFyNgIEAkAgAEH/AUsNACAAQXhxQbDQgIAAaiEDAkACQEEAKAKI0ICAACIFQQEgAEEDdnQiAHENAEEAIAUgAHI2AojQgIAAIAMhBQwBCyADKAIIIQULIAUgBDYCDCADIAQ2AgggBCADNgIMIAQgBTYCCAwEC0EfIQMCQCAAQf///wdLDQAgAEEIdiIDIANBgP4/akEQdkEIcSIDdCIFIAVBgOAfakEQdkEEcSIFdCIIIAhBgIAPakEQdkECcSIIdEEPdiADIAVyIAhyayIDQQF0IAAgA0EVanZBAXFyQRxqIQMLIAQgAzYCHCAEQgA3AhAgA0ECdEG40oCAAGohBQJAQQAoAozQgIAAIghBASADdCIGcQ0AIAUgBDYCAEEAIAggBnI2AozQgIAAIAQgBTYCGCAEIAQ2AgggBCAENgIMDAQLIABBAEEZIANBAXZrIANBH0YbdCEDIAUoAgAhCANAIAgiBSgCBEF4cSAARg0DIANBHXYhCCADQQF0IQMgBSAIQQRxa
kEQaiIGKAIAIggNAAsgBiAENgIAIAQgBTYCGCAEIAQ2AgwgBCAENgIIDAMLIAUoAggiAyACNgIMIAUgAjYCCCACQQA2AhggAiAFNgIMIAIgAzYCCAsgC0EIaiEDDAULIAUoAggiAyAENgIMIAUgBDYCCCAEQQA2AhggBCAFNgIMIAQgAzYCCAtBACgClNCAgAAiAyACTQ0AQQAoAqDQgIAAIgQgAmoiBSADIAJrIgNBAXI2AgRBACADNgKU0ICAAEEAIAU2AqDQgIAAIAQgAkEDcjYCBCAEQQhqIQMMAwtBACEDQQBBMDYC+NOAgAAMAgsCQCALRQ0AAkACQCAIIAgoAhwiBUECdEG40oCAAGoiAygCAEcNACADIAA2AgAgAA0BQQAgB0F+IAV3cSIHNgKM0ICAAAwCCyALQRBBFCALKAIQIAhGG2ogADYCACAARQ0BCyAAIAs2AhgCQCAIKAIQIgNFDQAgACADNgIQIAMgADYCGAsgCEEUaigCACIDRQ0AIABBFGogAzYCACADIAA2AhgLAkACQCAEQQ9LDQAgCCAEIAJqIgNBA3I2AgQgCCADaiIDIAMoAgRBAXI2AgQMAQsgCCACaiIAIARBAXI2AgQgCCACQQNyNgIEIAAgBGogBDYCAAJAIARB/wFLDQAgBEF4cUGw0ICAAGohAwJAAkBBACgCiNCAgAAiBUEBIARBA3Z0IgRxDQBBACAFIARyNgKI0ICAACADIQQMAQsgAygCCCEECyAEIAA2AgwgAyAANgIIIAAgAzYCDCAAIAQ2AggMAQtBHyEDAkAgBEH///8HSw0AIARBCHYiAyADQYD+P2pBEHZBCHEiA3QiBSAFQYDgH2pBEHZBBHEiBXQiAiACQYCAD2pBEHZBAnEiAnRBD3YgAyAFciACcmsiA0EBdCAEIANBFWp2QQFxckEcaiEDCyAAIAM2AhwgAEIANwIQIANBAnRBuNKAgABqIQUCQCAHQQEgA3QiAnENACAFIAA2AgBBACAHIAJyNgKM0ICAACAAIAU2AhggACAANgIIIAAgADYCDAwBCyAEQQBBGSADQQF2ayADQR9GG3QhAyAFKAIAIQICQANAIAIiBSgCBEF4cSAERg0BIANBHXYhAiADQQF0IQMgBSACQQRxakEQaiIGKAIAIgINAAsgBiAANgIAIAAgBTYCGCAAIAA2AgwgACAANgIIDAELIAUoAggiAyAANgIMIAUgADYCCCAAQQA2AhggACAFNgIMIAAgAzYCCAsgCEEIaiEDDAELAkAgCkUNAAJAAkAgACAAKAIcIgVBAnRBuNKAgABqIgMoAgBHDQAgAyAINgIAIAgNAUEAIAlBfiAFd3E2AozQgIAADAILIApBEEEUIAooAhAgAEYbaiAINgIAIAhFDQELIAggCjYCGAJAIAAoAhAiA0UNACAIIAM2AhAgAyAINgIYCyAAQRRqKAIAIgNFDQAgCEEUaiADNgIAIAMgCDYCGAsCQAJAIARBD0sNACAAIAQgAmoiA0EDcjYCBCAAIANqIgMgAygCBEEBcjYCBAwBCyAAIAJqIgUgBEEBcjYCBCAAIAJBA3I2AgQgBSAEaiAENgIAAkAgB0UNACAHQXhxQbDQgIAAaiECQQAoApzQgIAAIQMCQAJAQQEgB0EDdnQiCCAGcQ0AQQAgCCAGcjYCiNCAgAAgAiEIDAELIAIoAgghCAsgCCADNgIMIAIgAzYCCCADIAI2AgwgAyAINgIIC0EAIAU2ApzQgIAAQQAgBDYCkNCAgAALIABBCGohAwsgAUEQaiSAgICAACADCwoAIAAQyYCAgAAL4g0BB38CQCAARQ0AIABBeGoiASAAQXxqKAIAIgJBeHEiAGohAwJAIAJBAXENACACQQNxRQ0BIAEgASgCACICayIBQQAoApjQgIAAIgRJDQEgAiAAaiEAAkAgAUEAKAKc0ICAAEYNAAJAIAJB/wFLDQAgASgCCCIEIAJBA3YiBUEDdEGw0ICAAGoiBkYaAkAgASgCDCICIARHDQBBAEEAKAKI0ICAAEF+IAV3cTYCiNCAgAAMAwsgAiAGRhogAiAENgIIIAQgAjYCDAwCCyABKAIYIQcCQAJAIAEoAgwiBiABRg0AIAEoAggiAiAESRogBiACNgIIIAIgBjYCDAwBCwJAIAFBFGoiAigCACIEDQAgAUEQaiICKAIAIgQNAEEAIQYMAQsDQCACIQUgBCIGQRRqIgIoAgAiBA0AIAZBEGohAiAGKAIQIgQNAAsgBUEANgIACyAHRQ0BAkACQCABIAEoAhwiBEECdEG40oCAAGoiAigCAEcNACACIAY2AgAgBg0BQQBBACgCjNCAgABBfiAEd3E2AozQgIAADAMLIAdBEEEUIAcoAhAgAUYbaiAGNgIAIAZFDQILIAYgBzYCGAJAIAEoAhAiAkUNACAGIAI2AhAgAiAGNgIYCyABKAIUIgJFDQEgBkEUaiACNgIAIAIgBjYCGAwBCyADKAIEIgJBA3FBA0cNACADIAJBfnE2AgRBACAANgKQ0ICAACABIABqIAA2AgAgASAAQQFyNgIEDwsgASADTw0AIAMoAgQiAkEBcUUNAAJAAkAgAkECcQ0AAkAgA0EAKAKg0ICAAEcNAEEAIAE2AqDQgIAAQQBBACgClNCAgAAgAGoiADYClNCAgAAgASAAQQFyNgIEIAFBACgCnNCAgABHDQNBAEEANgKQ0ICAAEEAQQA2ApzQgIAADwsCQCADQQAoApzQgIAARw0AQQAgATYCnNCAgABBAEEAKAKQ0ICAACAAaiIANgKQ0ICAACABIABBAXI2AgQgASAAaiAANgIADwsgAkF4cSAAaiEAAkACQCACQf8BSw0AIAMoAggiBCACQQN2IgVBA3RBsNCAgABqIgZGGgJAIAMoAgwiAiAERw0AQQBBACgCiNCAgABBfiAFd3E2AojQgIAADAILIAIgBkYaIAIgBDYCCCAEIAI2AgwMAQsgAygCGCEHAkACQCADKAIMIgYgA0YNACADKAIIIgJBACgCmNCAgABJGiAGIAI2AgggAiAGNgIMDAELAkAgA0EUaiICKAIAIgQNACADQRBqIgIoAgAiBA0AQQAhBgwBCwNAIAIhBSAEIgZBFGoiAigCACIEDQAgBkEQaiECIAYoAhAiBA0ACyAFQQA2AgALIAdFDQACQAJAIAMgAygCHCIEQQJ0QbjSgIAAaiICKAIARw0AIAIgBjYCACAGDQFBAEEAKAKM0ICAAEF+IAR3cTYCjNCAgAAMAgsgB0EQQRQgBygCECADRhtqIAY2AgAgBkUNAQsgBiAHNgIYAkAgAygCECICRQ0AIAYgAjYCECACIAY2AhgLIAMoAhQiAkUNACAGQRRqIAI2AgAgAiAGNgIYCyABIABqIAA2AgAgASAAQQFyNgIEIAFBACgCnNCAgABHDQFBACAANgKQ0ICAAA8LIAMgAkF+cTYCBCABIABqIAA2AgAgASAAQQFyNgIECwJAIABB/wFLDQAgAEF4cUGw0ICAAGohAgJAAkBBACgCiNCAgAAi
BEEBIABBA3Z0IgBxDQBBACAEIAByNgKI0ICAACACIQAMAQsgAigCCCEACyAAIAE2AgwgAiABNgIIIAEgAjYCDCABIAA2AggPC0EfIQICQCAAQf///wdLDQAgAEEIdiICIAJBgP4/akEQdkEIcSICdCIEIARBgOAfakEQdkEEcSIEdCIGIAZBgIAPakEQdkECcSIGdEEPdiACIARyIAZyayICQQF0IAAgAkEVanZBAXFyQRxqIQILIAEgAjYCHCABQgA3AhAgAkECdEG40oCAAGohBAJAAkBBACgCjNCAgAAiBkEBIAJ0IgNxDQAgBCABNgIAQQAgBiADcjYCjNCAgAAgASAENgIYIAEgATYCCCABIAE2AgwMAQsgAEEAQRkgAkEBdmsgAkEfRht0IQIgBCgCACEGAkADQCAGIgQoAgRBeHEgAEYNASACQR12IQYgAkEBdCECIAQgBkEEcWpBEGoiAygCACIGDQALIAMgATYCACABIAQ2AhggASABNgIMIAEgATYCCAwBCyAEKAIIIgAgATYCDCAEIAE2AgggAUEANgIYIAEgBDYCDCABIAA2AggLQQBBACgCqNCAgABBf2oiAUF/IAEbNgKo0ICAAAsLBAAAAAtOAAJAIAANAD8AQRB0DwsCQCAAQf//A3ENACAAQX9MDQACQCAAQRB2QAAiAEF/Rw0AQQBBMDYC+NOAgABBfw8LIABBEHQPCxDKgICAAAAL8gICA38BfgJAIAJFDQAgACABOgAAIAIgAGoiA0F/aiABOgAAIAJBA0kNACAAIAE6AAIgACABOgABIANBfWogAToAACADQX5qIAE6AAAgAkEHSQ0AIAAgAToAAyADQXxqIAE6AAAgAkEJSQ0AIABBACAAa0EDcSIEaiIDIAFB/wFxQYGChAhsIgE2AgAgAyACIARrQXxxIgRqIgJBfGogATYCACAEQQlJDQAgAyABNgIIIAMgATYCBCACQXhqIAE2AgAgAkF0aiABNgIAIARBGUkNACADIAE2AhggAyABNgIUIAMgATYCECADIAE2AgwgAkFwaiABNgIAIAJBbGogATYCACACQWhqIAE2AgAgAkFkaiABNgIAIAQgA0EEcUEYciIFayICQSBJDQAgAa1CgYCAgBB+IQYgAyAFaiEBA0AgASAGNwMYIAEgBjcDECABIAY3AwggASAGNwMAIAFBIGohASACQWBqIgJBH0sNAAsLIAALC45IAQBBgAgLhkgBAAAAAgAAAAMAAAAAAAAAAAAAAAQAAAAFAAAAAAAAAAAAAAAGAAAABwAAAAgAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAEludmFsaWQgY2hhciBpbiB1cmwgcXVlcnkAU3BhbiBjYWxsYmFjayBlcnJvciBpbiBvbl9ib2R5AENvbnRlbnQtTGVuZ3RoIG92ZXJmbG93AENodW5rIHNpemUgb3ZlcmZsb3cAUmVzcG9uc2Ugb3ZlcmZsb3cASW52YWxpZCBtZXRob2QgZm9yIEhUVFAveC54IHJlcXVlc3QASW52YWxpZCBtZXRob2QgZm9yIFJUU1AveC54IHJlcXVlc3QARXhwZWN0ZWQgU09VUkNFIG1ldGhvZCBmb3IgSUNFL3gueCByZXF1ZXN0AEludmFsaWQgY2hhciBpbiB1cmwgZnJhZ21lbnQgc3RhcnQARXhwZWN0ZWQgZG90AFNwYW4gY2FsbGJhY2sgZXJyb3IgaW4gb25fc3RhdHVzAEludmFsaWQgcmVzcG9uc2Ugc3RhdHVzAEludmFsaWQgY2hhcmFjdGVyIGluIGNodW5rIGV4dGVuc2lvbnMAVXNlciBjYWxsYmFjayBlcnJvcgBgb25fcmVzZXRgIGNhbGxiYWNrIGVycm9yAGBvbl9jaHVua19oZWFkZXJgIGNhbGxiYWNrIGVycm9yAGBvbl9tZXNzYWdlX2JlZ2luYCBjYWxsYmFjayBlcnJvcgBgb25fY2h1bmtfZXh0ZW5zaW9uX3ZhbHVlYCBjYWxsYmFjayBlcnJvcgBgb25fc3RhdHVzX2NvbXBsZXRlYCBjYWxsYmFjayBlcnJvcgBgb25fdmVyc2lvbl9jb21wbGV0ZWAgY2FsbGJhY2sgZXJyb3IAYG9uX3VybF9jb21wbGV0ZWAgY2FsbGJhY2sgZXJyb3IAYG9uX2NodW5rX2NvbXBsZXRlYCBjYWxsYmFjayBlcnJvcgBgb25faGVhZGVyX3ZhbHVlX2NvbXBsZXRlYCBjYWxsYmFjayBlcnJvcgBgb25fbWVzc2FnZV9jb21wbGV0ZWAgY2FsbGJhY2sgZXJyb3IAYG9uX21ldGhvZF9jb21wbGV0ZWAgY2FsbGJhY2sgZXJyb3IAYG9uX2hlYWRlcl9maWVsZF9jb21wbGV0ZWAgY2FsbGJhY2sgZXJyb3IAYG9uX2NodW5rX2V4dGVuc2lvbl9uYW1lYCBjYWxsYmFjayBlcnJvcgBVbmV4cGVjdGVkIGNoYXIgaW4gdXJsIHNlcnZlcgBJbnZhbGlkIGhlYWRlciB2YWx1ZSBjaGFyAEludmFsaWQgaGVhZGVyIGZpZWxkIGNoYXIAU3BhbiBjYWxsYmFjayBlcnJvciBpbiBvbl92ZXJzaW9uAEludmFsaWQgbWlub3IgdmVyc2lvbgBJbnZhbGlkIG1ham9yIHZlcnNpb24ARXhwZWN0ZWQgc3BhY2UgYWZ0ZXIgdmVyc2lvbgBFeHBlY3RlZCBDUkxGIGFmdGVyIHZlcnNpb24ASW52YWxpZCBIVFRQIHZlcnNpb24ASW52YWxpZCBoZWFkZXIgdG9rZW4AU3BhbiBjYWxsYmFjayBlcnJvciBpbiBvbl91cmwASW52YWxpZCBjaGFyYWN0ZXJzIGluIHVybABVbmV4cGVjdGVkIHN0YXJ0IGNoYXIgaW4gdXJsAERvdWJsZSBAIGluIHVybABFbXB0eSBDb250ZW50LUxlbmd0aABJbnZhbGlkIGNoYXJhY3RlciBpbiBDb250ZW50LUxlbmd0aABEdXBsaWNhdGUgQ29udGVudC1MZW5ndGgASW52YWxpZCBjaGFyIGluIHVybCBwYXRoAENvbnRlbnQtTGVuZ3RoIGNhbid0IGJlIHByZXNlbnQgd2l0aCBUcmFuc2Zlci1FbmNvZGluZwBJbnZhbGlkIGNoYXJhY3RlciBpbiBjaHVuayBzaXplAFNwYW4gY2FsbGJhY2sgZXJyb3IgaW4gb25faGVhZGVyX3ZhbHVlAFNwYW4gY2FsbGJhY2sgZXJyb3IgaW4gb25fY2h1bmtfZXh0ZW5zaW9uX3ZhbHVlAEludmFsaWQgY2hhcmFjdGVyIGluIGNodW5rIGV4dGVuc2lvbnMgdmFsdWUATWlzc2luZyBleHBlY3RlZCBMRiBhZnRlciBoZWFkZXIgdmFsdWUASW52YWxpZCBgVHJhbnNmZXItRW5jb2RpbmdgIGhlYWRlciB2YWx1ZQBJbnZhbGl
kIGNoYXJhY3RlciBpbiBjaHVuayBleHRlbnNpb25zIHF1b3RlIHZhbHVlAEludmFsaWQgY2hhcmFjdGVyIGluIGNodW5rIGV4dGVuc2lvbnMgcXVvdGVkIHZhbHVlAFBhdXNlZCBieSBvbl9oZWFkZXJzX2NvbXBsZXRlAEludmFsaWQgRU9GIHN0YXRlAG9uX3Jlc2V0IHBhdXNlAG9uX2NodW5rX2hlYWRlciBwYXVzZQBvbl9tZXNzYWdlX2JlZ2luIHBhdXNlAG9uX2NodW5rX2V4dGVuc2lvbl92YWx1ZSBwYXVzZQBvbl9zdGF0dXNfY29tcGxldGUgcGF1c2UAb25fdmVyc2lvbl9jb21wbGV0ZSBwYXVzZQBvbl91cmxfY29tcGxldGUgcGF1c2UAb25fY2h1bmtfY29tcGxldGUgcGF1c2UAb25faGVhZGVyX3ZhbHVlX2NvbXBsZXRlIHBhdXNlAG9uX21lc3NhZ2VfY29tcGxldGUgcGF1c2UAb25fbWV0aG9kX2NvbXBsZXRlIHBhdXNlAG9uX2hlYWRlcl9maWVsZF9jb21wbGV0ZSBwYXVzZQBvbl9jaHVua19leHRlbnNpb25fbmFtZSBwYXVzZQBVbmV4cGVjdGVkIHNwYWNlIGFmdGVyIHN0YXJ0IGxpbmUAU3BhbiBjYWxsYmFjayBlcnJvciBpbiBvbl9jaHVua19leHRlbnNpb25fbmFtZQBJbnZhbGlkIGNoYXJhY3RlciBpbiBjaHVuayBleHRlbnNpb25zIG5hbWUAUGF1c2Ugb24gQ09OTkVDVC9VcGdyYWRlAFBhdXNlIG9uIFBSSS9VcGdyYWRlAEV4cGVjdGVkIEhUVFAvMiBDb25uZWN0aW9uIFByZWZhY2UAU3BhbiBjYWxsYmFjayBlcnJvciBpbiBvbl9tZXRob2QARXhwZWN0ZWQgc3BhY2UgYWZ0ZXIgbWV0aG9kAFNwYW4gY2FsbGJhY2sgZXJyb3IgaW4gb25faGVhZGVyX2ZpZWxkAFBhdXNlZABJbnZhbGlkIHdvcmQgZW5jb3VudGVyZWQASW52YWxpZCBtZXRob2QgZW5jb3VudGVyZWQAVW5leHBlY3RlZCBjaGFyIGluIHVybCBzY2hlbWEAUmVxdWVzdCBoYXMgaW52YWxpZCBgVHJhbnNmZXItRW5jb2RpbmdgAFNXSVRDSF9QUk9YWQBVU0VfUFJPWFkATUtBQ1RJVklUWQBVTlBST0NFU1NBQkxFX0VOVElUWQBDT1BZAE1PVkVEX1BFUk1BTkVOVExZAFRPT19FQVJMWQBOT1RJRlkARkFJTEVEX0RFUEVOREVOQ1kAQkFEX0dBVEVXQVkAUExBWQBQVVQAQ0hFQ0tPVVQAR0FURVdBWV9USU1FT1VUAFJFUVVFU1RfVElNRU9VVABORVRXT1JLX0NPTk5FQ1RfVElNRU9VVABDT05ORUNUSU9OX1RJTUVPVVQATE9HSU5fVElNRU9VVABORVRXT1JLX1JFQURfVElNRU9VVABQT1NUAE1JU0RJUkVDVEVEX1JFUVVFU1QAQ0xJRU5UX0NMT1NFRF9SRVFVRVNUAENMSUVOVF9DTE9TRURfTE9BRF9CQUxBTkNFRF9SRVFVRVNUAEJBRF9SRVFVRVNUAEhUVFBfUkVRVUVTVF9TRU5UX1RPX0hUVFBTX1BPUlQAUkVQT1JUAElNX0FfVEVBUE9UAFJFU0VUX0NPTlRFTlQATk9fQ09OVEVOVABQQVJUSUFMX0NPTlRFTlQASFBFX0lOVkFMSURfQ09OU1RBTlQASFBFX0NCX1JFU0VUAEdFVABIUEVfU1RSSUNUAENPTkZMSUNUAFRFTVBPUkFSWV9SRURJUkVDVABQRVJNQU5FTlRfUkVESVJFQ1QAQ09OTkVDVABNVUxUSV9TVEFUVVMASFBFX0lOVkFMSURfU1RBVFVTAFRPT19NQU5ZX1JFUVVFU1RTAEVBUkxZX0hJTlRTAFVOQVZBSUxBQkxFX0ZPUl9MRUdBTF9SRUFTT05TAE9QVElPTlMAU1dJVENISU5HX1BST1RPQ09MUwBWQVJJQU5UX0FMU09fTkVHT1RJQVRFUwBNVUxUSVBMRV9DSE9JQ0VTAElOVEVSTkFMX1NFUlZFUl9FUlJPUgBXRUJfU0VSVkVSX1VOS05PV05fRVJST1IAUkFJTEdVTl9FUlJPUgBJREVOVElUWV9QUk9WSURFUl9BVVRIRU5USUNBVElPTl9FUlJPUgBTU0xfQ0VSVElGSUNBVEVfRVJST1IASU5WQUxJRF9YX0ZPUldBUkRFRF9GT1IAU0VUX1BBUkFNRVRFUgBHRVRfUEFSQU1FVEVSAEhQRV9VU0VSAFNFRV9PVEhFUgBIUEVfQ0JfQ0hVTktfSEVBREVSAE1LQ0FMRU5EQVIAU0VUVVAAV0VCX1NFUlZFUl9JU19ET1dOAFRFQVJET1dOAEhQRV9DTE9TRURfQ09OTkVDVElPTgBIRVVSSVNUSUNfRVhQSVJBVElPTgBESVNDT05ORUNURURfT1BFUkFUSU9OAE5PTl9BVVRIT1JJVEFUSVZFX0lORk9STUFUSU9OAEhQRV9JTlZBTElEX1ZFUlNJT04ASFBFX0NCX01FU1NBR0VfQkVHSU4AU0lURV9JU19GUk9aRU4ASFBFX0lOVkFMSURfSEVBREVSX1RPS0VOAElOVkFMSURfVE9LRU4ARk9SQklEREVOAEVOSEFOQ0VfWU9VUl9DQUxNAEhQRV9JTlZBTElEX1VSTABCTE9DS0VEX0JZX1BBUkVOVEFMX0NPTlRST0wATUtDT0wAQUNMAEhQRV9JTlRFUk5BTABSRVFVRVNUX0hFQURFUl9GSUVMRFNfVE9PX0xBUkdFX1VOT0ZGSUNJQUwASFBFX09LAFVOTElOSwBVTkxPQ0sAUFJJAFJFVFJZX1dJVEgASFBFX0lOVkFMSURfQ09OVEVOVF9MRU5HVEgASFBFX1VORVhQRUNURURfQ09OVEVOVF9MRU5HVEgARkxVU0gAUFJPUFBBVENIAE0tU0VBUkNIAFVSSV9UT09fTE9ORwBQUk9DRVNTSU5HAE1JU0NFTExBTkVPVVNfUEVSU0lTVEVOVF9XQVJOSU5HAE1JU0NFTExBTkVPVVNfV0FSTklORwBIUEVfSU5WQUxJRF9UUkFOU0ZFUl9FTkNPRElORwBFeHBlY3RlZCBDUkxGAEhQRV9JTlZBTElEX0NIVU5LX1NJWkUATU9WRQBDT05USU5VRQBIUEVfQ0JfU1RBVFVTX0NPTVBMRVRFAEhQRV9DQl9IRUFERVJTX0NPTVBMRVRFAEhQRV9DQl9WRVJTSU9OX0NPTVBMRVRFAEhQRV9DQl9VUkxfQ09NUExFVEUASFBFX0NCX0NIVU5LX0NPTVBMRVRFAEhQRV9DQl9IRUFERVJfVkFMVUVfQ09NUExFVEUASFBFX0NCX0NIVU5LX0VYVEVOU0lPTl9WQUxVRV9DT01QTEVURQBIUEVfQ0JfQ0
hVTktfRVhURU5TSU9OX05BTUVfQ09NUExFVEUASFBFX0NCX01FU1NBR0VfQ09NUExFVEUASFBFX0NCX01FVEhPRF9DT01QTEVURQBIUEVfQ0JfSEVBREVSX0ZJRUxEX0NPTVBMRVRFAERFTEVURQBIUEVfSU5WQUxJRF9FT0ZfU1RBVEUASU5WQUxJRF9TU0xfQ0VSVElGSUNBVEUAUEFVU0UATk9fUkVTUE9OU0UAVU5TVVBQT1JURURfTUVESUFfVFlQRQBHT05FAE5PVF9BQ0NFUFRBQkxFAFNFUlZJQ0VfVU5BVkFJTEFCTEUAUkFOR0VfTk9UX1NBVElTRklBQkxFAE9SSUdJTl9JU19VTlJFQUNIQUJMRQBSRVNQT05TRV9JU19TVEFMRQBQVVJHRQBNRVJHRQBSRVFVRVNUX0hFQURFUl9GSUVMRFNfVE9PX0xBUkdFAFJFUVVFU1RfSEVBREVSX1RPT19MQVJHRQBQQVlMT0FEX1RPT19MQVJHRQBJTlNVRkZJQ0lFTlRfU1RPUkFHRQBIUEVfUEFVU0VEX1VQR1JBREUASFBFX1BBVVNFRF9IMl9VUEdSQURFAFNPVVJDRQBBTk5PVU5DRQBUUkFDRQBIUEVfVU5FWFBFQ1RFRF9TUEFDRQBERVNDUklCRQBVTlNVQlNDUklCRQBSRUNPUkQASFBFX0lOVkFMSURfTUVUSE9EAE5PVF9GT1VORABQUk9QRklORABVTkJJTkQAUkVCSU5EAFVOQVVUSE9SSVpFRABNRVRIT0RfTk9UX0FMTE9XRUQASFRUUF9WRVJTSU9OX05PVF9TVVBQT1JURUQAQUxSRUFEWV9SRVBPUlRFRABBQ0NFUFRFRABOT1RfSU1QTEVNRU5URUQATE9PUF9ERVRFQ1RFRABIUEVfQ1JfRVhQRUNURUQASFBFX0xGX0VYUEVDVEVEAENSRUFURUQASU1fVVNFRABIUEVfUEFVU0VEAFRJTUVPVVRfT0NDVVJFRABQQVlNRU5UX1JFUVVJUkVEAFBSRUNPTkRJVElPTl9SRVFVSVJFRABQUk9YWV9BVVRIRU5USUNBVElPTl9SRVFVSVJFRABORVRXT1JLX0FVVEhFTlRJQ0FUSU9OX1JFUVVJUkVEAExFTkdUSF9SRVFVSVJFRABTU0xfQ0VSVElGSUNBVEVfUkVRVUlSRUQAVVBHUkFERV9SRVFVSVJFRABQQUdFX0VYUElSRUQAUFJFQ09ORElUSU9OX0ZBSUxFRABFWFBFQ1RBVElPTl9GQUlMRUQAUkVWQUxJREFUSU9OX0ZBSUxFRABTU0xfSEFORFNIQUtFX0ZBSUxFRABMT0NLRUQAVFJBTlNGT1JNQVRJT05fQVBQTElFRABOT1RfTU9ESUZJRUQATk9UX0VYVEVOREVEAEJBTkRXSURUSF9MSU1JVF9FWENFRURFRABTSVRFX0lTX09WRVJMT0FERUQASEVBRABFeHBlY3RlZCBIVFRQLwAAXhMAACYTAAAwEAAA8BcAAJ0TAAAVEgAAORcAAPASAAAKEAAAdRIAAK0SAACCEwAATxQAAH8QAACgFQAAIxQAAIkSAACLFAAATRUAANQRAADPFAAAEBgAAMkWAADcFgAAwREAAOAXAAC7FAAAdBQAAHwVAADlFAAACBcAAB8QAABlFQAAoxQAACgVAAACFQAAmRUAACwQAACLGQAATw8AANQOAABqEAAAzhAAAAIXAACJDgAAbhMAABwTAABmFAAAVhcAAMETAADNEwAAbBMAAGgXAABmFwAAXxcAACITAADODwAAaQ4AANgOAABjFgAAyxMAAKoOAAAoFwAAJhcAAMUTAABdFgAA6BEAAGcTAABlEwAA8hYAAHMTAAAdFwAA+RYAAPMRAADPDgAAzhUAAAwSAACzEQAApREAAGEQAAAyFwAAuxMAAAAAAAAAAAAAAAAAAAAAAAAAAQAAAAAAAAAAAAAAAAAAAAAAAAAAAAABAQIBAQEBAQEBAQEBAQEBAQEBAQEBAQEBAQEBAQEBAQEBAQEBAQEBAQEBAQEBAQEBAQEBAQEBAQEBAQEAAQEBAQEBAQEBAQEBAQEBAQEBAQEBAQEBAQEBAQEBAQEBAQEBAQEBAQEBAQEBAQEBAQEBAQEBAQEBAQEBAQEBAQEBAQEBAQEBAQEBAQEBAQEBAQEBAQEBAQEBAQEBAQEBAQEBAQEBAQEBAQEBAQEBAQEBAQEBAQEBAQEBAQEBAQEBAQEBAQEBAQEBAQEBAQEBAQEBAQEBAQEBAQEBAQEBAQEBAQAAAAAAAAAAAAAAAAABAAAAAAAAAAAAAAAAAAAAAAAAAAIDAgICAgIAAAICAAICAAICAgICAgICAgIABAAAAAAAAgICAgICAgICAgICAgICAgICAgICAgICAgIAAAACAgICAgICAgICAgICAgICAgICAgICAgICAgICAgACAAIAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAQAAAAAAAAAAAAAAAAAAAAAAAAACAAICAgICAAACAgACAgACAgICAgICAgICAAMABAAAAAICAgICAgICAgICAgICAgICAgICAgICAgICAAAAAgICAgICAgICAgICAgICAgICAgICAgICAgICAgIAAgACAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAbG9zZWVlcC1hbGl2ZQAAAAAAAAAAAAAAAAEAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAQEBAQEBAQEBAQEBAQEBAQEBAQEBAQEBAQEBAQEBAQEBAQEBAQEBAQEBAQEBAQEBAQEBAQEBAQEBAQEBAQEBAQEBAQEBAQEBAQEBAQEBAQEBAQEBAQEBAQEBAQEBAQEAAQEBAQEBAQEBAQEBAQEBAQEBAQEBAQEBAQEBAQEBAQEBAQEBAQEBAQEBAQEBAQEBAQEBAQEBAQEBAQEBAQEBAQEBAQEBAQEBAQEBAQEBAQEBAQEBAQEBAQEBAQEBAQEBAQEBAQEBAQEBAQEBAQEBAQEBAQEBAQEBAQEBAQEBAQEAAAAAAAAAAAABAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAEBAQEBAQEBAQEBAQIBAQEBAQEBAQEBAQEBAQEBAQEBAQEBAQEBAQEBAQEBAQEBAQEBAQEBAQEBAQEBAQEBAQEBAQEBAQEBAQEBAQEBAQEBAQEBAQEBAQEBAQEBAQEBAAEBAQEBAQEBAQEBAQEBAQEBAQEBAQEBAQEBA
QEBAQEBAQEBAQEBAQEBAQEBAQEBAQEBAQEBAQEBAQEBAQEBAQEBAQEBAQEBAQEBAQEBAQEBAQEBAQEBAQEBAQEBAQEBAQEBAQEBAQEBAQEBAQEBAQEBAQEBAQEBAQEBAQEBAQEBY2h1bmtlZAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAQEAAQEBAQEAAAEBAAEBAAEBAQEBAQEBAQEAAAAAAAAAAQEBAQEBAQEBAQEBAQEBAQEBAQEBAQEBAQEAAAABAQEBAQEBAQEBAQEBAQEBAQEBAQEBAQEBAQEBAQABAAEAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAABlY3Rpb25lbnQtbGVuZ3Rob25yb3h5LWNvbm5lY3Rpb24AAAAAAAAAAAAAAAAAAAByYW5zZmVyLWVuY29kaW5ncGdyYWRlDQoNCg0KU00NCg0KVFRQL0NFL1RTUC8AAAAAAAAAAAAAAAABAgABAwAAAAAAAAAAAAAAAAAAAAAAAAQBAQEBAQEBAQEBAQEBAQEBAQEBAQEBAQEBAQEBAQEBAQEBAQEBAQEBAQEBAQEBAQEBAQEBAQEBAQEBAQEBAQEBAQEBAQEBAQEBAQEBAQEBAQEBAQEBAQEBAQEBAQEBAAEBAQEBAQEBAQEBAQEBAQEBAQEBAQEBAQEBAQEBAQEBAQEBAQEBAQEBAQEBAQEBAQEBAQEBAQEBAQEBAQEBAQEBAQEBAQEBAQEBAQEBAQEBAQEBAQEBAQEBAQEBAQEBAQEBAQEBAQEBAQEBAQEBAQEBAQEBAQEBAQEBAQEBAQEBAAAAAAAAAAAAAQIAAQMAAAAAAAAAAAAAAAAAAAAAAAAEAQEFAQEBAQEBAQEBAQEBAQEBAQEBAQEBAQEBAQEBAQEBAQEBAQEBAQEBAQEBAQEBAQEBAQEBAQEBAQEBAQEBAQEBAQEBAQEBAQEBAQEBAQEBAQEBAQEBAQEBAQEBAQABAQEBAQEBAQEBAQEBAQEBAQEBAQEBAQEBAQEBAQEBAQEBAQEBAQEBAQEBAQEBAQEBAQEBAQEBAQEBAQEBAQEBAQEBAQEBAQEBAQEBAQEBAQEBAQEBAQEBAQEBAQEBAQEBAQEBAQEBAQEBAQEBAQEBAQEBAQEBAQEBAQEBAQEBAQAAAAAAAAAAAAEAAAEAAAAAAAAAAAAAAAAAAAAAAAAAAAEBAAEBAQEBAQEBAQEBAQEBAQEBAQEBAQEBAQEBAQABAQEBAQEBAQEBAQEBAQEBAQEBAQEBAQEBAQEBAQEBAQEBAQEBAQEBAQEBAQEBAQEBAQEBAQEBAQEBAQEBAQEAAQEBAQEBAQEBAQEBAQEBAQEBAQEBAQEBAQEBAQEBAQEBAQEBAQEBAQEBAQEBAQEBAQEBAQEBAQEBAQEBAQEBAQEBAQEBAQEBAQEBAQEBAQEBAQEBAQEBAQEBAQEBAQEBAQEBAQEBAQEBAQEBAQEBAQEBAQEBAQEBAQEBAQEBAQEAAAAAAAAAAAAAAQAAAgAAAAAAAAAAAAAAAAAAAAAAAAMEAAAEBAQEBAQEBAQEBAUEBAQEBAQEBAQEBAQABAAGBwQEBAQEBAQEBAQEBAQEBAQEBAQEBAQEBAQEBAAEAAQABAQEBAQEBAQEBAQEBAQEBAQEBAQEBAQEBAQAAAAEAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAEAAAEAAAAAAAAAAAAAAAAAAAAAAAABAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAIAAAAAAAADAwMDAwMDAwMDAwMDAwMDAwMDAwMDAwMDAwAAAAAAAAMDAwMDAwMDAwMDAwMDAwMDAwMDAwMDAwMDAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAABAAABAAAAAAAAAAAAAAAAAAAAAAAAAQAAAAAAAAAAAAIAAAAAAgAAAAAAAAAAAAAAAAAAAAAAAwMDAwMDAwMDAwMDAwMDAwMDAwMDAwMDAwMAAAAAAAADAwMDAwMDAwMDAwMDAwMDAwMDAwMDAwMDAwAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAABOT1VOQ0VFQ0tPVVRORUNURVRFQ1JJQkVMVVNIRVRFQURTRUFSQ0hSR0VDVElWSVRZTEVOREFSVkVPVElGWVBUSU9OU0NIU0VBWVNUQVRDSEdFT1JESVJFQ1RPUlRSQ0hQQVJBTUVURVJVUkNFQlNDUklCRUFSRE9XTkFDRUlORE5LQ0tVQlNDUklCRUhUVFAvQURUUC8='
+
+
+/***/ }),
+
+/***/ 5919:
+/***/ ((module) => {
+
+module.exports = 'AGFzbQEAAAABMAhgAX8Bf2ADf39/AX9gBH9/f38Bf2AAAGADf39/AGABfwBgAn9/AGAGf39/f39/AALLAQgDZW52GHdhc21fb25faGVhZGVyc19jb21wbGV0ZQACA2VudhV3YXNtX29uX21lc3NhZ2VfYmVnaW4AAANlbnYLd2FzbV9vbl91cmwAAQNlbnYOd2FzbV9vbl9zdGF0dXMAAQNlbnYUd2FzbV9vbl9oZWFkZXJfZmllbGQAAQNlbnYUd2FzbV9vbl9oZWFkZXJfdmFsdWUAAQNlbnYMd2FzbV9vbl9ib2R5AAEDZW52GHdhc21fb25fbWVzc2FnZV9jb21wbGV0ZQAAA0ZFAwMEAAAFAAAAAAAABQEFAAUFBQAABgAAAAAGBgYGAQEBAQEBAQEBAQEBAQEBAQEBAQEBAQEBAQABAAABAQcAAAUFAwABBAUBcAESEgUDAQACBggBfwFBgNQECwfRBSIGbWVtb3J5AgALX2luaXRpYWxpemUACRlfX2luZGlyZWN0X2Z1bmN0aW9uX3RhYmxlAQALbGxodHRwX2luaXQAChhsbGh0dHBfc2hvdWxkX2tlZXBfYWxpdmUAQQxsbGh0dHBfYWxsb2MADAZtYWxsb2MARgtsbGh0dHBfZnJlZQANBGZyZWUASA9sbGh0dHBfZ2V0X3R5cGUADhVsbGh0dHBfZ2V0X2h0dHBfbWFqb3IADxVsbGh0dHBfZ2V0X2h0dHBfbWlub3IAEBFsbGh0dHBfZ2V0X21ldGhvZAARFmxsaHR0cF9nZXRfc3RhdHVzX2NvZGUAEhJsbGh0dHBfZ2V0X3VwZ3JhZGUAEwxsbGh0dHBfcmVzZXQAFA5sbGh0dHBfZXhlY3V0ZQAVFGxsaHR0cF9zZXR0aW5nc19pbml0ABYNbGxodHRwX2ZpbmlzaAAXDGxsaHR0cF9wYXVzZQAYDWxsaHR0cF9yZXN1bWUAGRtsbGh0dHBfcmVzdW1lX2FmdGVyX3VwZ3JhZGUAGhBsbGh0dHBfZ2V0X2Vycm5vABsXbGxodHRwX2dldF9lcnJvcl9yZWFzb24AHBdsbGh0dHBfc2V0X2Vycm9yX3JlYXNvbgAdFGxsaHR0cF9nZXRfZXJyb3JfcG9zAB4RbGxodHRwX2Vycm5vX25hbWUAHxJsbGh0dHBfbWV0aG9kX25hbWUAIBJsbGh0dHBfc3RhdHVzX25hbWUAIRpsbGh0dHBfc2V0X2xlbmllbnRfaGVhZGVycwAiIWxsaHR0cF9zZXRfbGVuaWVudF9jaHVua2VkX2xlbmd0aAAjHWxsaHR0cF9zZXRfbGVuaWVudF9rZWVwX2FsaXZlACQkbGxodHRwX3NldF9sZW5pZW50X3RyYW5zZmVyX2VuY29kaW5nACUYbGxodHRwX21lc3NhZ2VfbmVlZHNfZW9mAD8JFwEAQQELEQECAwQFCwYHNTk3MS8tJyspCrLgAkUCAAsIABCIgICAAAsZACAAEMKAgIAAGiAAIAI2AjggACABOgAoCxwAIAAgAC8BMiAALQAuIAAQwYCAgAAQgICAgAALKgEBf0HAABDGgICAACIBEMKAgIAAGiABQYCIgIAANgI4IAEgADoAKCABCwoAIAAQyICAgAALBwAgAC0AKAsHACAALQAqCwcAIAAtACsLBwAgAC0AKQsHACAALwEyCwcAIAAtAC4LRQEEfyAAKAIYIQEgAC0ALSECIAAtACghAyAAKAI4IQQgABDCgICAABogACAENgI4IAAgAzoAKCAAIAI6AC0gACABNgIYCxEAIAAgASABIAJqEMOAgIAACxAAIABBAEHcABDMgICAABoLZwEBf0EAIQECQCAAKAIMDQACQAJAAkACQCAALQAvDgMBAAMCCyAAKAI4IgFFDQAgASgCLCIBRQ0AIAAgARGAgICAAAAiAQ0DC0EADwsQyoCAgAAACyAAQcOWgIAANgIQQQ4hAQsgAQseAAJAIAAoAgwNACAAQdGbgIAANgIQIABBFTYCDAsLFgACQCAAKAIMQRVHDQAgAEEANgIMCwsWAAJAIAAoAgxBFkcNACAAQQA2AgwLCwcAIAAoAgwLBwAgACgCEAsJACAAIAE2AhALBwAgACgCFAsiAAJAIABBJEkNABDKgICAAAALIABBAnRBoLOAgABqKAIACyIAAkAgAEEuSQ0AEMqAgIAAAAsgAEECdEGwtICAAGooAgAL7gsBAX9B66iAgAAhAQJAAkACQAJAAkACQAJAAkACQAJAAkACQAJAAkACQAJAAkACQAJAAkACQAJAAkACQAJAAkACQAJAAkACQAJAAkACQAJAAkACQAJAAkACQAJAAkACQAJAAkACQAJAAkACQAJAAkACQAJAAkACQAJAAkACQAJAAkACQAJAAkACQAJAAkACQAJAAkACQAJAAkACQAJAAkACQAJAAkACQAJAAkACQAJAAkACQAJAAkACQAJAAkACQAJAAkACQAJAAkACQAJAAkACQAJAIABBnH9qDvQDY2IAAWFhYWFhYQIDBAVhYWFhYWFhYWFhYWFhYWFhYWFhYWFhYWFhYWFhYWFhYWFhYWFhYWFhYWFhYWFhYWFhYWFhYWFhYWFhYWFhYWFhYWFhYWFhYWFhYWFhYWFhYWFhYWFhBgcICQoLDA0OD2FhYWFhEGFhYWFhYWFhYWFhEWFhYWFhYWFhYWFhYWFhYWFhYWFhYWFhYWFhYWFhYWFhYWFhYWFhYWFhYWFhYWFhYWFhYWFhYWFhYWFhYWFhYWFhYWFhYWFhYRITFBUWFxgZGhthYWFhYWFhYWFhYWFhYWFhYWFhYWFhYWFhYWFhYWFhYWFhYWFhYWFhYWFhYWFhYWFhYWFhYWFhYWFhYWFhYWFhYWFhYWFhYWFhYWFhYWFhYWFhYWFhYWFhYWFhHB0eHyAhIiMkJSYnKCkqKywtLi8wMTIzNDU2YTc4OTphYWFhYWFhYTthYWE8YWFhYT0+P2FhYWFhYWFhQGFhQWFhYWFhYWFhYWFhYWFhYWFhYWFhYWFhYWFhYWFhYUJDREVGR0hJSktMTU5PUFFSU2FhYWFhYWFhVFVWV1hZWlthXF1hYWFhYWFhYWFhYWFhYWFhYWFhYWFhYWFhYWFhYWFeYWFhYWFhYWFhYWFhYWFhYWFhYWFhYWFhYWFhYWFhYWFhYWFhX2BhC0Hhp4CAAA8LQaShgIAADwtBy6yAgAAPC0H+sYCAAA8LQcCkgIAADwtBq6SAgAAPC0GNqICAAA8LQeKmgIAADwtBgLCAgAAPC0G5r4CAAA8LQdekgIAADwtB75+AgAAPC0Hhn4CAAA8LQfqfgIAADwtB8qCAgAAPC0Gor4CAAA8LQa6ygIAADwtBiLCAgAAPC0Hsp4CAAA8LQYKigIAADwtBjp2AgAAPC0HQroCAAA8LQcqjgIAADwtBxbKAgAAPC0HfnICAAA8LQdKcgIAADwtBxKCAgAAPC0HXoICAAA8LQaKfgIAADwtB7a6AgAAPC0GrsICAAA8LQdSlgIAADwtB
zK6AgAAPC0H6roCAAA8LQfyrgIAADwtB0rCAgAAPC0HxnYCAAA8LQbuggIAADwtB96uAgAAPC0GQsYCAAA8LQdexgIAADwtBoq2AgAAPC0HUp4CAAA8LQeCrgIAADwtBn6yAgAAPC0HrsYCAAA8LQdWfgIAADwtByrGAgAAPC0HepYCAAA8LQdSegIAADwtB9JyAgAAPC0GnsoCAAA8LQbGdgIAADwtBoJ2AgAAPC0G5sYCAAA8LQbywgIAADwtBkqGAgAAPC0GzpoCAAA8LQemsgIAADwtBrJ6AgAAPC0HUq4CAAA8LQfemgIAADwtBgKaAgAAPC0GwoYCAAA8LQf6egIAADwtBjaOAgAAPC0GJrYCAAA8LQfeigIAADwtBoLGAgAAPC0Gun4CAAA8LQcalgIAADwtB6J6AgAAPC0GTooCAAA8LQcKvgIAADwtBw52AgAAPC0GLrICAAA8LQeGdgIAADwtBja+AgAAPC0HqoYCAAA8LQbStgIAADwtB0q+AgAAPC0HfsoCAAA8LQdKygIAADwtB8LCAgAAPC0GpooCAAA8LQfmjgIAADwtBmZ6AgAAPC0G1rICAAA8LQZuwgIAADwtBkrKAgAAPC0G2q4CAAA8LQcKigIAADwtB+LKAgAAPC0GepYCAAA8LQdCigIAADwtBup6AgAAPC0GBnoCAAA8LEMqAgIAAAAtB1qGAgAAhAQsgAQsWACAAIAAtAC1B/gFxIAFBAEdyOgAtCxkAIAAgAC0ALUH9AXEgAUEAR0EBdHI6AC0LGQAgACAALQAtQfsBcSABQQBHQQJ0cjoALQsZACAAIAAtAC1B9wFxIAFBAEdBA3RyOgAtCy4BAn9BACEDAkAgACgCOCIERQ0AIAQoAgAiBEUNACAAIAQRgICAgAAAIQMLIAMLSQECf0EAIQMCQCAAKAI4IgRFDQAgBCgCBCIERQ0AIAAgASACIAFrIAQRgYCAgAAAIgNBf0cNACAAQcaRgIAANgIQQRghAwsgAwsuAQJ/QQAhAwJAIAAoAjgiBEUNACAEKAIwIgRFDQAgACAEEYCAgIAAACEDCyADC0kBAn9BACEDAkAgACgCOCIERQ0AIAQoAggiBEUNACAAIAEgAiABayAEEYGAgIAAACIDQX9HDQAgAEH2ioCAADYCEEEYIQMLIAMLLgECf0EAIQMCQCAAKAI4IgRFDQAgBCgCNCIERQ0AIAAgBBGAgICAAAAhAwsgAwtJAQJ/QQAhAwJAIAAoAjgiBEUNACAEKAIMIgRFDQAgACABIAIgAWsgBBGBgICAAAAiA0F/Rw0AIABB7ZqAgAA2AhBBGCEDCyADCy4BAn9BACEDAkAgACgCOCIERQ0AIAQoAjgiBEUNACAAIAQRgICAgAAAIQMLIAMLSQECf0EAIQMCQCAAKAI4IgRFDQAgBCgCECIERQ0AIAAgASACIAFrIAQRgYCAgAAAIgNBf0cNACAAQZWQgIAANgIQQRghAwsgAwsuAQJ/QQAhAwJAIAAoAjgiBEUNACAEKAI8IgRFDQAgACAEEYCAgIAAACEDCyADC0kBAn9BACEDAkAgACgCOCIERQ0AIAQoAhQiBEUNACAAIAEgAiABayAEEYGAgIAAACIDQX9HDQAgAEGqm4CAADYCEEEYIQMLIAMLLgECf0EAIQMCQCAAKAI4IgRFDQAgBCgCQCIERQ0AIAAgBBGAgICAAAAhAwsgAwtJAQJ/QQAhAwJAIAAoAjgiBEUNACAEKAIYIgRFDQAgACABIAIgAWsgBBGBgICAAAAiA0F/Rw0AIABB7ZOAgAA2AhBBGCEDCyADCy4BAn9BACEDAkAgACgCOCIERQ0AIAQoAkQiBEUNACAAIAQRgICAgAAAIQMLIAMLLgECf0EAIQMCQCAAKAI4IgRFDQAgBCgCJCIERQ0AIAAgBBGAgICAAAAhAwsgAwsuAQJ/QQAhAwJAIAAoAjgiBEUNACAEKAIsIgRFDQAgACAEEYCAgIAAACEDCyADC0kBAn9BACEDAkAgACgCOCIERQ0AIAQoAigiBEUNACAAIAEgAiABayAEEYGAgIAAACIDQX9HDQAgAEH2iICAADYCEEEYIQMLIAMLLgECf0EAIQMCQCAAKAI4IgRFDQAgBCgCUCIERQ0AIAAgBBGAgICAAAAhAwsgAwtJAQJ/QQAhAwJAIAAoAjgiBEUNACAEKAIcIgRFDQAgACABIAIgAWsgBBGBgICAAAAiA0F/Rw0AIABBwpmAgAA2AhBBGCEDCyADCy4BAn9BACEDAkAgACgCOCIERQ0AIAQoAkgiBEUNACAAIAQRgICAgAAAIQMLIAMLSQECf0EAIQMCQCAAKAI4IgRFDQAgBCgCICIERQ0AIAAgASACIAFrIAQRgYCAgAAAIgNBf0cNACAAQZSUgIAANgIQQRghAwsgAwsuAQJ/QQAhAwJAIAAoAjgiBEUNACAEKAJMIgRFDQAgACAEEYCAgIAAACEDCyADCy4BAn9BACEDAkAgACgCOCIERQ0AIAQoAlQiBEUNACAAIAQRgICAgAAAIQMLIAMLLgECf0EAIQMCQCAAKAI4IgRFDQAgBCgCWCIERQ0AIAAgBBGAgICAAAAhAwsgAwtFAQF/AkACQCAALwEwQRRxQRRHDQBBASEDIAAtAChBAUYNASAALwEyQeUARiEDDAELIAAtAClBBUYhAwsgACADOgAuQQAL/gEBA39BASEDAkAgAC8BMCIEQQhxDQAgACkDIEIAUiEDCwJAAkAgAC0ALkUNAEEBIQUgAC0AKUEFRg0BQQEhBSAEQcAAcUUgA3FBAUcNAQtBACEFIARBwABxDQBBAiEFIARB//8DcSIDQQhxDQACQCADQYAEcUUNAAJAIAAtAChBAUcNACAALQAtQQpxDQBBBQ8LQQQPCwJAIANBIHENAAJAIAAtAChBAUYNACAALwEyQf//A3EiAEGcf2pB5ABJDQAgAEHMAUYNACAAQbACRg0AQQQhBSAEQShxRQ0CIANBiARxQYAERg0CC0EADwtBAEEDIAApAyBQGyEFCyAFC2IBAn9BACEBAkAgAC0AKEEBRg0AIAAvATJB//8DcSICQZx/akHkAEkNACACQcwBRg0AIAJBsAJGDQAgAC8BMCIAQcAAcQ0AQQEhASAAQYgEcUGABEYNACAAQShxRSEBCyABC6cBAQN/AkACQAJAIAAtACpFDQAgAC0AK0UNAEEAIQMgAC8BMCIEQQJxRQ0BDAILQQAhAyAALwEwIgRBAXFFDQELQQEhAyAALQAoQQFGDQAgAC8BMkH//wNxIgVBnH9qQeQASQ0AIAVBzAFGDQAgBUGwAkYNACAEQcAAcQ0AQQAhAyAEQYgEcUGABEYNACAEQShxQQBHIQMLIABBADsBMCAAQQA6AC8gAwuZAQECfwJAAkACQCAALQAqRQ0AIAAtACtFDQBBACEBIAAvATAiAkECcUUNAQwCC0EAIQEgAC8BMCICQQFxRQ0BC0EBIQEgAC0AKEEBRg0AIAAvATJB//8DcSIAQZx/akHkAEkNACAAQcwBRg0
AIABBsAJGDQAgAkHAAHENAEEAIQEgAkGIBHFBgARGDQAgAkEocUEARyEBCyABC0kBAXsgAEEQav0MAAAAAAAAAAAAAAAAAAAAACIB/QsDACAAIAH9CwMAIABBMGogAf0LAwAgAEEgaiAB/QsDACAAQd0BNgIcQQALewEBfwJAIAAoAgwiAw0AAkAgACgCBEUNACAAIAE2AgQLAkAgACABIAIQxICAgAAiAw0AIAAoAgwPCyAAIAM2AhxBACEDIAAoAgQiAUUNACAAIAEgAiAAKAIIEYGAgIAAACIBRQ0AIAAgAjYCFCAAIAE2AgwgASEDCyADC+TzAQMOfwN+BH8jgICAgABBEGsiAySAgICAACABIQQgASEFIAEhBiABIQcgASEIIAEhCSABIQogASELIAEhDCABIQ0gASEOIAEhDwJAAkACQAJAAkACQAJAAkACQAJAAkACQAJAAkACQAJAAkACQAJAAkACQAJAAkACQAJAAkACQAJAAkACQAJAAkACQAJAAkACQAJAAkACQAJAAkACQAJAAkACQAJAAkACQAJAAkACQAJAAkACQAJAAkACQAJAAkACQAJAAkACQAJAAkACQAJAAkACQAJAAkACQAJAAkACQAJAAkACQAJAAkACQAJAAkACQAJAAkACQAJAAkACQAJAAkACQAJAAkACQAJAAkACQAJAAkACQAJAAkACQAJAAkACQAJAAkACQAJAAkACQAJAAkACQAJAAkACQAJAAkACQAJAAkACQAJAAkACQAJAAkACQAJAAkACQAJAAkACQAJAAkACQAJAAkACQAJAAkACQAJAAkACQAJAAkACQAJAAkACQAJAAkACQAJAAkACQAJAAkACQAJAAkACQAJAAkACQAJAAkACQAJAAkACQAJAAkACQAJAAkACQAJAAkACQAJAAkACQAJAAkACQAJAAkACQAJAAkACQAJAAkACQAJAAkACQAJAAkACQAJAAkACQAJAAkACQAJAAkACQAJAAkACQAJAAkACQAJAAkACQCAAKAIcIhBBf2oO3QHaAQHZAQIDBAUGBwgJCgsMDQ7YAQ8Q1wEREtYBExQVFhcYGRob4AHfARwdHtUBHyAhIiMkJdQBJicoKSorLNMB0gEtLtEB0AEvMDEyMzQ1Njc4OTo7PD0+P0BBQkNERUbbAUdISUrPAc4BS80BTMwBTU5PUFFSU1RVVldYWVpbXF1eX2BhYmNkZWZnaGlqa2xtbm9wcXJzdHV2d3h5ent8fX5/gAGBAYIBgwGEAYUBhgGHAYgBiQGKAYsBjAGNAY4BjwGQAZEBkgGTAZQBlQGWAZcBmAGZAZoBmwGcAZ0BngGfAaABoQGiAaMBpAGlAaYBpwGoAakBqgGrAawBrQGuAa8BsAGxAbIBswG0AbUBtgG3AcsBygG4AckBuQHIAboBuwG8Ab0BvgG/AcABwQHCAcMBxAHFAcYBANwBC0EAIRAMxgELQQ4hEAzFAQtBDSEQDMQBC0EPIRAMwwELQRAhEAzCAQtBEyEQDMEBC0EUIRAMwAELQRUhEAy/AQtBFiEQDL4BC0EXIRAMvQELQRghEAy8AQtBGSEQDLsBC0EaIRAMugELQRshEAy5AQtBHCEQDLgBC0EIIRAMtwELQR0hEAy2AQtBICEQDLUBC0EfIRAMtAELQQchEAyzAQtBISEQDLIBC0EiIRAMsQELQR4hEAywAQtBIyEQDK8BC0ESIRAMrgELQREhEAytAQtBJCEQDKwBC0ElIRAMqwELQSYhEAyqAQtBJyEQDKkBC0HDASEQDKgBC0EpIRAMpwELQSshEAymAQtBLCEQDKUBC0EtIRAMpAELQS4hEAyjAQtBLyEQDKIBC0HEASEQDKEBC0EwIRAMoAELQTQhEAyfAQtBDCEQDJ4BC0ExIRAMnQELQTIhEAycAQtBMyEQDJsBC0E5IRAMmgELQTUhEAyZAQtBxQEhEAyYAQtBCyEQDJcBC0E6IRAMlgELQTYhEAyVAQtBCiEQDJQBC0E3IRAMkwELQTghEAySAQtBPCEQDJEBC0E7IRAMkAELQT0hEAyPAQtBCSEQDI4BC0EoIRAMjQELQT4hEAyMAQtBPyEQDIsBC0HAACEQDIoBC0HBACEQDIkBC0HCACEQDIgBC0HDACEQDIcBC0HEACEQDIYBC0HFACEQDIUBC0HGACEQDIQBC0EqIRAMgwELQccAIRAMggELQcgAIRAMgQELQckAIRAMgAELQcoAIRAMfwtBywAhEAx+C0HNACEQDH0LQcwAIRAMfAtBzgAhEAx7C0HPACEQDHoLQdAAIRAMeQtB0QAhEAx4C0HSACEQDHcLQdMAIRAMdgtB1AAhEAx1C0HWACEQDHQLQdUAIRAMcwtBBiEQDHILQdcAIRAMcQtBBSEQDHALQdgAIRAMbwtBBCEQDG4LQdkAIRAMbQtB2gAhEAxsC0HbACEQDGsLQdwAIRAMagtBAyEQDGkLQd0AIRAMaAtB3gAhEAxnC0HfACEQDGYLQeEAIRAMZQtB4AAhEAxkC0HiACEQDGMLQeMAIRAMYgtBAiEQDGELQeQAIRAMYAtB5QAhEAxfC0HmACEQDF4LQecAIRAMXQtB6AAhEAxcC0HpACEQDFsLQeoAIRAMWgtB6wAhEAxZC0HsACEQDFgLQe0AIRAMVwtB7gAhEAxWC0HvACEQDFULQfAAIRAMVAtB8QAhEAxTC0HyACEQDFILQfMAIRAMUQtB9AAhEAxQC0H1ACEQDE8LQfYAIRAMTgtB9wAhEAxNC0H4ACEQDEwLQfkAIRAMSwtB+gAhEAxKC0H7ACEQDEkLQfwAIRAMSAtB/QAhEAxHC0H+ACEQDEYLQf8AIRAMRQtBgAEhEAxEC0GBASEQDEMLQYIBIRAMQgtBgwEhEAxBC0GEASEQDEALQYUBIRAMPwtBhgEhEAw+C0GHASEQDD0LQYgBIRAMPAtBiQEhEAw7C0GKASEQDDoLQYsBIRAMOQtBjAEhEAw4C0GNASEQDDcLQY4BIRAMNgtBjwEhEAw1C0GQASEQDDQLQZEBIRAMMwtBkgEhEAwyC0GTASEQDDELQZQBIRAMMAtBlQEhEAwvC0GWASEQDC4LQZcBIRAMLQtBmAEhEAwsC0GZASEQDCsLQZoBIRAMKgtBmwEhEAwpC0GcASEQDCgLQZ0BIRAMJwtBngEhEAwmC0GfASEQDCULQaABIRAMJAtBoQEhEAwjC0GiASEQDCILQaMBIRAMIQtBpAEhEAwgC0GlASEQDB8LQaYBIRAMHgtBpwEhEAwdC0GoASEQDBwLQakBIRAMGwtBqgEhEAwaC0GrASEQDBkLQawBIRAMGAtBrQEhEAwXC0GuASEQDBYLQQEhEAwVC0GvASEQDBQLQbABIRAMEwtBsQEhEAwSC0GzASEQDBELQbIBIRAMEAtBtAEhEAwPC0G1ASEQDA4LQbYBIRAMDQtBtwEhEAwMC0G4ASEQDAsLQbkBIRAMCgtBugEhEAwJC0G7ASEQDAgLQcYBIRAMBwtBvAEhEAwGC0G9ASEQDAULQb4BIRAMBA
tBvwEhEAwDC0HAASEQDAILQcIBIRAMAQtBwQEhEAsDQAJAAkACQAJAAkACQAJAAkACQAJAAkACQAJAAkACQAJAAkACQAJAAkACQAJAAkACQAJAAkACQAJAAkACQAJAAkACQAJAAkACQAJAAkACQAJAAkACQAJAAkACQAJAAkACQAJAAkACQAJAAkACQAJAAkACQAJAAkACQAJAAkACQAJAAkACQAJAAkACQAJAAkACQAJAAkACQAJAAkACQAJAAkACQAJAAkACQAJAAkACQAJAAkACQAJAAkACQAJAAkACQAJAAkACQAJAAkACQAJAAkACQAJAAkACQAJAAkACQAJAAkACQAJAAkACQAJAAkACQAJAAkACQAJAAkACQAJAAkACQAJAAkACQAJAAkACQAJAAkACQAJAAkACQAJAAkACQAJAAkACQAJAAkACQAJAAkACQAJAAkACQAJAAkACQAJAAkACQAJAAkACQAJAAkACQAJAAkACQAJAAkACQAJAAkACQAJAAkACQAJAAkACQAJAAkACQAJAAkACQAJAAkACQAJAAkACQAJAAkACQAJAAkACQAJAAkACQAJAAkACQAJAAkACQAJAAkACQAJAAkACQAJAAkACQAJAAkACQAJAAkACQAJAAkACQAJAAkACQAJAAkACQAJAAkACQAJAAkACQAJAAkACQAJAAkACQAJAAkACQAJAAkACQAJAAkACQAJAAkACQAJAAkACQAJAAkACQAJAAkACQAJAAkACQAJAAkACQAJAAkACQAJAAkACQAJAAkACQAJAAkACQAJAAkACQAJAAkACQAJAAkACQAJAAkACQAJAAkACQAJAAkACQAJAAkACQAJAAkACQAJAAkACQAJAAkACQAJAAkACQAJAAkACQAJAAkACQAJAAkACQAJAAkACQAJAAkACQAJAAkACQAJAAkACQAJAAkACQAJAAkACQAJAAkACQAJAAkACQAJAAkACQAJAAkACQAJAAkACQAJAAkACQAJAAkACQAJAAkACQAJAAkACQAJAAkACQAJAAkACQAJAAkACQAJAAkACQAJAAkACQAJAAkACQAJAAkACQAJAAkACQAJAAkACQAJAAkACQAJAAkACQAJAAkACQAJAAkACQAJAAkACQAJAAkACQAJAAkACQAJAAkACQAJAAkACQAJAAkACQAJAAkACQAJAAkACQAJAAkACQAJAAkACQAJAAkACQAJAAkACQAJAAkACQAJAAkACQAJAAkACQAJAAkACQAJAAkACQAJAAkACQAJAAkACQAJAAkACQAJAAkACQAJAAkACQAJAAkACQAJAAkACQAJAAkACQAJAIBAOxwEAAQIDBAUGBwgJCgsMDQ4PEBESExQVFhcYGRobHB4fICEjJSg/QEFERUZHSElKS0xNT1BRUlPeA1dZW1xdYGJlZmdoaWprbG1vcHFyc3R1dnd4eXp7fH1+gAGCAYUBhgGHAYkBiwGMAY0BjgGPAZABkQGUAZUBlgGXAZgBmQGaAZsBnAGdAZ4BnwGgAaEBogGjAaQBpQGmAacBqAGpAaoBqwGsAa0BrgGvAbABsQGyAbMBtAG1AbYBtwG4AbkBugG7AbwBvQG+Ab8BwAHBAcIBwwHEAcUBxgHHAcgByQHKAcsBzAHNAc4BzwHQAdEB0gHTAdQB1QHWAdcB2AHZAdoB2wHcAd0B3gHgAeEB4gHjAeQB5QHmAecB6AHpAeoB6wHsAe0B7gHvAfAB8QHyAfMBmQKkArAC/gL+AgsgASIEIAJHDfMBQd0BIRAM/wMLIAEiECACRw3dAUHDASEQDP4DCyABIgEgAkcNkAFB9wAhEAz9AwsgASIBIAJHDYYBQe8AIRAM/AMLIAEiASACRw1/QeoAIRAM+wMLIAEiASACRw17QegAIRAM+gMLIAEiASACRw14QeYAIRAM+QMLIAEiASACRw0aQRghEAz4AwsgASIBIAJHDRRBEiEQDPcDCyABIgEgAkcNWUHFACEQDPYDCyABIgEgAkcNSkE/IRAM9QMLIAEiASACRw1IQTwhEAz0AwsgASIBIAJHDUFBMSEQDPMDCyAALQAuQQFGDesDDIcCCyAAIAEiASACEMCAgIAAQQFHDeYBIABCADcDIAznAQsgACABIgEgAhC0gICAACIQDecBIAEhAQz1AgsCQCABIgEgAkcNAEEGIRAM8AMLIAAgAUEBaiIBIAIQu4CAgAAiEA3oASABIQEMMQsgAEIANwMgQRIhEAzVAwsgASIQIAJHDStBHSEQDO0DCwJAIAEiASACRg0AIAFBAWohAUEQIRAM1AMLQQchEAzsAwsgAEIAIAApAyAiESACIAEiEGutIhJ9IhMgEyARVhs3AyAgESASViIURQ3lAUEIIRAM6wMLAkAgASIBIAJGDQAgAEGJgICAADYCCCAAIAE2AgQgASEBQRQhEAzSAwtBCSEQDOoDCyABIQEgACkDIFAN5AEgASEBDPICCwJAIAEiASACRw0AQQshEAzpAwsgACABQQFqIgEgAhC2gICAACIQDeUBIAEhAQzyAgsgACABIgEgAhC4gICAACIQDeUBIAEhAQzyAgsgACABIgEgAhC4gICAACIQDeYBIAEhAQwNCyAAIAEiASACELqAgIAAIhAN5wEgASEBDPACCwJAIAEiASACRw0AQQ8hEAzlAwsgAS0AACIQQTtGDQggEEENRw3oASABQQFqIQEM7wILIAAgASIBIAIQuoCAgAAiEA3oASABIQEM8gILA0ACQCABLQAAQfC1gIAAai0AACIQQQFGDQAgEEECRw3rASAAKAIEIRAgAEEANgIEIAAgECABQQFqIgEQuYCAgAAiEA3qASABIQEM9AILIAFBAWoiASACRw0AC0ESIRAM4gMLIAAgASIBIAIQuoCAgAAiEA3pASABIQEMCgsgASIBIAJHDQZBGyEQDOADCwJAIAEiASACRw0AQRYhEAzgAwsgAEGKgICAADYCCCAAIAE2AgQgACABIAIQuICAgAAiEA3qASABIQFBICEQDMYDCwJAIAEiASACRg0AA0ACQCABLQAAQfC3gIAAai0AACIQQQJGDQACQCAQQX9qDgTlAewBAOsB7AELIAFBAWohAUEIIRAMyAMLIAFBAWoiASACRw0AC0EVIRAM3wMLQRUhEAzeAwsDQAJAIAEtAABB8LmAgABqLQAAIhBBAkYNACAQQX9qDgTeAewB4AHrAewBCyABQQFqIgEgAkcNAAtBGCEQDN0DCwJAIAEiASACRg0AIABBi4CAgAA2AgggACABNgIEIAEhAUEHIRAMxAMLQRkhEAzcAwsgAUEBaiEBDAILAkAgASIUIAJHDQBBGiEQDNsDCyAUIQECQCAULQAAQXNqDhTdAu4C7gLuAu4C7gLuAu4C7gLuAu4C7gLuAu4C7gLuAu4C7gLuAgDuAgtBACEQIABBADYCHCAAQa+LgIAANgIQIABBAjYCDCAAIBRBAWo2AhQM2gMLAkAgAS0AACIQQTtGDQAgEEENRw3oASABQQFqIQEM5QILIAFBAWohA
QtBIiEQDL8DCwJAIAEiECACRw0AQRwhEAzYAwtCACERIBAhASAQLQAAQVBqDjfnAeYBAQIDBAUGBwgAAAAAAAAACQoLDA0OAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAPEBESExQAC0EeIRAMvQMLQgIhEQzlAQtCAyERDOQBC0IEIREM4wELQgUhEQziAQtCBiERDOEBC0IHIREM4AELQgghEQzfAQtCCSERDN4BC0IKIREM3QELQgshEQzcAQtCDCERDNsBC0INIREM2gELQg4hEQzZAQtCDyERDNgBC0IKIREM1wELQgshEQzWAQtCDCERDNUBC0INIREM1AELQg4hEQzTAQtCDyERDNIBC0IAIRECQAJAAkACQAJAAkACQAJAAkACQAJAAkACQAJAAkACQAJAAkACQAJAIBAtAABBUGoON+UB5AEAAQIDBAUGB+YB5gHmAeYB5gHmAeYBCAkKCwwN5gHmAeYB5gHmAeYB5gHmAeYB5gHmAeYB5gHmAeYB5gHmAeYB5gHmAeYB5gHmAeYB5gHmAQ4PEBESE+YBC0ICIREM5AELQgMhEQzjAQtCBCERDOIBC0IFIREM4QELQgYhEQzgAQtCByERDN8BC0IIIREM3gELQgkhEQzdAQtCCiERDNwBC0ILIREM2wELQgwhEQzaAQtCDSERDNkBC0IOIREM2AELQg8hEQzXAQtCCiERDNYBC0ILIREM1QELQgwhEQzUAQtCDSERDNMBC0IOIREM0gELQg8hEQzRAQsgAEIAIAApAyAiESACIAEiEGutIhJ9IhMgEyARVhs3AyAgESASViIURQ3SAUEfIRAMwAMLAkAgASIBIAJGDQAgAEGJgICAADYCCCAAIAE2AgQgASEBQSQhEAynAwtBICEQDL8DCyAAIAEiECACEL6AgIAAQX9qDgW2AQDFAgHRAdIBC0ERIRAMpAMLIABBAToALyAQIQEMuwMLIAEiASACRw3SAUEkIRAMuwMLIAEiDSACRw0eQcYAIRAMugMLIAAgASIBIAIQsoCAgAAiEA3UASABIQEMtQELIAEiECACRw0mQdAAIRAMuAMLAkAgASIBIAJHDQBBKCEQDLgDCyAAQQA2AgQgAEGMgICAADYCCCAAIAEgARCxgICAACIQDdMBIAEhAQzYAQsCQCABIhAgAkcNAEEpIRAMtwMLIBAtAAAiAUEgRg0UIAFBCUcN0wEgEEEBaiEBDBULAkAgASIBIAJGDQAgAUEBaiEBDBcLQSohEAy1AwsCQCABIhAgAkcNAEErIRAMtQMLAkAgEC0AACIBQQlGDQAgAUEgRw3VAQsgAC0ALEEIRg3TASAQIQEMkQMLAkAgASIBIAJHDQBBLCEQDLQDCyABLQAAQQpHDdUBIAFBAWohAQzJAgsgASIOIAJHDdUBQS8hEAyyAwsDQAJAIAEtAAAiEEEgRg0AAkAgEEF2ag4EANwB3AEA2gELIAEhAQzgAQsgAUEBaiIBIAJHDQALQTEhEAyxAwtBMiEQIAEiFCACRg2wAyACIBRrIAAoAgAiAWohFSAUIAFrQQNqIRYCQANAIBQtAAAiF0EgciAXIBdBv39qQf8BcUEaSRtB/wFxIAFB8LuAgABqLQAARw0BAkAgAUEDRw0AQQYhAQyWAwsgAUEBaiEBIBRBAWoiFCACRw0ACyAAIBU2AgAMsQMLIABBADYCACAUIQEM2QELQTMhECABIhQgAkYNrwMgAiAUayAAKAIAIgFqIRUgFCABa0EIaiEWAkADQCAULQAAIhdBIHIgFyAXQb9/akH/AXFBGkkbQf8BcSABQfS7gIAAai0AAEcNAQJAIAFBCEcNAEEFIQEMlQMLIAFBAWohASAUQQFqIhQgAkcNAAsgACAVNgIADLADCyAAQQA2AgAgFCEBDNgBC0E0IRAgASIUIAJGDa4DIAIgFGsgACgCACIBaiEVIBQgAWtBBWohFgJAA0AgFC0AACIXQSByIBcgF0G/f2pB/wFxQRpJG0H/AXEgAUHQwoCAAGotAABHDQECQCABQQVHDQBBByEBDJQDCyABQQFqIQEgFEEBaiIUIAJHDQALIAAgFTYCAAyvAwsgAEEANgIAIBQhAQzXAQsCQCABIgEgAkYNAANAAkAgAS0AAEGAvoCAAGotAAAiEEEBRg0AIBBBAkYNCiABIQEM3QELIAFBAWoiASACRw0AC0EwIRAMrgMLQTAhEAytAwsCQCABIgEgAkYNAANAAkAgAS0AACIQQSBGDQAgEEF2ag4E2QHaAdoB2QHaAQsgAUEBaiIBIAJHDQALQTghEAytAwtBOCEQDKwDCwNAAkAgAS0AACIQQSBGDQAgEEEJRw0DCyABQQFqIgEgAkcNAAtBPCEQDKsDCwNAAkAgAS0AACIQQSBGDQACQAJAIBBBdmoOBNoBAQHaAQALIBBBLEYN2wELIAEhAQwECyABQQFqIgEgAkcNAAtBPyEQDKoDCyABIQEM2wELQcAAIRAgASIUIAJGDagDIAIgFGsgACgCACIBaiEWIBQgAWtBBmohFwJAA0AgFC0AAEEgciABQYDAgIAAai0AAEcNASABQQZGDY4DIAFBAWohASAUQQFqIhQgAkcNAAsgACAWNgIADKkDCyAAQQA2AgAgFCEBC0E2IRAMjgMLAkAgASIPIAJHDQBBwQAhEAynAwsgAEGMgICAADYCCCAAIA82AgQgDyEBIAAtACxBf2oOBM0B1QHXAdkBhwMLIAFBAWohAQzMAQsCQCABIgEgAkYNAANAAkAgAS0AACIQQSByIBAgEEG/f2pB/wFxQRpJG0H/AXEiEEEJRg0AIBBBIEYNAAJAAkACQAJAIBBBnX9qDhMAAwMDAwMDAwEDAwMDAwMDAwMCAwsgAUEBaiEBQTEhEAyRAwsgAUEBaiEBQTIhEAyQAwsgAUEBaiEBQTMhEAyPAwsgASEBDNABCyABQQFqIgEgAkcNAAtBNSEQDKUDC0E1IRAMpAMLAkAgASIBIAJGDQADQAJAIAEtAABBgLyAgABqLQAAQQFGDQAgASEBDNMBCyABQQFqIgEgAkcNAAtBPSEQDKQDC0E9IRAMowMLIAAgASIBIAIQsICAgAAiEA3WASABIQEMAQsgEEEBaiEBC0E8IRAMhwMLAkAgASIBIAJHDQBBwgAhEAygAwsCQANAAkAgAS0AAEF3ag4YAAL+Av4ChAP+Av4C/gL+Av4C/gL+Av4C/gL+Av4C/gL+Av4C/gL+Av4C/gIA/gILIAFBAWoiASACRw0AC0HCACEQDKADCyABQQFqIQEgAC0ALUEBcUUNvQEgASEBC0EsIRAMhQMLIAEiASACRw3TAUHEACEQDJ0DCwNAAkAgAS0AAEGQwICAAGotAABBAUYNACABIQEMtwILIAFBAWoiASACRw0AC0HFACEQDJwDCyANLQAAIhBBIEYNswEgEEE6Rw2BAyAAKAIEIQEgAEEANgIEIAAgASANEK+AgIAAIgEN0AEgDUEBaiEBDLMCC0HHACEQIAEiDSACRg2aAyACIA1rIAAoAgAiAWoh
FiANIAFrQQVqIRcDQCANLQAAIhRBIHIgFCAUQb9/akH/AXFBGkkbQf8BcSABQZDCgIAAai0AAEcNgAMgAUEFRg30AiABQQFqIQEgDUEBaiINIAJHDQALIAAgFjYCAAyaAwtByAAhECABIg0gAkYNmQMgAiANayAAKAIAIgFqIRYgDSABa0EJaiEXA0AgDS0AACIUQSByIBQgFEG/f2pB/wFxQRpJG0H/AXEgAUGWwoCAAGotAABHDf8CAkAgAUEJRw0AQQIhAQz1AgsgAUEBaiEBIA1BAWoiDSACRw0ACyAAIBY2AgAMmQMLAkAgASINIAJHDQBByQAhEAyZAwsCQAJAIA0tAAAiAUEgciABIAFBv39qQf8BcUEaSRtB/wFxQZJ/ag4HAIADgAOAA4ADgAMBgAMLIA1BAWohAUE+IRAMgAMLIA1BAWohAUE/IRAM/wILQcoAIRAgASINIAJGDZcDIAIgDWsgACgCACIBaiEWIA0gAWtBAWohFwNAIA0tAAAiFEEgciAUIBRBv39qQf8BcUEaSRtB/wFxIAFBoMKAgABqLQAARw39AiABQQFGDfACIAFBAWohASANQQFqIg0gAkcNAAsgACAWNgIADJcDC0HLACEQIAEiDSACRg2WAyACIA1rIAAoAgAiAWohFiANIAFrQQ5qIRcDQCANLQAAIhRBIHIgFCAUQb9/akH/AXFBGkkbQf8BcSABQaLCgIAAai0AAEcN/AIgAUEORg3wAiABQQFqIQEgDUEBaiINIAJHDQALIAAgFjYCAAyWAwtBzAAhECABIg0gAkYNlQMgAiANayAAKAIAIgFqIRYgDSABa0EPaiEXA0AgDS0AACIUQSByIBQgFEG/f2pB/wFxQRpJG0H/AXEgAUHAwoCAAGotAABHDfsCAkAgAUEPRw0AQQMhAQzxAgsgAUEBaiEBIA1BAWoiDSACRw0ACyAAIBY2AgAMlQMLQc0AIRAgASINIAJGDZQDIAIgDWsgACgCACIBaiEWIA0gAWtBBWohFwNAIA0tAAAiFEEgciAUIBRBv39qQf8BcUEaSRtB/wFxIAFB0MKAgABqLQAARw36AgJAIAFBBUcNAEEEIQEM8AILIAFBAWohASANQQFqIg0gAkcNAAsgACAWNgIADJQDCwJAIAEiDSACRw0AQc4AIRAMlAMLAkACQAJAAkAgDS0AACIBQSByIAEgAUG/f2pB/wFxQRpJG0H/AXFBnX9qDhMA/QL9Av0C/QL9Av0C/QL9Av0C/QL9Av0CAf0C/QL9AgID/QILIA1BAWohAUHBACEQDP0CCyANQQFqIQFBwgAhEAz8AgsgDUEBaiEBQcMAIRAM+wILIA1BAWohAUHEACEQDPoCCwJAIAEiASACRg0AIABBjYCAgAA2AgggACABNgIEIAEhAUHFACEQDPoCC0HPACEQDJIDCyAQIQECQAJAIBAtAABBdmoOBAGoAqgCAKgCCyAQQQFqIQELQSchEAz4AgsCQCABIgEgAkcNAEHRACEQDJEDCwJAIAEtAABBIEYNACABIQEMjQELIAFBAWohASAALQAtQQFxRQ3HASABIQEMjAELIAEiFyACRw3IAUHSACEQDI8DC0HTACEQIAEiFCACRg2OAyACIBRrIAAoAgAiAWohFiAUIAFrQQFqIRcDQCAULQAAIAFB1sKAgABqLQAARw3MASABQQFGDccBIAFBAWohASAUQQFqIhQgAkcNAAsgACAWNgIADI4DCwJAIAEiASACRw0AQdUAIRAMjgMLIAEtAABBCkcNzAEgAUEBaiEBDMcBCwJAIAEiASACRw0AQdYAIRAMjQMLAkACQCABLQAAQXZqDgQAzQHNAQHNAQsgAUEBaiEBDMcBCyABQQFqIQFBygAhEAzzAgsgACABIgEgAhCugICAACIQDcsBIAEhAUHNACEQDPICCyAALQApQSJGDYUDDKYCCwJAIAEiASACRw0AQdsAIRAMigMLQQAhFEEBIRdBASEWQQAhEAJAAkACQAJAAkACQAJAAkACQCABLQAAQVBqDgrUAdMBAAECAwQFBgjVAQtBAiEQDAYLQQMhEAwFC0EEIRAMBAtBBSEQDAMLQQYhEAwCC0EHIRAMAQtBCCEQC0EAIRdBACEWQQAhFAzMAQtBCSEQQQEhFEEAIRdBACEWDMsBCwJAIAEiASACRw0AQd0AIRAMiQMLIAEtAABBLkcNzAEgAUEBaiEBDKYCCyABIgEgAkcNzAFB3wAhEAyHAwsCQCABIgEgAkYNACAAQY6AgIAANgIIIAAgATYCBCABIQFB0AAhEAzuAgtB4AAhEAyGAwtB4QAhECABIgEgAkYNhQMgAiABayAAKAIAIhRqIRYgASAUa0EDaiEXA0AgAS0AACAUQeLCgIAAai0AAEcNzQEgFEEDRg3MASAUQQFqIRQgAUEBaiIBIAJHDQALIAAgFjYCAAyFAwtB4gAhECABIgEgAkYNhAMgAiABayAAKAIAIhRqIRYgASAUa0ECaiEXA0AgAS0AACAUQebCgIAAai0AAEcNzAEgFEECRg3OASAUQQFqIRQgAUEBaiIBIAJHDQALIAAgFjYCAAyEAwtB4wAhECABIgEgAkYNgwMgAiABayAAKAIAIhRqIRYgASAUa0EDaiEXA0AgAS0AACAUQenCgIAAai0AAEcNywEgFEEDRg3OASAUQQFqIRQgAUEBaiIBIAJHDQALIAAgFjYCAAyDAwsCQCABIgEgAkcNAEHlACEQDIMDCyAAIAFBAWoiASACEKiAgIAAIhANzQEgASEBQdYAIRAM6QILAkAgASIBIAJGDQADQAJAIAEtAAAiEEEgRg0AAkACQAJAIBBBuH9qDgsAAc8BzwHPAc8BzwHPAc8BzwECzwELIAFBAWohAUHSACEQDO0CCyABQQFqIQFB0wAhEAzsAgsgAUEBaiEBQdQAIRAM6wILIAFBAWoiASACRw0AC0HkACEQDIIDC0HkACEQDIEDCwNAAkAgAS0AAEHwwoCAAGotAAAiEEEBRg0AIBBBfmoOA88B0AHRAdIBCyABQQFqIgEgAkcNAAtB5gAhEAyAAwsCQCABIgEgAkYNACABQQFqIQEMAwtB5wAhEAz/AgsDQAJAIAEtAABB8MSAgABqLQAAIhBBAUYNAAJAIBBBfmoOBNIB0wHUAQDVAQsgASEBQdcAIRAM5wILIAFBAWoiASACRw0AC0HoACEQDP4CCwJAIAEiASACRw0AQekAIRAM/gILAkAgAS0AACIQQXZqDhq6AdUB1QG8AdUB1QHVAdUB1QHVAdUB1QHVAdUB1QHVAdUB1QHVAdUB1QHVAcoB1QHVAQDTAQsgAUEBaiEBC0EGIRAM4wILA0ACQCABLQAAQfDGgIAAai0AAEEBRg0AIAEhAQyeAgsgAUEBaiIBIAJHDQALQeoAIRAM+wILAkAgASIBIAJGDQAgAUEBaiEBDAMLQesAIRAM+gILAkAgASIBIAJHDQBB7AAhEAz6AgsgAUEBaiEBDAELAkAgASIBIAJHDQBB7QAhEAz5AgsgAUEBaiE
BC0EEIRAM3gILAkAgASIUIAJHDQBB7gAhEAz3AgsgFCEBAkACQAJAIBQtAABB8MiAgABqLQAAQX9qDgfUAdUB1gEAnAIBAtcBCyAUQQFqIQEMCgsgFEEBaiEBDM0BC0EAIRAgAEEANgIcIABBm5KAgAA2AhAgAEEHNgIMIAAgFEEBajYCFAz2AgsCQANAAkAgAS0AAEHwyICAAGotAAAiEEEERg0AAkACQCAQQX9qDgfSAdMB1AHZAQAEAdkBCyABIQFB2gAhEAzgAgsgAUEBaiEBQdwAIRAM3wILIAFBAWoiASACRw0AC0HvACEQDPYCCyABQQFqIQEMywELAkAgASIUIAJHDQBB8AAhEAz1AgsgFC0AAEEvRw3UASAUQQFqIQEMBgsCQCABIhQgAkcNAEHxACEQDPQCCwJAIBQtAAAiAUEvRw0AIBRBAWohAUHdACEQDNsCCyABQXZqIgRBFksN0wFBASAEdEGJgIACcUUN0wEMygILAkAgASIBIAJGDQAgAUEBaiEBQd4AIRAM2gILQfIAIRAM8gILAkAgASIUIAJHDQBB9AAhEAzyAgsgFCEBAkAgFC0AAEHwzICAAGotAABBf2oOA8kClAIA1AELQeEAIRAM2AILAkAgASIUIAJGDQADQAJAIBQtAABB8MqAgABqLQAAIgFBA0YNAAJAIAFBf2oOAssCANUBCyAUIQFB3wAhEAzaAgsgFEEBaiIUIAJHDQALQfMAIRAM8QILQfMAIRAM8AILAkAgASIBIAJGDQAgAEGPgICAADYCCCAAIAE2AgQgASEBQeAAIRAM1wILQfUAIRAM7wILAkAgASIBIAJHDQBB9gAhEAzvAgsgAEGPgICAADYCCCAAIAE2AgQgASEBC0EDIRAM1AILA0AgAS0AAEEgRw3DAiABQQFqIgEgAkcNAAtB9wAhEAzsAgsCQCABIgEgAkcNAEH4ACEQDOwCCyABLQAAQSBHDc4BIAFBAWohAQzvAQsgACABIgEgAhCsgICAACIQDc4BIAEhAQyOAgsCQCABIgQgAkcNAEH6ACEQDOoCCyAELQAAQcwARw3RASAEQQFqIQFBEyEQDM8BCwJAIAEiBCACRw0AQfsAIRAM6QILIAIgBGsgACgCACIBaiEUIAQgAWtBBWohEANAIAQtAAAgAUHwzoCAAGotAABHDdABIAFBBUYNzgEgAUEBaiEBIARBAWoiBCACRw0ACyAAIBQ2AgBB+wAhEAzoAgsCQCABIgQgAkcNAEH8ACEQDOgCCwJAAkAgBC0AAEG9f2oODADRAdEB0QHRAdEB0QHRAdEB0QHRAQHRAQsgBEEBaiEBQeYAIRAMzwILIARBAWohAUHnACEQDM4CCwJAIAEiBCACRw0AQf0AIRAM5wILIAIgBGsgACgCACIBaiEUIAQgAWtBAmohEAJAA0AgBC0AACABQe3PgIAAai0AAEcNzwEgAUECRg0BIAFBAWohASAEQQFqIgQgAkcNAAsgACAUNgIAQf0AIRAM5wILIABBADYCACAQQQFqIQFBECEQDMwBCwJAIAEiBCACRw0AQf4AIRAM5gILIAIgBGsgACgCACIBaiEUIAQgAWtBBWohEAJAA0AgBC0AACABQfbOgIAAai0AAEcNzgEgAUEFRg0BIAFBAWohASAEQQFqIgQgAkcNAAsgACAUNgIAQf4AIRAM5gILIABBADYCACAQQQFqIQFBFiEQDMsBCwJAIAEiBCACRw0AQf8AIRAM5QILIAIgBGsgACgCACIBaiEUIAQgAWtBA2ohEAJAA0AgBC0AACABQfzOgIAAai0AAEcNzQEgAUEDRg0BIAFBAWohASAEQQFqIgQgAkcNAAsgACAUNgIAQf8AIRAM5QILIABBADYCACAQQQFqIQFBBSEQDMoBCwJAIAEiBCACRw0AQYABIRAM5AILIAQtAABB2QBHDcsBIARBAWohAUEIIRAMyQELAkAgASIEIAJHDQBBgQEhEAzjAgsCQAJAIAQtAABBsn9qDgMAzAEBzAELIARBAWohAUHrACEQDMoCCyAEQQFqIQFB7AAhEAzJAgsCQCABIgQgAkcNAEGCASEQDOICCwJAAkAgBC0AAEG4f2oOCADLAcsBywHLAcsBywEBywELIARBAWohAUHqACEQDMkCCyAEQQFqIQFB7QAhEAzIAgsCQCABIgQgAkcNAEGDASEQDOECCyACIARrIAAoAgAiAWohECAEIAFrQQJqIRQCQANAIAQtAAAgAUGAz4CAAGotAABHDckBIAFBAkYNASABQQFqIQEgBEEBaiIEIAJHDQALIAAgEDYCAEGDASEQDOECC0EAIRAgAEEANgIAIBRBAWohAQzGAQsCQCABIgQgAkcNAEGEASEQDOACCyACIARrIAAoAgAiAWohFCAEIAFrQQRqIRACQANAIAQtAAAgAUGDz4CAAGotAABHDcgBIAFBBEYNASABQQFqIQEgBEEBaiIEIAJHDQALIAAgFDYCAEGEASEQDOACCyAAQQA2AgAgEEEBaiEBQSMhEAzFAQsCQCABIgQgAkcNAEGFASEQDN8CCwJAAkAgBC0AAEG0f2oOCADIAcgByAHIAcgByAEByAELIARBAWohAUHvACEQDMYCCyAEQQFqIQFB8AAhEAzFAgsCQCABIgQgAkcNAEGGASEQDN4CCyAELQAAQcUARw3FASAEQQFqIQEMgwILAkAgASIEIAJHDQBBhwEhEAzdAgsgAiAEayAAKAIAIgFqIRQgBCABa0EDaiEQAkADQCAELQAAIAFBiM+AgABqLQAARw3FASABQQNGDQEgAUEBaiEBIARBAWoiBCACRw0ACyAAIBQ2AgBBhwEhEAzdAgsgAEEANgIAIBBBAWohAUEtIRAMwgELAkAgASIEIAJHDQBBiAEhEAzcAgsgAiAEayAAKAIAIgFqIRQgBCABa0EIaiEQAkADQCAELQAAIAFB0M+AgABqLQAARw3EASABQQhGDQEgAUEBaiEBIARBAWoiBCACRw0ACyAAIBQ2AgBBiAEhEAzcAgsgAEEANgIAIBBBAWohAUEpIRAMwQELAkAgASIBIAJHDQBBiQEhEAzbAgtBASEQIAEtAABB3wBHDcABIAFBAWohAQyBAgsCQCABIgQgAkcNAEGKASEQDNoCCyACIARrIAAoAgAiAWohFCAEIAFrQQFqIRADQCAELQAAIAFBjM+AgABqLQAARw3BASABQQFGDa8CIAFBAWohASAEQQFqIgQgAkcNAAsgACAUNgIAQYoBIRAM2QILAkAgASIEIAJHDQBBiwEhEAzZAgsgAiAEayAAKAIAIgFqIRQgBCABa0ECaiEQAkADQCAELQAAIAFBjs+AgABqLQAARw3BASABQQJGDQEgAUEBaiEBIARBAWoiBCACRw0ACyAAIBQ2AgBBiwEhEAzZAgsgAEEANgIAIBBBAWohAUECIRAMvgELAkAgASIEIAJHDQBBjAEhEAzYAgsgAiAEayAAKAIAIgFqIRQgBCABa0EBaiEQAkADQCAELQAAIAFB8M+AgABqLQAARw
3AASABQQFGDQEgAUEBaiEBIARBAWoiBCACRw0ACyAAIBQ2AgBBjAEhEAzYAgsgAEEANgIAIBBBAWohAUEfIRAMvQELAkAgASIEIAJHDQBBjQEhEAzXAgsgAiAEayAAKAIAIgFqIRQgBCABa0EBaiEQAkADQCAELQAAIAFB8s+AgABqLQAARw2/ASABQQFGDQEgAUEBaiEBIARBAWoiBCACRw0ACyAAIBQ2AgBBjQEhEAzXAgsgAEEANgIAIBBBAWohAUEJIRAMvAELAkAgASIEIAJHDQBBjgEhEAzWAgsCQAJAIAQtAABBt39qDgcAvwG/Ab8BvwG/AQG/AQsgBEEBaiEBQfgAIRAMvQILIARBAWohAUH5ACEQDLwCCwJAIAEiBCACRw0AQY8BIRAM1QILIAIgBGsgACgCACIBaiEUIAQgAWtBBWohEAJAA0AgBC0AACABQZHPgIAAai0AAEcNvQEgAUEFRg0BIAFBAWohASAEQQFqIgQgAkcNAAsgACAUNgIAQY8BIRAM1QILIABBADYCACAQQQFqIQFBGCEQDLoBCwJAIAEiBCACRw0AQZABIRAM1AILIAIgBGsgACgCACIBaiEUIAQgAWtBAmohEAJAA0AgBC0AACABQZfPgIAAai0AAEcNvAEgAUECRg0BIAFBAWohASAEQQFqIgQgAkcNAAsgACAUNgIAQZABIRAM1AILIABBADYCACAQQQFqIQFBFyEQDLkBCwJAIAEiBCACRw0AQZEBIRAM0wILIAIgBGsgACgCACIBaiEUIAQgAWtBBmohEAJAA0AgBC0AACABQZrPgIAAai0AAEcNuwEgAUEGRg0BIAFBAWohASAEQQFqIgQgAkcNAAsgACAUNgIAQZEBIRAM0wILIABBADYCACAQQQFqIQFBFSEQDLgBCwJAIAEiBCACRw0AQZIBIRAM0gILIAIgBGsgACgCACIBaiEUIAQgAWtBBWohEAJAA0AgBC0AACABQaHPgIAAai0AAEcNugEgAUEFRg0BIAFBAWohASAEQQFqIgQgAkcNAAsgACAUNgIAQZIBIRAM0gILIABBADYCACAQQQFqIQFBHiEQDLcBCwJAIAEiBCACRw0AQZMBIRAM0QILIAQtAABBzABHDbgBIARBAWohAUEKIRAMtgELAkAgBCACRw0AQZQBIRAM0AILAkACQCAELQAAQb9/ag4PALkBuQG5AbkBuQG5AbkBuQG5AbkBuQG5AbkBAbkBCyAEQQFqIQFB/gAhEAy3AgsgBEEBaiEBQf8AIRAMtgILAkAgBCACRw0AQZUBIRAMzwILAkACQCAELQAAQb9/ag4DALgBAbgBCyAEQQFqIQFB/QAhEAy2AgsgBEEBaiEEQYABIRAMtQILAkAgBCACRw0AQZYBIRAMzgILIAIgBGsgACgCACIBaiEUIAQgAWtBAWohEAJAA0AgBC0AACABQafPgIAAai0AAEcNtgEgAUEBRg0BIAFBAWohASAEQQFqIgQgAkcNAAsgACAUNgIAQZYBIRAMzgILIABBADYCACAQQQFqIQFBCyEQDLMBCwJAIAQgAkcNAEGXASEQDM0CCwJAAkACQAJAIAQtAABBU2oOIwC4AbgBuAG4AbgBuAG4AbgBuAG4AbgBuAG4AbgBuAG4AbgBuAG4AbgBuAG4AbgBAbgBuAG4AbgBuAECuAG4AbgBA7gBCyAEQQFqIQFB+wAhEAy2AgsgBEEBaiEBQfwAIRAMtQILIARBAWohBEGBASEQDLQCCyAEQQFqIQRBggEhEAyzAgsCQCAEIAJHDQBBmAEhEAzMAgsgAiAEayAAKAIAIgFqIRQgBCABa0EEaiEQAkADQCAELQAAIAFBqc+AgABqLQAARw20ASABQQRGDQEgAUEBaiEBIARBAWoiBCACRw0ACyAAIBQ2AgBBmAEhEAzMAgsgAEEANgIAIBBBAWohAUEZIRAMsQELAkAgBCACRw0AQZkBIRAMywILIAIgBGsgACgCACIBaiEUIAQgAWtBBWohEAJAA0AgBC0AACABQa7PgIAAai0AAEcNswEgAUEFRg0BIAFBAWohASAEQQFqIgQgAkcNAAsgACAUNgIAQZkBIRAMywILIABBADYCACAQQQFqIQFBBiEQDLABCwJAIAQgAkcNAEGaASEQDMoCCyACIARrIAAoAgAiAWohFCAEIAFrQQFqIRACQANAIAQtAAAgAUG0z4CAAGotAABHDbIBIAFBAUYNASABQQFqIQEgBEEBaiIEIAJHDQALIAAgFDYCAEGaASEQDMoCCyAAQQA2AgAgEEEBaiEBQRwhEAyvAQsCQCAEIAJHDQBBmwEhEAzJAgsgAiAEayAAKAIAIgFqIRQgBCABa0EBaiEQAkADQCAELQAAIAFBts+AgABqLQAARw2xASABQQFGDQEgAUEBaiEBIARBAWoiBCACRw0ACyAAIBQ2AgBBmwEhEAzJAgsgAEEANgIAIBBBAWohAUEnIRAMrgELAkAgBCACRw0AQZwBIRAMyAILAkACQCAELQAAQax/ag4CAAGxAQsgBEEBaiEEQYYBIRAMrwILIARBAWohBEGHASEQDK4CCwJAIAQgAkcNAEGdASEQDMcCCyACIARrIAAoAgAiAWohFCAEIAFrQQFqIRACQANAIAQtAAAgAUG4z4CAAGotAABHDa8BIAFBAUYNASABQQFqIQEgBEEBaiIEIAJHDQALIAAgFDYCAEGdASEQDMcCCyAAQQA2AgAgEEEBaiEBQSYhEAysAQsCQCAEIAJHDQBBngEhEAzGAgsgAiAEayAAKAIAIgFqIRQgBCABa0EBaiEQAkADQCAELQAAIAFBus+AgABqLQAARw2uASABQQFGDQEgAUEBaiEBIARBAWoiBCACRw0ACyAAIBQ2AgBBngEhEAzGAgsgAEEANgIAIBBBAWohAUEDIRAMqwELAkAgBCACRw0AQZ8BIRAMxQILIAIgBGsgACgCACIBaiEUIAQgAWtBAmohEAJAA0AgBC0AACABQe3PgIAAai0AAEcNrQEgAUECRg0BIAFBAWohASAEQQFqIgQgAkcNAAsgACAUNgIAQZ8BIRAMxQILIABBADYCACAQQQFqIQFBDCEQDKoBCwJAIAQgAkcNAEGgASEQDMQCCyACIARrIAAoAgAiAWohFCAEIAFrQQNqIRACQANAIAQtAAAgAUG8z4CAAGotAABHDawBIAFBA0YNASABQQFqIQEgBEEBaiIEIAJHDQALIAAgFDYCAEGgASEQDMQCCyAAQQA2AgAgEEEBaiEBQQ0hEAypAQsCQCAEIAJHDQBBoQEhEAzDAgsCQAJAIAQtAABBun9qDgsArAGsAawBrAGsAawBrAGsAawBAawBCyAEQQFqIQRBiwEhEAyqAgsgBEEBaiEEQYwBIRAMqQILAkAgBCACRw0AQaIBIRAMwgILIAQtAABB0ABHDakBIARBAWohBAzpAQsCQCAEIAJHDQBBowEhEAzBAgsCQAJAIAQtAABBt39qDgcBqgGqAaoBqgGqAQCqAQsgBEEBaiEEQY4BIRAMq
AILIARBAWohAUEiIRAMpgELAkAgBCACRw0AQaQBIRAMwAILIAIgBGsgACgCACIBaiEUIAQgAWtBAWohEAJAA0AgBC0AACABQcDPgIAAai0AAEcNqAEgAUEBRg0BIAFBAWohASAEQQFqIgQgAkcNAAsgACAUNgIAQaQBIRAMwAILIABBADYCACAQQQFqIQFBHSEQDKUBCwJAIAQgAkcNAEGlASEQDL8CCwJAAkAgBC0AAEGuf2oOAwCoAQGoAQsgBEEBaiEEQZABIRAMpgILIARBAWohAUEEIRAMpAELAkAgBCACRw0AQaYBIRAMvgILAkACQAJAAkACQCAELQAAQb9/ag4VAKoBqgGqAaoBqgGqAaoBqgGqAaoBAaoBqgECqgGqAQOqAaoBBKoBCyAEQQFqIQRBiAEhEAyoAgsgBEEBaiEEQYkBIRAMpwILIARBAWohBEGKASEQDKYCCyAEQQFqIQRBjwEhEAylAgsgBEEBaiEEQZEBIRAMpAILAkAgBCACRw0AQacBIRAMvQILIAIgBGsgACgCACIBaiEUIAQgAWtBAmohEAJAA0AgBC0AACABQe3PgIAAai0AAEcNpQEgAUECRg0BIAFBAWohASAEQQFqIgQgAkcNAAsgACAUNgIAQacBIRAMvQILIABBADYCACAQQQFqIQFBESEQDKIBCwJAIAQgAkcNAEGoASEQDLwCCyACIARrIAAoAgAiAWohFCAEIAFrQQJqIRACQANAIAQtAAAgAUHCz4CAAGotAABHDaQBIAFBAkYNASABQQFqIQEgBEEBaiIEIAJHDQALIAAgFDYCAEGoASEQDLwCCyAAQQA2AgAgEEEBaiEBQSwhEAyhAQsCQCAEIAJHDQBBqQEhEAy7AgsgAiAEayAAKAIAIgFqIRQgBCABa0EEaiEQAkADQCAELQAAIAFBxc+AgABqLQAARw2jASABQQRGDQEgAUEBaiEBIARBAWoiBCACRw0ACyAAIBQ2AgBBqQEhEAy7AgsgAEEANgIAIBBBAWohAUErIRAMoAELAkAgBCACRw0AQaoBIRAMugILIAIgBGsgACgCACIBaiEUIAQgAWtBAmohEAJAA0AgBC0AACABQcrPgIAAai0AAEcNogEgAUECRg0BIAFBAWohASAEQQFqIgQgAkcNAAsgACAUNgIAQaoBIRAMugILIABBADYCACAQQQFqIQFBFCEQDJ8BCwJAIAQgAkcNAEGrASEQDLkCCwJAAkACQAJAIAQtAABBvn9qDg8AAQKkAaQBpAGkAaQBpAGkAaQBpAGkAaQBA6QBCyAEQQFqIQRBkwEhEAyiAgsgBEEBaiEEQZQBIRAMoQILIARBAWohBEGVASEQDKACCyAEQQFqIQRBlgEhEAyfAgsCQCAEIAJHDQBBrAEhEAy4AgsgBC0AAEHFAEcNnwEgBEEBaiEEDOABCwJAIAQgAkcNAEGtASEQDLcCCyACIARrIAAoAgAiAWohFCAEIAFrQQJqIRACQANAIAQtAAAgAUHNz4CAAGotAABHDZ8BIAFBAkYNASABQQFqIQEgBEEBaiIEIAJHDQALIAAgFDYCAEGtASEQDLcCCyAAQQA2AgAgEEEBaiEBQQ4hEAycAQsCQCAEIAJHDQBBrgEhEAy2AgsgBC0AAEHQAEcNnQEgBEEBaiEBQSUhEAybAQsCQCAEIAJHDQBBrwEhEAy1AgsgAiAEayAAKAIAIgFqIRQgBCABa0EIaiEQAkADQCAELQAAIAFB0M+AgABqLQAARw2dASABQQhGDQEgAUEBaiEBIARBAWoiBCACRw0ACyAAIBQ2AgBBrwEhEAy1AgsgAEEANgIAIBBBAWohAUEqIRAMmgELAkAgBCACRw0AQbABIRAMtAILAkACQCAELQAAQat/ag4LAJ0BnQGdAZ0BnQGdAZ0BnQGdAQGdAQsgBEEBaiEEQZoBIRAMmwILIARBAWohBEGbASEQDJoCCwJAIAQgAkcNAEGxASEQDLMCCwJAAkAgBC0AAEG/f2oOFACcAZwBnAGcAZwBnAGcAZwBnAGcAZwBnAGcAZwBnAGcAZwBnAEBnAELIARBAWohBEGZASEQDJoCCyAEQQFqIQRBnAEhEAyZAgsCQCAEIAJHDQBBsgEhEAyyAgsgAiAEayAAKAIAIgFqIRQgBCABa0EDaiEQAkADQCAELQAAIAFB2c+AgABqLQAARw2aASABQQNGDQEgAUEBaiEBIARBAWoiBCACRw0ACyAAIBQ2AgBBsgEhEAyyAgsgAEEANgIAIBBBAWohAUEhIRAMlwELAkAgBCACRw0AQbMBIRAMsQILIAIgBGsgACgCACIBaiEUIAQgAWtBBmohEAJAA0AgBC0AACABQd3PgIAAai0AAEcNmQEgAUEGRg0BIAFBAWohASAEQQFqIgQgAkcNAAsgACAUNgIAQbMBIRAMsQILIABBADYCACAQQQFqIQFBGiEQDJYBCwJAIAQgAkcNAEG0ASEQDLACCwJAAkACQCAELQAAQbt/ag4RAJoBmgGaAZoBmgGaAZoBmgGaAQGaAZoBmgGaAZoBApoBCyAEQQFqIQRBnQEhEAyYAgsgBEEBaiEEQZ4BIRAMlwILIARBAWohBEGfASEQDJYCCwJAIAQgAkcNAEG1ASEQDK8CCyACIARrIAAoAgAiAWohFCAEIAFrQQVqIRACQANAIAQtAAAgAUHkz4CAAGotAABHDZcBIAFBBUYNASABQQFqIQEgBEEBaiIEIAJHDQALIAAgFDYCAEG1ASEQDK8CCyAAQQA2AgAgEEEBaiEBQSghEAyUAQsCQCAEIAJHDQBBtgEhEAyuAgsgAiAEayAAKAIAIgFqIRQgBCABa0ECaiEQAkADQCAELQAAIAFB6s+AgABqLQAARw2WASABQQJGDQEgAUEBaiEBIARBAWoiBCACRw0ACyAAIBQ2AgBBtgEhEAyuAgsgAEEANgIAIBBBAWohAUEHIRAMkwELAkAgBCACRw0AQbcBIRAMrQILAkACQCAELQAAQbt/ag4OAJYBlgGWAZYBlgGWAZYBlgGWAZYBlgGWAQGWAQsgBEEBaiEEQaEBIRAMlAILIARBAWohBEGiASEQDJMCCwJAIAQgAkcNAEG4ASEQDKwCCyACIARrIAAoAgAiAWohFCAEIAFrQQJqIRACQANAIAQtAAAgAUHtz4CAAGotAABHDZQBIAFBAkYNASABQQFqIQEgBEEBaiIEIAJHDQALIAAgFDYCAEG4ASEQDKwCCyAAQQA2AgAgEEEBaiEBQRIhEAyRAQsCQCAEIAJHDQBBuQEhEAyrAgsgAiAEayAAKAIAIgFqIRQgBCABa0EBaiEQAkADQCAELQAAIAFB8M+AgABqLQAARw2TASABQQFGDQEgAUEBaiEBIARBAWoiBCACRw0ACyAAIBQ2AgBBuQEhEAyrAgsgAEEANgIAIBBBAWohAUEgIRAMkAELAkAgBCACRw0AQboBIRAMqgILIAIgBGsgACgCACIBaiEUIAQgAWtBAWohEAJAA0AgBC0AACABQfLPgIAAai0AAEcNkgEg
AUEBRg0BIAFBAWohASAEQQFqIgQgAkcNAAsgACAUNgIAQboBIRAMqgILIABBADYCACAQQQFqIQFBDyEQDI8BCwJAIAQgAkcNAEG7ASEQDKkCCwJAAkAgBC0AAEG3f2oOBwCSAZIBkgGSAZIBAZIBCyAEQQFqIQRBpQEhEAyQAgsgBEEBaiEEQaYBIRAMjwILAkAgBCACRw0AQbwBIRAMqAILIAIgBGsgACgCACIBaiEUIAQgAWtBB2ohEAJAA0AgBC0AACABQfTPgIAAai0AAEcNkAEgAUEHRg0BIAFBAWohASAEQQFqIgQgAkcNAAsgACAUNgIAQbwBIRAMqAILIABBADYCACAQQQFqIQFBGyEQDI0BCwJAIAQgAkcNAEG9ASEQDKcCCwJAAkACQCAELQAAQb5/ag4SAJEBkQGRAZEBkQGRAZEBkQGRAQGRAZEBkQGRAZEBkQECkQELIARBAWohBEGkASEQDI8CCyAEQQFqIQRBpwEhEAyOAgsgBEEBaiEEQagBIRAMjQILAkAgBCACRw0AQb4BIRAMpgILIAQtAABBzgBHDY0BIARBAWohBAzPAQsCQCAEIAJHDQBBvwEhEAylAgsCQAJAAkACQAJAAkACQAJAAkACQAJAAkACQAJAAkACQCAELQAAQb9/ag4VAAECA5wBBAUGnAGcAZwBBwgJCgucAQwNDg+cAQsgBEEBaiEBQegAIRAMmgILIARBAWohAUHpACEQDJkCCyAEQQFqIQFB7gAhEAyYAgsgBEEBaiEBQfIAIRAMlwILIARBAWohAUHzACEQDJYCCyAEQQFqIQFB9gAhEAyVAgsgBEEBaiEBQfcAIRAMlAILIARBAWohAUH6ACEQDJMCCyAEQQFqIQRBgwEhEAySAgsgBEEBaiEEQYQBIRAMkQILIARBAWohBEGFASEQDJACCyAEQQFqIQRBkgEhEAyPAgsgBEEBaiEEQZgBIRAMjgILIARBAWohBEGgASEQDI0CCyAEQQFqIQRBowEhEAyMAgsgBEEBaiEEQaoBIRAMiwILAkAgBCACRg0AIABBkICAgAA2AgggACAENgIEQasBIRAMiwILQcABIRAMowILIAAgBSACEKqAgIAAIgENiwEgBSEBDFwLAkAgBiACRg0AIAZBAWohBQyNAQtBwgEhEAyhAgsDQAJAIBAtAABBdmoOBIwBAACPAQALIBBBAWoiECACRw0AC0HDASEQDKACCwJAIAcgAkYNACAAQZGAgIAANgIIIAAgBzYCBCAHIQFBASEQDIcCC0HEASEQDJ8CCwJAIAcgAkcNAEHFASEQDJ8CCwJAAkAgBy0AAEF2ag4EAc4BzgEAzgELIAdBAWohBgyNAQsgB0EBaiEFDIkBCwJAIAcgAkcNAEHGASEQDJ4CCwJAAkAgBy0AAEF2ag4XAY8BjwEBjwGPAY8BjwGPAY8BjwGPAY8BjwGPAY8BjwGPAY8BjwGPAY8BAI8BCyAHQQFqIQcLQbABIRAMhAILAkAgCCACRw0AQcgBIRAMnQILIAgtAABBIEcNjQEgAEEAOwEyIAhBAWohAUGzASEQDIMCCyABIRcCQANAIBciByACRg0BIActAABBUGpB/wFxIhBBCk8NzAECQCAALwEyIhRBmTNLDQAgACAUQQpsIhQ7ATIgEEH//wNzIBRB/v8DcUkNACAHQQFqIRcgACAUIBBqIhA7ATIgEEH//wNxQegHSQ0BCwtBACEQIABBADYCHCAAQcGJgIAANgIQIABBDTYCDCAAIAdBAWo2AhQMnAILQccBIRAMmwILIAAgCCACEK6AgIAAIhBFDcoBIBBBFUcNjAEgAEHIATYCHCAAIAg2AhQgAEHJl4CAADYCECAAQRU2AgxBACEQDJoCCwJAIAkgAkcNAEHMASEQDJoCC0EAIRRBASEXQQEhFkEAIRACQAJAAkACQAJAAkACQAJAAkAgCS0AAEFQag4KlgGVAQABAgMEBQYIlwELQQIhEAwGC0EDIRAMBQtBBCEQDAQLQQUhEAwDC0EGIRAMAgtBByEQDAELQQghEAtBACEXQQAhFkEAIRQMjgELQQkhEEEBIRRBACEXQQAhFgyNAQsCQCAKIAJHDQBBzgEhEAyZAgsgCi0AAEEuRw2OASAKQQFqIQkMygELIAsgAkcNjgFB0AEhEAyXAgsCQCALIAJGDQAgAEGOgICAADYCCCAAIAs2AgRBtwEhEAz+AQtB0QEhEAyWAgsCQCAEIAJHDQBB0gEhEAyWAgsgAiAEayAAKAIAIhBqIRQgBCAQa0EEaiELA0AgBC0AACAQQfzPgIAAai0AAEcNjgEgEEEERg3pASAQQQFqIRAgBEEBaiIEIAJHDQALIAAgFDYCAEHSASEQDJUCCyAAIAwgAhCsgICAACIBDY0BIAwhAQy4AQsCQCAEIAJHDQBB1AEhEAyUAgsgAiAEayAAKAIAIhBqIRQgBCAQa0EBaiEMA0AgBC0AACAQQYHQgIAAai0AAEcNjwEgEEEBRg2OASAQQQFqIRAgBEEBaiIEIAJHDQALIAAgFDYCAEHUASEQDJMCCwJAIAQgAkcNAEHWASEQDJMCCyACIARrIAAoAgAiEGohFCAEIBBrQQJqIQsDQCAELQAAIBBBg9CAgABqLQAARw2OASAQQQJGDZABIBBBAWohECAEQQFqIgQgAkcNAAsgACAUNgIAQdYBIRAMkgILAkAgBCACRw0AQdcBIRAMkgILAkACQCAELQAAQbt/ag4QAI8BjwGPAY8BjwGPAY8BjwGPAY8BjwGPAY8BjwEBjwELIARBAWohBEG7ASEQDPkBCyAEQQFqIQRBvAEhEAz4AQsCQCAEIAJHDQBB2AEhEAyRAgsgBC0AAEHIAEcNjAEgBEEBaiEEDMQBCwJAIAQgAkYNACAAQZCAgIAANgIIIAAgBDYCBEG+ASEQDPcBC0HZASEQDI8CCwJAIAQgAkcNAEHaASEQDI8CCyAELQAAQcgARg3DASAAQQE6ACgMuQELIABBAjoALyAAIAQgAhCmgICAACIQDY0BQcIBIRAM9AELIAAtAChBf2oOArcBuQG4AQsDQAJAIAQtAABBdmoOBACOAY4BAI4BCyAEQQFqIgQgAkcNAAtB3QEhEAyLAgsgAEEAOgAvIAAtAC1BBHFFDYQCCyAAQQA6AC8gAEEBOgA0IAEhAQyMAQsgEEEVRg3aASAAQQA2AhwgACABNgIUIABBp46AgAA2AhAgAEESNgIMQQAhEAyIAgsCQCAAIBAgAhC0gICAACIEDQAgECEBDIECCwJAIARBFUcNACAAQQM2AhwgACAQNgIUIABBsJiAgAA2AhAgAEEVNgIMQQAhEAyIAgsgAEEANgIcIAAgEDYCFCAAQaeOgIAANgIQIABBEjYCDEEAIRAMhwILIBBBFUYN1gEgAEEANgIcIAAgATYCFCAAQdqNgIAANgIQIABBFDYCDEEAIRAMhgILIAAoAgQhFyAAQQA2AgQgECARp2oiFiEBIAAgFyAQIBYgFBsiEBC1gICAACIURQ2NASAAQQc2AhwgACA
QNgIUIAAgFDYCDEEAIRAMhQILIAAgAC8BMEGAAXI7ATAgASEBC0EqIRAM6gELIBBBFUYN0QEgAEEANgIcIAAgATYCFCAAQYOMgIAANgIQIABBEzYCDEEAIRAMggILIBBBFUYNzwEgAEEANgIcIAAgATYCFCAAQZqPgIAANgIQIABBIjYCDEEAIRAMgQILIAAoAgQhECAAQQA2AgQCQCAAIBAgARC3gICAACIQDQAgAUEBaiEBDI0BCyAAQQw2AhwgACAQNgIMIAAgAUEBajYCFEEAIRAMgAILIBBBFUYNzAEgAEEANgIcIAAgATYCFCAAQZqPgIAANgIQIABBIjYCDEEAIRAM/wELIAAoAgQhECAAQQA2AgQCQCAAIBAgARC3gICAACIQDQAgAUEBaiEBDIwBCyAAQQ02AhwgACAQNgIMIAAgAUEBajYCFEEAIRAM/gELIBBBFUYNyQEgAEEANgIcIAAgATYCFCAAQcaMgIAANgIQIABBIzYCDEEAIRAM/QELIAAoAgQhECAAQQA2AgQCQCAAIBAgARC5gICAACIQDQAgAUEBaiEBDIsBCyAAQQ42AhwgACAQNgIMIAAgAUEBajYCFEEAIRAM/AELIABBADYCHCAAIAE2AhQgAEHAlYCAADYCECAAQQI2AgxBACEQDPsBCyAQQRVGDcUBIABBADYCHCAAIAE2AhQgAEHGjICAADYCECAAQSM2AgxBACEQDPoBCyAAQRA2AhwgACABNgIUIAAgEDYCDEEAIRAM+QELIAAoAgQhBCAAQQA2AgQCQCAAIAQgARC5gICAACIEDQAgAUEBaiEBDPEBCyAAQRE2AhwgACAENgIMIAAgAUEBajYCFEEAIRAM+AELIBBBFUYNwQEgAEEANgIcIAAgATYCFCAAQcaMgIAANgIQIABBIzYCDEEAIRAM9wELIAAoAgQhECAAQQA2AgQCQCAAIBAgARC5gICAACIQDQAgAUEBaiEBDIgBCyAAQRM2AhwgACAQNgIMIAAgAUEBajYCFEEAIRAM9gELIAAoAgQhBCAAQQA2AgQCQCAAIAQgARC5gICAACIEDQAgAUEBaiEBDO0BCyAAQRQ2AhwgACAENgIMIAAgAUEBajYCFEEAIRAM9QELIBBBFUYNvQEgAEEANgIcIAAgATYCFCAAQZqPgIAANgIQIABBIjYCDEEAIRAM9AELIAAoAgQhECAAQQA2AgQCQCAAIBAgARC3gICAACIQDQAgAUEBaiEBDIYBCyAAQRY2AhwgACAQNgIMIAAgAUEBajYCFEEAIRAM8wELIAAoAgQhBCAAQQA2AgQCQCAAIAQgARC3gICAACIEDQAgAUEBaiEBDOkBCyAAQRc2AhwgACAENgIMIAAgAUEBajYCFEEAIRAM8gELIABBADYCHCAAIAE2AhQgAEHNk4CAADYCECAAQQw2AgxBACEQDPEBC0IBIRELIBBBAWohAQJAIAApAyAiEkL//////////w9WDQAgACASQgSGIBGENwMgIAEhAQyEAQsgAEEANgIcIAAgATYCFCAAQa2JgIAANgIQIABBDDYCDEEAIRAM7wELIABBADYCHCAAIBA2AhQgAEHNk4CAADYCECAAQQw2AgxBACEQDO4BCyAAKAIEIRcgAEEANgIEIBAgEadqIhYhASAAIBcgECAWIBQbIhAQtYCAgAAiFEUNcyAAQQU2AhwgACAQNgIUIAAgFDYCDEEAIRAM7QELIABBADYCHCAAIBA2AhQgAEGqnICAADYCECAAQQ82AgxBACEQDOwBCyAAIBAgAhC0gICAACIBDQEgECEBC0EOIRAM0QELAkAgAUEVRw0AIABBAjYCHCAAIBA2AhQgAEGwmICAADYCECAAQRU2AgxBACEQDOoBCyAAQQA2AhwgACAQNgIUIABBp46AgAA2AhAgAEESNgIMQQAhEAzpAQsgAUEBaiEQAkAgAC8BMCIBQYABcUUNAAJAIAAgECACELuAgIAAIgENACAQIQEMcAsgAUEVRw26ASAAQQU2AhwgACAQNgIUIABB+ZeAgAA2AhAgAEEVNgIMQQAhEAzpAQsCQCABQaAEcUGgBEcNACAALQAtQQJxDQAgAEEANgIcIAAgEDYCFCAAQZaTgIAANgIQIABBBDYCDEEAIRAM6QELIAAgECACEL2AgIAAGiAQIQECQAJAAkACQAJAIAAgECACELOAgIAADhYCAQAEBAQEBAQEBAQEBAQEBAQEBAQDBAsgAEEBOgAuCyAAIAAvATBBwAByOwEwIBAhAQtBJiEQDNEBCyAAQSM2AhwgACAQNgIUIABBpZaAgAA2AhAgAEEVNgIMQQAhEAzpAQsgAEEANgIcIAAgEDYCFCAAQdWLgIAANgIQIABBETYCDEEAIRAM6AELIAAtAC1BAXFFDQFBwwEhEAzOAQsCQCANIAJGDQADQAJAIA0tAABBIEYNACANIQEMxAELIA1BAWoiDSACRw0AC0ElIRAM5wELQSUhEAzmAQsgACgCBCEEIABBADYCBCAAIAQgDRCvgICAACIERQ2tASAAQSY2AhwgACAENgIMIAAgDUEBajYCFEEAIRAM5QELIBBBFUYNqwEgAEEANgIcIAAgATYCFCAAQf2NgIAANgIQIABBHTYCDEEAIRAM5AELIABBJzYCHCAAIAE2AhQgACAQNgIMQQAhEAzjAQsgECEBQQEhFAJAAkACQAJAAkACQAJAIAAtACxBfmoOBwYFBQMBAgAFCyAAIAAvATBBCHI7ATAMAwtBAiEUDAELQQQhFAsgAEEBOgAsIAAgAC8BMCAUcjsBMAsgECEBC0ErIRAMygELIABBADYCHCAAIBA2AhQgAEGrkoCAADYCECAAQQs2AgxBACEQDOIBCyAAQQA2AhwgACABNgIUIABB4Y+AgAA2AhAgAEEKNgIMQQAhEAzhAQsgAEEAOgAsIBAhAQy9AQsgECEBQQEhFAJAAkACQAJAAkAgAC0ALEF7ag4EAwECAAULIAAgAC8BMEEIcjsBMAwDC0ECIRQMAQtBBCEUCyAAQQE6ACwgACAALwEwIBRyOwEwCyAQIQELQSkhEAzFAQsgAEEANgIcIAAgATYCFCAAQfCUgIAANgIQIABBAzYCDEEAIRAM3QELAkAgDi0AAEENRw0AIAAoAgQhASAAQQA2AgQCQCAAIAEgDhCxgICAACIBDQAgDkEBaiEBDHULIABBLDYCHCAAIAE2AgwgACAOQQFqNgIUQQAhEAzdAQsgAC0ALUEBcUUNAUHEASEQDMMBCwJAIA4gAkcNAEEtIRAM3AELAkACQANAAkAgDi0AAEF2ag4EAgAAAwALIA5BAWoiDiACRw0AC0EtIRAM3QELIAAoAgQhASAAQQA2AgQCQCAAIAEgDhCxgICAACIBDQAgDiEBDHQLIABBLDYCHCAAIA42AhQgACABNgIMQQAhEAzcAQsgACgCBCEBIABBADYCBAJAIAAgASAOELGAgIAAIgENACAOQQFqIQEMcwsgAEEsNgIcIAAgATYCDCAAIA5BAWo2AhRBACEQDNsBCy
AAKAIEIQQgAEEANgIEIAAgBCAOELGAgIAAIgQNoAEgDiEBDM4BCyAQQSxHDQEgAUEBaiEQQQEhAQJAAkACQAJAAkAgAC0ALEF7ag4EAwECBAALIBAhAQwEC0ECIQEMAQtBBCEBCyAAQQE6ACwgACAALwEwIAFyOwEwIBAhAQwBCyAAIAAvATBBCHI7ATAgECEBC0E5IRAMvwELIABBADoALCABIQELQTQhEAy9AQsgACAALwEwQSByOwEwIAEhAQwCCyAAKAIEIQQgAEEANgIEAkAgACAEIAEQsYCAgAAiBA0AIAEhAQzHAQsgAEE3NgIcIAAgATYCFCAAIAQ2AgxBACEQDNQBCyAAQQg6ACwgASEBC0EwIRAMuQELAkAgAC0AKEEBRg0AIAEhAQwECyAALQAtQQhxRQ2TASABIQEMAwsgAC0AMEEgcQ2UAUHFASEQDLcBCwJAIA8gAkYNAAJAA0ACQCAPLQAAQVBqIgFB/wFxQQpJDQAgDyEBQTUhEAy6AQsgACkDICIRQpmz5syZs+bMGVYNASAAIBFCCn4iETcDICARIAGtQv8BgyISQn+FVg0BIAAgESASfDcDICAPQQFqIg8gAkcNAAtBOSEQDNEBCyAAKAIEIQIgAEEANgIEIAAgAiAPQQFqIgQQsYCAgAAiAg2VASAEIQEMwwELQTkhEAzPAQsCQCAALwEwIgFBCHFFDQAgAC0AKEEBRw0AIAAtAC1BCHFFDZABCyAAIAFB9/sDcUGABHI7ATAgDyEBC0E3IRAMtAELIAAgAC8BMEEQcjsBMAyrAQsgEEEVRg2LASAAQQA2AhwgACABNgIUIABB8I6AgAA2AhAgAEEcNgIMQQAhEAzLAQsgAEHDADYCHCAAIAE2AgwgACANQQFqNgIUQQAhEAzKAQsCQCABLQAAQTpHDQAgACgCBCEQIABBADYCBAJAIAAgECABEK+AgIAAIhANACABQQFqIQEMYwsgAEHDADYCHCAAIBA2AgwgACABQQFqNgIUQQAhEAzKAQsgAEEANgIcIAAgATYCFCAAQbGRgIAANgIQIABBCjYCDEEAIRAMyQELIABBADYCHCAAIAE2AhQgAEGgmYCAADYCECAAQR42AgxBACEQDMgBCyAAQQA2AgALIABBgBI7ASogACAXQQFqIgEgAhCogICAACIQDQEgASEBC0HHACEQDKwBCyAQQRVHDYMBIABB0QA2AhwgACABNgIUIABB45eAgAA2AhAgAEEVNgIMQQAhEAzEAQsgACgCBCEQIABBADYCBAJAIAAgECABEKeAgIAAIhANACABIQEMXgsgAEHSADYCHCAAIAE2AhQgACAQNgIMQQAhEAzDAQsgAEEANgIcIAAgFDYCFCAAQcGogIAANgIQIABBBzYCDCAAQQA2AgBBACEQDMIBCyAAKAIEIRAgAEEANgIEAkAgACAQIAEQp4CAgAAiEA0AIAEhAQxdCyAAQdMANgIcIAAgATYCFCAAIBA2AgxBACEQDMEBC0EAIRAgAEEANgIcIAAgATYCFCAAQYCRgIAANgIQIABBCTYCDAzAAQsgEEEVRg19IABBADYCHCAAIAE2AhQgAEGUjYCAADYCECAAQSE2AgxBACEQDL8BC0EBIRZBACEXQQAhFEEBIRALIAAgEDoAKyABQQFqIQECQAJAIAAtAC1BEHENAAJAAkACQCAALQAqDgMBAAIECyAWRQ0DDAILIBQNAQwCCyAXRQ0BCyAAKAIEIRAgAEEANgIEAkAgACAQIAEQrYCAgAAiEA0AIAEhAQxcCyAAQdgANgIcIAAgATYCFCAAIBA2AgxBACEQDL4BCyAAKAIEIQQgAEEANgIEAkAgACAEIAEQrYCAgAAiBA0AIAEhAQytAQsgAEHZADYCHCAAIAE2AhQgACAENgIMQQAhEAy9AQsgACgCBCEEIABBADYCBAJAIAAgBCABEK2AgIAAIgQNACABIQEMqwELIABB2gA2AhwgACABNgIUIAAgBDYCDEEAIRAMvAELIAAoAgQhBCAAQQA2AgQCQCAAIAQgARCtgICAACIEDQAgASEBDKkBCyAAQdwANgIcIAAgATYCFCAAIAQ2AgxBACEQDLsBCwJAIAEtAABBUGoiEEH/AXFBCk8NACAAIBA6ACogAUEBaiEBQc8AIRAMogELIAAoAgQhBCAAQQA2AgQCQCAAIAQgARCtgICAACIEDQAgASEBDKcBCyAAQd4ANgIcIAAgATYCFCAAIAQ2AgxBACEQDLoBCyAAQQA2AgAgF0EBaiEBAkAgAC0AKUEjTw0AIAEhAQxZCyAAQQA2AhwgACABNgIUIABB04mAgAA2AhAgAEEINgIMQQAhEAy5AQsgAEEANgIAC0EAIRAgAEEANgIcIAAgATYCFCAAQZCzgIAANgIQIABBCDYCDAy3AQsgAEEANgIAIBdBAWohAQJAIAAtAClBIUcNACABIQEMVgsgAEEANgIcIAAgATYCFCAAQZuKgIAANgIQIABBCDYCDEEAIRAMtgELIABBADYCACAXQQFqIQECQCAALQApIhBBXWpBC08NACABIQEMVQsCQCAQQQZLDQBBASAQdEHKAHFFDQAgASEBDFULQQAhECAAQQA2AhwgACABNgIUIABB94mAgAA2AhAgAEEINgIMDLUBCyAQQRVGDXEgAEEANgIcIAAgATYCFCAAQbmNgIAANgIQIABBGjYCDEEAIRAMtAELIAAoAgQhECAAQQA2AgQCQCAAIBAgARCngICAACIQDQAgASEBDFQLIABB5QA2AhwgACABNgIUIAAgEDYCDEEAIRAMswELIAAoAgQhECAAQQA2AgQCQCAAIBAgARCngICAACIQDQAgASEBDE0LIABB0gA2AhwgACABNgIUIAAgEDYCDEEAIRAMsgELIAAoAgQhECAAQQA2AgQCQCAAIBAgARCngICAACIQDQAgASEBDE0LIABB0wA2AhwgACABNgIUIAAgEDYCDEEAIRAMsQELIAAoAgQhECAAQQA2AgQCQCAAIBAgARCngICAACIQDQAgASEBDFELIABB5QA2AhwgACABNgIUIAAgEDYCDEEAIRAMsAELIABBADYCHCAAIAE2AhQgAEHGioCAADYCECAAQQc2AgxBACEQDK8BCyAAKAIEIRAgAEEANgIEAkAgACAQIAEQp4CAgAAiEA0AIAEhAQxJCyAAQdIANgIcIAAgATYCFCAAIBA2AgxBACEQDK4BCyAAKAIEIRAgAEEANgIEAkAgACAQIAEQp4CAgAAiEA0AIAEhAQxJCyAAQdMANgIcIAAgATYCFCAAIBA2AgxBACEQDK0BCyAAKAIEIRAgAEEANgIEAkAgACAQIAEQp4CAgAAiEA0AIAEhAQxNCyAAQeUANgIcIAAgATYCFCAAIBA2AgxBACEQDKwBCyAAQQA2AhwgACABNgIUIABB3IiAgAA2AhAgAEEHNgIMQQAhEAyrAQsgEEE/Rw0BIAFBAWohAQtBBSEQDJABC0EAIRAgAEEANgIcIAAgATYCFCAAQf2SgIAANgIQIABBB
zYCDAyoAQsgACgCBCEQIABBADYCBAJAIAAgECABEKeAgIAAIhANACABIQEMQgsgAEHSADYCHCAAIAE2AhQgACAQNgIMQQAhEAynAQsgACgCBCEQIABBADYCBAJAIAAgECABEKeAgIAAIhANACABIQEMQgsgAEHTADYCHCAAIAE2AhQgACAQNgIMQQAhEAymAQsgACgCBCEQIABBADYCBAJAIAAgECABEKeAgIAAIhANACABIQEMRgsgAEHlADYCHCAAIAE2AhQgACAQNgIMQQAhEAylAQsgACgCBCEBIABBADYCBAJAIAAgASAUEKeAgIAAIgENACAUIQEMPwsgAEHSADYCHCAAIBQ2AhQgACABNgIMQQAhEAykAQsgACgCBCEBIABBADYCBAJAIAAgASAUEKeAgIAAIgENACAUIQEMPwsgAEHTADYCHCAAIBQ2AhQgACABNgIMQQAhEAyjAQsgACgCBCEBIABBADYCBAJAIAAgASAUEKeAgIAAIgENACAUIQEMQwsgAEHlADYCHCAAIBQ2AhQgACABNgIMQQAhEAyiAQsgAEEANgIcIAAgFDYCFCAAQcOPgIAANgIQIABBBzYCDEEAIRAMoQELIABBADYCHCAAIAE2AhQgAEHDj4CAADYCECAAQQc2AgxBACEQDKABC0EAIRAgAEEANgIcIAAgFDYCFCAAQYycgIAANgIQIABBBzYCDAyfAQsgAEEANgIcIAAgFDYCFCAAQYycgIAANgIQIABBBzYCDEEAIRAMngELIABBADYCHCAAIBQ2AhQgAEH+kYCAADYCECAAQQc2AgxBACEQDJ0BCyAAQQA2AhwgACABNgIUIABBjpuAgAA2AhAgAEEGNgIMQQAhEAycAQsgEEEVRg1XIABBADYCHCAAIAE2AhQgAEHMjoCAADYCECAAQSA2AgxBACEQDJsBCyAAQQA2AgAgEEEBaiEBQSQhEAsgACAQOgApIAAoAgQhECAAQQA2AgQgACAQIAEQq4CAgAAiEA1UIAEhAQw+CyAAQQA2AgALQQAhECAAQQA2AhwgACAENgIUIABB8ZuAgAA2AhAgAEEGNgIMDJcBCyABQRVGDVAgAEEANgIcIAAgBTYCFCAAQfCMgIAANgIQIABBGzYCDEEAIRAMlgELIAAoAgQhBSAAQQA2AgQgACAFIBAQqYCAgAAiBQ0BIBBBAWohBQtBrQEhEAx7CyAAQcEBNgIcIAAgBTYCDCAAIBBBAWo2AhRBACEQDJMBCyAAKAIEIQYgAEEANgIEIAAgBiAQEKmAgIAAIgYNASAQQQFqIQYLQa4BIRAMeAsgAEHCATYCHCAAIAY2AgwgACAQQQFqNgIUQQAhEAyQAQsgAEEANgIcIAAgBzYCFCAAQZeLgIAANgIQIABBDTYCDEEAIRAMjwELIABBADYCHCAAIAg2AhQgAEHjkICAADYCECAAQQk2AgxBACEQDI4BCyAAQQA2AhwgACAINgIUIABBlI2AgAA2AhAgAEEhNgIMQQAhEAyNAQtBASEWQQAhF0EAIRRBASEQCyAAIBA6ACsgCUEBaiEIAkACQCAALQAtQRBxDQACQAJAAkAgAC0AKg4DAQACBAsgFkUNAwwCCyAUDQEMAgsgF0UNAQsgACgCBCEQIABBADYCBCAAIBAgCBCtgICAACIQRQ09IABByQE2AhwgACAINgIUIAAgEDYCDEEAIRAMjAELIAAoAgQhBCAAQQA2AgQgACAEIAgQrYCAgAAiBEUNdiAAQcoBNgIcIAAgCDYCFCAAIAQ2AgxBACEQDIsBCyAAKAIEIQQgAEEANgIEIAAgBCAJEK2AgIAAIgRFDXQgAEHLATYCHCAAIAk2AhQgACAENgIMQQAhEAyKAQsgACgCBCEEIABBADYCBCAAIAQgChCtgICAACIERQ1yIABBzQE2AhwgACAKNgIUIAAgBDYCDEEAIRAMiQELAkAgCy0AAEFQaiIQQf8BcUEKTw0AIAAgEDoAKiALQQFqIQpBtgEhEAxwCyAAKAIEIQQgAEEANgIEIAAgBCALEK2AgIAAIgRFDXAgAEHPATYCHCAAIAs2AhQgACAENgIMQQAhEAyIAQsgAEEANgIcIAAgBDYCFCAAQZCzgIAANgIQIABBCDYCDCAAQQA2AgBBACEQDIcBCyABQRVGDT8gAEEANgIcIAAgDDYCFCAAQcyOgIAANgIQIABBIDYCDEEAIRAMhgELIABBgQQ7ASggACgCBCEQIABCADcDACAAIBAgDEEBaiIMEKuAgIAAIhBFDTggAEHTATYCHCAAIAw2AhQgACAQNgIMQQAhEAyFAQsgAEEANgIAC0EAIRAgAEEANgIcIAAgBDYCFCAAQdibgIAANgIQIABBCDYCDAyDAQsgACgCBCEQIABCADcDACAAIBAgC0EBaiILEKuAgIAAIhANAUHGASEQDGkLIABBAjoAKAxVCyAAQdUBNgIcIAAgCzYCFCAAIBA2AgxBACEQDIABCyAQQRVGDTcgAEEANgIcIAAgBDYCFCAAQaSMgIAANgIQIABBEDYCDEEAIRAMfwsgAC0ANEEBRw00IAAgBCACELyAgIAAIhBFDTQgEEEVRw01IABB3AE2AhwgACAENgIUIABB1ZaAgAA2AhAgAEEVNgIMQQAhEAx+C0EAIRAgAEEANgIcIABBr4uAgAA2AhAgAEECNgIMIAAgFEEBajYCFAx9C0EAIRAMYwtBAiEQDGILQQ0hEAxhC0EPIRAMYAtBJSEQDF8LQRMhEAxeC0EVIRAMXQtBFiEQDFwLQRchEAxbC0EYIRAMWgtBGSEQDFkLQRohEAxYC0EbIRAMVwtBHCEQDFYLQR0hEAxVC0EfIRAMVAtBISEQDFMLQSMhEAxSC0HGACEQDFELQS4hEAxQC0EvIRAMTwtBOyEQDE4LQT0hEAxNC0HIACEQDEwLQckAIRAMSwtBywAhEAxKC0HMACEQDEkLQc4AIRAMSAtB0QAhEAxHC0HVACEQDEYLQdgAIRAMRQtB2QAhEAxEC0HbACEQDEMLQeQAIRAMQgtB5QAhEAxBC0HxACEQDEALQfQAIRAMPwtBjQEhEAw+C0GXASEQDD0LQakBIRAMPAtBrAEhEAw7C0HAASEQDDoLQbkBIRAMOQtBrwEhEAw4C0GxASEQDDcLQbIBIRAMNgtBtAEhEAw1C0G1ASEQDDQLQboBIRAMMwtBvQEhEAwyC0G/ASEQDDELQcEBIRAMMAsgAEEANgIcIAAgBDYCFCAAQemLgIAANgIQIABBHzYCDEEAIRAMSAsgAEHbATYCHCAAIAQ2AhQgAEH6loCAADYCECAAQRU2AgxBACEQDEcLIABB+AA2AhwgACAMNgIUIABBypiAgAA2AhAgAEEVNgIMQQAhEAxGCyAAQdEANgIcIAAgBTYCFCAAQbCXgIAANgIQIABBFTYCDEEAIRAMRQsgAEH5ADYCHCAAIAE2AhQgACAQNgIMQQAhEAxECyAAQfgANgIcIAAgATYCFCAAQcqYgIAANgIQIABBFTYCDEEAIRAMQwsgAEHkADYC
HCAAIAE2AhQgAEHjl4CAADYCECAAQRU2AgxBACEQDEILIABB1wA2AhwgACABNgIUIABByZeAgAA2AhAgAEEVNgIMQQAhEAxBCyAAQQA2AhwgACABNgIUIABBuY2AgAA2AhAgAEEaNgIMQQAhEAxACyAAQcIANgIcIAAgATYCFCAAQeOYgIAANgIQIABBFTYCDEEAIRAMPwsgAEEANgIEIAAgDyAPELGAgIAAIgRFDQEgAEE6NgIcIAAgBDYCDCAAIA9BAWo2AhRBACEQDD4LIAAoAgQhBCAAQQA2AgQCQCAAIAQgARCxgICAACIERQ0AIABBOzYCHCAAIAQ2AgwgACABQQFqNgIUQQAhEAw+CyABQQFqIQEMLQsgD0EBaiEBDC0LIABBADYCHCAAIA82AhQgAEHkkoCAADYCECAAQQQ2AgxBACEQDDsLIABBNjYCHCAAIAQ2AhQgACACNgIMQQAhEAw6CyAAQS42AhwgACAONgIUIAAgBDYCDEEAIRAMOQsgAEHQADYCHCAAIAE2AhQgAEGRmICAADYCECAAQRU2AgxBACEQDDgLIA1BAWohAQwsCyAAQRU2AhwgACABNgIUIABBgpmAgAA2AhAgAEEVNgIMQQAhEAw2CyAAQRs2AhwgACABNgIUIABBkZeAgAA2AhAgAEEVNgIMQQAhEAw1CyAAQQ82AhwgACABNgIUIABBkZeAgAA2AhAgAEEVNgIMQQAhEAw0CyAAQQs2AhwgACABNgIUIABBkZeAgAA2AhAgAEEVNgIMQQAhEAwzCyAAQRo2AhwgACABNgIUIABBgpmAgAA2AhAgAEEVNgIMQQAhEAwyCyAAQQs2AhwgACABNgIUIABBgpmAgAA2AhAgAEEVNgIMQQAhEAwxCyAAQQo2AhwgACABNgIUIABB5JaAgAA2AhAgAEEVNgIMQQAhEAwwCyAAQR42AhwgACABNgIUIABB+ZeAgAA2AhAgAEEVNgIMQQAhEAwvCyAAQQA2AhwgACAQNgIUIABB2o2AgAA2AhAgAEEUNgIMQQAhEAwuCyAAQQQ2AhwgACABNgIUIABBsJiAgAA2AhAgAEEVNgIMQQAhEAwtCyAAQQA2AgAgC0EBaiELC0G4ASEQDBILIABBADYCACAQQQFqIQFB9QAhEAwRCyABIQECQCAALQApQQVHDQBB4wAhEAwRC0HiACEQDBALQQAhECAAQQA2AhwgAEHkkYCAADYCECAAQQc2AgwgACAUQQFqNgIUDCgLIABBADYCACAXQQFqIQFBwAAhEAwOC0EBIQELIAAgAToALCAAQQA2AgAgF0EBaiEBC0EoIRAMCwsgASEBC0E4IRAMCQsCQCABIg8gAkYNAANAAkAgDy0AAEGAvoCAAGotAAAiAUEBRg0AIAFBAkcNAyAPQQFqIQEMBAsgD0EBaiIPIAJHDQALQT4hEAwiC0E+IRAMIQsgAEEAOgAsIA8hAQwBC0ELIRAMBgtBOiEQDAULIAFBAWohAUEtIRAMBAsgACABOgAsIABBADYCACAWQQFqIQFBDCEQDAMLIABBADYCACAXQQFqIQFBCiEQDAILIABBADYCAAsgAEEAOgAsIA0hAUEJIRAMAAsLQQAhECAAQQA2AhwgACALNgIUIABBzZCAgAA2AhAgAEEJNgIMDBcLQQAhECAAQQA2AhwgACAKNgIUIABB6YqAgAA2AhAgAEEJNgIMDBYLQQAhECAAQQA2AhwgACAJNgIUIABBt5CAgAA2AhAgAEEJNgIMDBULQQAhECAAQQA2AhwgACAINgIUIABBnJGAgAA2AhAgAEEJNgIMDBQLQQAhECAAQQA2AhwgACABNgIUIABBzZCAgAA2AhAgAEEJNgIMDBMLQQAhECAAQQA2AhwgACABNgIUIABB6YqAgAA2AhAgAEEJNgIMDBILQQAhECAAQQA2AhwgACABNgIUIABBt5CAgAA2AhAgAEEJNgIMDBELQQAhECAAQQA2AhwgACABNgIUIABBnJGAgAA2AhAgAEEJNgIMDBALQQAhECAAQQA2AhwgACABNgIUIABBl5WAgAA2AhAgAEEPNgIMDA8LQQAhECAAQQA2AhwgACABNgIUIABBl5WAgAA2AhAgAEEPNgIMDA4LQQAhECAAQQA2AhwgACABNgIUIABBwJKAgAA2AhAgAEELNgIMDA0LQQAhECAAQQA2AhwgACABNgIUIABBlYmAgAA2AhAgAEELNgIMDAwLQQAhECAAQQA2AhwgACABNgIUIABB4Y+AgAA2AhAgAEEKNgIMDAsLQQAhECAAQQA2AhwgACABNgIUIABB+4+AgAA2AhAgAEEKNgIMDAoLQQAhECAAQQA2AhwgACABNgIUIABB8ZmAgAA2AhAgAEECNgIMDAkLQQAhECAAQQA2AhwgACABNgIUIABBxJSAgAA2AhAgAEECNgIMDAgLQQAhECAAQQA2AhwgACABNgIUIABB8pWAgAA2AhAgAEECNgIMDAcLIABBAjYCHCAAIAE2AhQgAEGcmoCAADYCECAAQRY2AgxBACEQDAYLQQEhEAwFC0HUACEQIAEiBCACRg0EIANBCGogACAEIAJB2MKAgABBChDFgICAACADKAIMIQQgAygCCA4DAQQCAAsQyoCAgAAACyAAQQA2AhwgAEG1moCAADYCECAAQRc2AgwgACAEQQFqNgIUQQAhEAwCCyAAQQA2AhwgACAENgIUIABBypqAgAA2AhAgAEEJNgIMQQAhEAwBCwJAIAEiBCACRw0AQSIhEAwBCyAAQYmAgIAANgIIIAAgBDYCBEEhIRALIANBEGokgICAgAAgEAuvAQECfyABKAIAIQYCQAJAIAIgA0YNACAEIAZqIQQgBiADaiACayEHIAIgBkF/cyAFaiIGaiEFA0ACQCACLQAAIAQtAABGDQBBAiEEDAMLAkAgBg0AQQAhBCAFIQIMAwsgBkF/aiEGIARBAWohBCACQQFqIgIgA0cNAAsgByEGIAMhAgsgAEEBNgIAIAEgBjYCACAAIAI2AgQPCyABQQA2AgAgACAENgIAIAAgAjYCBAsKACAAEMeAgIAAC/I2AQt/I4CAgIAAQRBrIgEkgICAgAACQEEAKAKg0ICAAA0AQQAQy4CAgABBgNSEgABrIgJB2QBJDQBBACEDAkBBACgC4NOAgAAiBA0AQQBCfzcC7NOAgABBAEKAgISAgIDAADcC5NOAgABBACABQQhqQXBxQdiq1aoFcyIENgLg04CAAEEAQQA2AvTTgIAAQQBBADYCxNOAgAALQQAgAjYCzNOAgABBAEGA1ISAADYCyNOAgABBAEGA1ISAADYCmNCAgABBACAENgKs0ICAAEEAQX82AqjQgIAAA0AgA0HE0ICAAGogA0G40ICAAGoiBDYCACAEIANBsNCAgABqIgU2AgAgA0G80ICAAGogBTYCACADQczQgIAAaiADQcDQgIAAaiIFNgIAIAUgBDYCACADQdTQgIAAaiADQcjQgIAAaiIENgIAIAQgBTYCACADQdDQgIAAaiAENgIAIANBIGoiA0G
AAkcNAAtBgNSEgABBeEGA1ISAAGtBD3FBAEGA1ISAAEEIakEPcRsiA2oiBEEEaiACQUhqIgUgA2siA0EBcjYCAEEAQQAoAvDTgIAANgKk0ICAAEEAIAM2ApTQgIAAQQAgBDYCoNCAgABBgNSEgAAgBWpBODYCBAsCQAJAAkACQAJAAkACQAJAAkACQAJAAkAgAEHsAUsNAAJAQQAoAojQgIAAIgZBECAAQRNqQXBxIABBC0kbIgJBA3YiBHYiA0EDcUUNAAJAAkAgA0EBcSAEckEBcyIFQQN0IgRBsNCAgABqIgMgBEG40ICAAGooAgAiBCgCCCICRw0AQQAgBkF+IAV3cTYCiNCAgAAMAQsgAyACNgIIIAIgAzYCDAsgBEEIaiEDIAQgBUEDdCIFQQNyNgIEIAQgBWoiBCAEKAIEQQFyNgIEDAwLIAJBACgCkNCAgAAiB00NAQJAIANFDQACQAJAIAMgBHRBAiAEdCIDQQAgA2tycSIDQQAgA2txQX9qIgMgA0EMdkEQcSIDdiIEQQV2QQhxIgUgA3IgBCAFdiIDQQJ2QQRxIgRyIAMgBHYiA0EBdkECcSIEciADIAR2IgNBAXZBAXEiBHIgAyAEdmoiBEEDdCIDQbDQgIAAaiIFIANBuNCAgABqKAIAIgMoAggiAEcNAEEAIAZBfiAEd3EiBjYCiNCAgAAMAQsgBSAANgIIIAAgBTYCDAsgAyACQQNyNgIEIAMgBEEDdCIEaiAEIAJrIgU2AgAgAyACaiIAIAVBAXI2AgQCQCAHRQ0AIAdBeHFBsNCAgABqIQJBACgCnNCAgAAhBAJAAkAgBkEBIAdBA3Z0IghxDQBBACAGIAhyNgKI0ICAACACIQgMAQsgAigCCCEICyAIIAQ2AgwgAiAENgIIIAQgAjYCDCAEIAg2AggLIANBCGohA0EAIAA2ApzQgIAAQQAgBTYCkNCAgAAMDAtBACgCjNCAgAAiCUUNASAJQQAgCWtxQX9qIgMgA0EMdkEQcSIDdiIEQQV2QQhxIgUgA3IgBCAFdiIDQQJ2QQRxIgRyIAMgBHYiA0EBdkECcSIEciADIAR2IgNBAXZBAXEiBHIgAyAEdmpBAnRBuNKAgABqKAIAIgAoAgRBeHEgAmshBCAAIQUCQANAAkAgBSgCECIDDQAgBUEUaigCACIDRQ0CCyADKAIEQXhxIAJrIgUgBCAFIARJIgUbIQQgAyAAIAUbIQAgAyEFDAALCyAAKAIYIQoCQCAAKAIMIgggAEYNACAAKAIIIgNBACgCmNCAgABJGiAIIAM2AgggAyAINgIMDAsLAkAgAEEUaiIFKAIAIgMNACAAKAIQIgNFDQMgAEEQaiEFCwNAIAUhCyADIghBFGoiBSgCACIDDQAgCEEQaiEFIAgoAhAiAw0ACyALQQA2AgAMCgtBfyECIABBv39LDQAgAEETaiIDQXBxIQJBACgCjNCAgAAiB0UNAEEAIQsCQCACQYACSQ0AQR8hCyACQf///wdLDQAgA0EIdiIDIANBgP4/akEQdkEIcSIDdCIEIARBgOAfakEQdkEEcSIEdCIFIAVBgIAPakEQdkECcSIFdEEPdiADIARyIAVyayIDQQF0IAIgA0EVanZBAXFyQRxqIQsLQQAgAmshBAJAAkACQAJAIAtBAnRBuNKAgABqKAIAIgUNAEEAIQNBACEIDAELQQAhAyACQQBBGSALQQF2ayALQR9GG3QhAEEAIQgDQAJAIAUoAgRBeHEgAmsiBiAETw0AIAYhBCAFIQggBg0AQQAhBCAFIQggBSEDDAMLIAMgBUEUaigCACIGIAYgBSAAQR12QQRxakEQaigCACIFRhsgAyAGGyEDIABBAXQhACAFDQALCwJAIAMgCHINAEEAIQhBAiALdCIDQQAgA2tyIAdxIgNFDQMgA0EAIANrcUF/aiIDIANBDHZBEHEiA3YiBUEFdkEIcSIAIANyIAUgAHYiA0ECdkEEcSIFciADIAV2IgNBAXZBAnEiBXIgAyAFdiIDQQF2QQFxIgVyIAMgBXZqQQJ0QbjSgIAAaigCACEDCyADRQ0BCwNAIAMoAgRBeHEgAmsiBiAESSEAAkAgAygCECIFDQAgA0EUaigCACEFCyAGIAQgABshBCADIAggABshCCAFIQMgBQ0ACwsgCEUNACAEQQAoApDQgIAAIAJrTw0AIAgoAhghCwJAIAgoAgwiACAIRg0AIAgoAggiA0EAKAKY0ICAAEkaIAAgAzYCCCADIAA2AgwMCQsCQCAIQRRqIgUoAgAiAw0AIAgoAhAiA0UNAyAIQRBqIQULA0AgBSEGIAMiAEEUaiIFKAIAIgMNACAAQRBqIQUgACgCECIDDQALIAZBADYCAAwICwJAQQAoApDQgIAAIgMgAkkNAEEAKAKc0ICAACEEAkACQCADIAJrIgVBEEkNACAEIAJqIgAgBUEBcjYCBEEAIAU2ApDQgIAAQQAgADYCnNCAgAAgBCADaiAFNgIAIAQgAkEDcjYCBAwBCyAEIANBA3I2AgQgBCADaiIDIAMoAgRBAXI2AgRBAEEANgKc0ICAAEEAQQA2ApDQgIAACyAEQQhqIQMMCgsCQEEAKAKU0ICAACIAIAJNDQBBACgCoNCAgAAiAyACaiIEIAAgAmsiBUEBcjYCBEEAIAU2ApTQgIAAQQAgBDYCoNCAgAAgAyACQQNyNgIEIANBCGohAwwKCwJAAkBBACgC4NOAgABFDQBBACgC6NOAgAAhBAwBC0EAQn83AuzTgIAAQQBCgICEgICAwAA3AuTTgIAAQQAgAUEMakFwcUHYqtWqBXM2AuDTgIAAQQBBADYC9NOAgABBAEEANgLE04CAAEGAgAQhBAtBACEDAkAgBCACQccAaiIHaiIGQQAgBGsiC3EiCCACSw0AQQBBMDYC+NOAgAAMCgsCQEEAKALA04CAACIDRQ0AAkBBACgCuNOAgAAiBCAIaiIFIARNDQAgBSADTQ0BC0EAIQNBAEEwNgL404CAAAwKC0EALQDE04CAAEEEcQ0EAkACQAJAQQAoAqDQgIAAIgRFDQBByNOAgAAhAwNAAkAgAygCACIFIARLDQAgBSADKAIEaiAESw0DCyADKAIIIgMNAAsLQQAQy4CAgAAiAEF/Rg0FIAghBgJAQQAoAuTTgIAAIgNBf2oiBCAAcUUNACAIIABrIAQgAGpBACADa3FqIQYLIAYgAk0NBSAGQf7///8HSw0FAkBBACgCwNOAgAAiA0UNAEEAKAK404CAACIEIAZqIgUgBE0NBiAFIANLDQYLIAYQy4CAgAAiAyAARw0BDAcLIAYgAGsgC3EiBkH+////B0sNBCAGEMuAgIAAIgAgAygCACADKAIEakYNAyAAIQMLAkAgA0F/Rg0AIAJByABqIAZNDQACQCAHIAZrQQAoAujTgIAAIgRqQQAgBGtxIgRB/v///wdNDQAgAyEADAcLAkAgBBDLgICAAEF/Rg0AIAQgBmohBiADIQAMBwtBACAGaxDLgICAABoMBAsgAyEAIANBf0cNBQwDC0EAIQ
gMBwtBACEADAULIABBf0cNAgtBAEEAKALE04CAAEEEcjYCxNOAgAALIAhB/v///wdLDQEgCBDLgICAACEAQQAQy4CAgAAhAyAAQX9GDQEgA0F/Rg0BIAAgA08NASADIABrIgYgAkE4ak0NAQtBAEEAKAK404CAACAGaiIDNgK404CAAAJAIANBACgCvNOAgABNDQBBACADNgK804CAAAsCQAJAAkACQEEAKAKg0ICAACIERQ0AQcjTgIAAIQMDQCAAIAMoAgAiBSADKAIEIghqRg0CIAMoAggiAw0ADAMLCwJAAkBBACgCmNCAgAAiA0UNACAAIANPDQELQQAgADYCmNCAgAALQQAhA0EAIAY2AszTgIAAQQAgADYCyNOAgABBAEF/NgKo0ICAAEEAQQAoAuDTgIAANgKs0ICAAEEAQQA2AtTTgIAAA0AgA0HE0ICAAGogA0G40ICAAGoiBDYCACAEIANBsNCAgABqIgU2AgAgA0G80ICAAGogBTYCACADQczQgIAAaiADQcDQgIAAaiIFNgIAIAUgBDYCACADQdTQgIAAaiADQcjQgIAAaiIENgIAIAQgBTYCACADQdDQgIAAaiAENgIAIANBIGoiA0GAAkcNAAsgAEF4IABrQQ9xQQAgAEEIakEPcRsiA2oiBCAGQUhqIgUgA2siA0EBcjYCBEEAQQAoAvDTgIAANgKk0ICAAEEAIAM2ApTQgIAAQQAgBDYCoNCAgAAgACAFakE4NgIEDAILIAMtAAxBCHENACAEIAVJDQAgBCAATw0AIARBeCAEa0EPcUEAIARBCGpBD3EbIgVqIgBBACgClNCAgAAgBmoiCyAFayIFQQFyNgIEIAMgCCAGajYCBEEAQQAoAvDTgIAANgKk0ICAAEEAIAU2ApTQgIAAQQAgADYCoNCAgAAgBCALakE4NgIEDAELAkAgAEEAKAKY0ICAACIITw0AQQAgADYCmNCAgAAgACEICyAAIAZqIQVByNOAgAAhAwJAAkACQAJAAkACQAJAA0AgAygCACAFRg0BIAMoAggiAw0ADAILCyADLQAMQQhxRQ0BC0HI04CAACEDA0ACQCADKAIAIgUgBEsNACAFIAMoAgRqIgUgBEsNAwsgAygCCCEDDAALCyADIAA2AgAgAyADKAIEIAZqNgIEIABBeCAAa0EPcUEAIABBCGpBD3EbaiILIAJBA3I2AgQgBUF4IAVrQQ9xQQAgBUEIakEPcRtqIgYgCyACaiICayEDAkAgBiAERw0AQQAgAjYCoNCAgABBAEEAKAKU0ICAACADaiIDNgKU0ICAACACIANBAXI2AgQMAwsCQCAGQQAoApzQgIAARw0AQQAgAjYCnNCAgABBAEEAKAKQ0ICAACADaiIDNgKQ0ICAACACIANBAXI2AgQgAiADaiADNgIADAMLAkAgBigCBCIEQQNxQQFHDQAgBEF4cSEHAkACQCAEQf8BSw0AIAYoAggiBSAEQQN2IghBA3RBsNCAgABqIgBGGgJAIAYoAgwiBCAFRw0AQQBBACgCiNCAgABBfiAId3E2AojQgIAADAILIAQgAEYaIAQgBTYCCCAFIAQ2AgwMAQsgBigCGCEJAkACQCAGKAIMIgAgBkYNACAGKAIIIgQgCEkaIAAgBDYCCCAEIAA2AgwMAQsCQCAGQRRqIgQoAgAiBQ0AIAZBEGoiBCgCACIFDQBBACEADAELA0AgBCEIIAUiAEEUaiIEKAIAIgUNACAAQRBqIQQgACgCECIFDQALIAhBADYCAAsgCUUNAAJAAkAgBiAGKAIcIgVBAnRBuNKAgABqIgQoAgBHDQAgBCAANgIAIAANAUEAQQAoAozQgIAAQX4gBXdxNgKM0ICAAAwCCyAJQRBBFCAJKAIQIAZGG2ogADYCACAARQ0BCyAAIAk2AhgCQCAGKAIQIgRFDQAgACAENgIQIAQgADYCGAsgBigCFCIERQ0AIABBFGogBDYCACAEIAA2AhgLIAcgA2ohAyAGIAdqIgYoAgQhBAsgBiAEQX5xNgIEIAIgA2ogAzYCACACIANBAXI2AgQCQCADQf8BSw0AIANBeHFBsNCAgABqIQQCQAJAQQAoAojQgIAAIgVBASADQQN2dCIDcQ0AQQAgBSADcjYCiNCAgAAgBCEDDAELIAQoAgghAwsgAyACNgIMIAQgAjYCCCACIAQ2AgwgAiADNgIIDAMLQR8hBAJAIANB////B0sNACADQQh2IgQgBEGA/j9qQRB2QQhxIgR0IgUgBUGA4B9qQRB2QQRxIgV0IgAgAEGAgA9qQRB2QQJxIgB0QQ92IAQgBXIgAHJrIgRBAXQgAyAEQRVqdkEBcXJBHGohBAsgAiAENgIcIAJCADcCECAEQQJ0QbjSgIAAaiEFAkBBACgCjNCAgAAiAEEBIAR0IghxDQAgBSACNgIAQQAgACAIcjYCjNCAgAAgAiAFNgIYIAIgAjYCCCACIAI2AgwMAwsgA0EAQRkgBEEBdmsgBEEfRht0IQQgBSgCACEAA0AgACIFKAIEQXhxIANGDQIgBEEddiEAIARBAXQhBCAFIABBBHFqQRBqIggoAgAiAA0ACyAIIAI2AgAgAiAFNgIYIAIgAjYCDCACIAI2AggMAgsgAEF4IABrQQ9xQQAgAEEIakEPcRsiA2oiCyAGQUhqIgggA2siA0EBcjYCBCAAIAhqQTg2AgQgBCAFQTcgBWtBD3FBACAFQUlqQQ9xG2pBQWoiCCAIIARBEGpJGyIIQSM2AgRBAEEAKALw04CAADYCpNCAgABBACADNgKU0ICAAEEAIAs2AqDQgIAAIAhBEGpBACkC0NOAgAA3AgAgCEEAKQLI04CAADcCCEEAIAhBCGo2AtDTgIAAQQAgBjYCzNOAgABBACAANgLI04CAAEEAQQA2AtTTgIAAIAhBJGohAwNAIANBBzYCACADQQRqIgMgBUkNAAsgCCAERg0DIAggCCgCBEF+cTYCBCAIIAggBGsiADYCACAEIABBAXI2AgQCQCAAQf8BSw0AIABBeHFBsNCAgABqIQMCQAJAQQAoAojQgIAAIgVBASAAQQN2dCIAcQ0AQQAgBSAAcjYCiNCAgAAgAyEFDAELIAMoAgghBQsgBSAENgIMIAMgBDYCCCAEIAM2AgwgBCAFNgIIDAQLQR8hAwJAIABB////B0sNACAAQQh2IgMgA0GA/j9qQRB2QQhxIgN0IgUgBUGA4B9qQRB2QQRxIgV0IgggCEGAgA9qQRB2QQJxIgh0QQ92IAMgBXIgCHJrIgNBAXQgACADQRVqdkEBcXJBHGohAwsgBCADNgIcIARCADcCECADQQJ0QbjSgIAAaiEFAkBBACgCjNCAgAAiCEEBIAN0IgZxDQAgBSAENgIAQQAgCCAGcjYCjNCAgAAgBCAFNgIYIAQgBDYCCCAEIAQ2AgwMBAsgAEEAQRkgA0EBdmsgA0EfRht0IQMgBSgCACEIA0AgCCIFKAIEQXhxIABGDQMgA0EddiEIIANBAXQhAyAFIAhBBHFqQRBqIgYoAgAiCA0ACyAGI
AQ2AgAgBCAFNgIYIAQgBDYCDCAEIAQ2AggMAwsgBSgCCCIDIAI2AgwgBSACNgIIIAJBADYCGCACIAU2AgwgAiADNgIICyALQQhqIQMMBQsgBSgCCCIDIAQ2AgwgBSAENgIIIARBADYCGCAEIAU2AgwgBCADNgIIC0EAKAKU0ICAACIDIAJNDQBBACgCoNCAgAAiBCACaiIFIAMgAmsiA0EBcjYCBEEAIAM2ApTQgIAAQQAgBTYCoNCAgAAgBCACQQNyNgIEIARBCGohAwwDC0EAIQNBAEEwNgL404CAAAwCCwJAIAtFDQACQAJAIAggCCgCHCIFQQJ0QbjSgIAAaiIDKAIARw0AIAMgADYCACAADQFBACAHQX4gBXdxIgc2AozQgIAADAILIAtBEEEUIAsoAhAgCEYbaiAANgIAIABFDQELIAAgCzYCGAJAIAgoAhAiA0UNACAAIAM2AhAgAyAANgIYCyAIQRRqKAIAIgNFDQAgAEEUaiADNgIAIAMgADYCGAsCQAJAIARBD0sNACAIIAQgAmoiA0EDcjYCBCAIIANqIgMgAygCBEEBcjYCBAwBCyAIIAJqIgAgBEEBcjYCBCAIIAJBA3I2AgQgACAEaiAENgIAAkAgBEH/AUsNACAEQXhxQbDQgIAAaiEDAkACQEEAKAKI0ICAACIFQQEgBEEDdnQiBHENAEEAIAUgBHI2AojQgIAAIAMhBAwBCyADKAIIIQQLIAQgADYCDCADIAA2AgggACADNgIMIAAgBDYCCAwBC0EfIQMCQCAEQf///wdLDQAgBEEIdiIDIANBgP4/akEQdkEIcSIDdCIFIAVBgOAfakEQdkEEcSIFdCICIAJBgIAPakEQdkECcSICdEEPdiADIAVyIAJyayIDQQF0IAQgA0EVanZBAXFyQRxqIQMLIAAgAzYCHCAAQgA3AhAgA0ECdEG40oCAAGohBQJAIAdBASADdCICcQ0AIAUgADYCAEEAIAcgAnI2AozQgIAAIAAgBTYCGCAAIAA2AgggACAANgIMDAELIARBAEEZIANBAXZrIANBH0YbdCEDIAUoAgAhAgJAA0AgAiIFKAIEQXhxIARGDQEgA0EddiECIANBAXQhAyAFIAJBBHFqQRBqIgYoAgAiAg0ACyAGIAA2AgAgACAFNgIYIAAgADYCDCAAIAA2AggMAQsgBSgCCCIDIAA2AgwgBSAANgIIIABBADYCGCAAIAU2AgwgACADNgIICyAIQQhqIQMMAQsCQCAKRQ0AAkACQCAAIAAoAhwiBUECdEG40oCAAGoiAygCAEcNACADIAg2AgAgCA0BQQAgCUF+IAV3cTYCjNCAgAAMAgsgCkEQQRQgCigCECAARhtqIAg2AgAgCEUNAQsgCCAKNgIYAkAgACgCECIDRQ0AIAggAzYCECADIAg2AhgLIABBFGooAgAiA0UNACAIQRRqIAM2AgAgAyAINgIYCwJAAkAgBEEPSw0AIAAgBCACaiIDQQNyNgIEIAAgA2oiAyADKAIEQQFyNgIEDAELIAAgAmoiBSAEQQFyNgIEIAAgAkEDcjYCBCAFIARqIAQ2AgACQCAHRQ0AIAdBeHFBsNCAgABqIQJBACgCnNCAgAAhAwJAAkBBASAHQQN2dCIIIAZxDQBBACAIIAZyNgKI0ICAACACIQgMAQsgAigCCCEICyAIIAM2AgwgAiADNgIIIAMgAjYCDCADIAg2AggLQQAgBTYCnNCAgABBACAENgKQ0ICAAAsgAEEIaiEDCyABQRBqJICAgIAAIAMLCgAgABDJgICAAAviDQEHfwJAIABFDQAgAEF4aiIBIABBfGooAgAiAkF4cSIAaiEDAkAgAkEBcQ0AIAJBA3FFDQEgASABKAIAIgJrIgFBACgCmNCAgAAiBEkNASACIABqIQACQCABQQAoApzQgIAARg0AAkAgAkH/AUsNACABKAIIIgQgAkEDdiIFQQN0QbDQgIAAaiIGRhoCQCABKAIMIgIgBEcNAEEAQQAoAojQgIAAQX4gBXdxNgKI0ICAAAwDCyACIAZGGiACIAQ2AgggBCACNgIMDAILIAEoAhghBwJAAkAgASgCDCIGIAFGDQAgASgCCCICIARJGiAGIAI2AgggAiAGNgIMDAELAkAgAUEUaiICKAIAIgQNACABQRBqIgIoAgAiBA0AQQAhBgwBCwNAIAIhBSAEIgZBFGoiAigCACIEDQAgBkEQaiECIAYoAhAiBA0ACyAFQQA2AgALIAdFDQECQAJAIAEgASgCHCIEQQJ0QbjSgIAAaiICKAIARw0AIAIgBjYCACAGDQFBAEEAKAKM0ICAAEF+IAR3cTYCjNCAgAAMAwsgB0EQQRQgBygCECABRhtqIAY2AgAgBkUNAgsgBiAHNgIYAkAgASgCECICRQ0AIAYgAjYCECACIAY2AhgLIAEoAhQiAkUNASAGQRRqIAI2AgAgAiAGNgIYDAELIAMoAgQiAkEDcUEDRw0AIAMgAkF+cTYCBEEAIAA2ApDQgIAAIAEgAGogADYCACABIABBAXI2AgQPCyABIANPDQAgAygCBCICQQFxRQ0AAkACQCACQQJxDQACQCADQQAoAqDQgIAARw0AQQAgATYCoNCAgABBAEEAKAKU0ICAACAAaiIANgKU0ICAACABIABBAXI2AgQgAUEAKAKc0ICAAEcNA0EAQQA2ApDQgIAAQQBBADYCnNCAgAAPCwJAIANBACgCnNCAgABHDQBBACABNgKc0ICAAEEAQQAoApDQgIAAIABqIgA2ApDQgIAAIAEgAEEBcjYCBCABIABqIAA2AgAPCyACQXhxIABqIQACQAJAIAJB/wFLDQAgAygCCCIEIAJBA3YiBUEDdEGw0ICAAGoiBkYaAkAgAygCDCICIARHDQBBAEEAKAKI0ICAAEF+IAV3cTYCiNCAgAAMAgsgAiAGRhogAiAENgIIIAQgAjYCDAwBCyADKAIYIQcCQAJAIAMoAgwiBiADRg0AIAMoAggiAkEAKAKY0ICAAEkaIAYgAjYCCCACIAY2AgwMAQsCQCADQRRqIgIoAgAiBA0AIANBEGoiAigCACIEDQBBACEGDAELA0AgAiEFIAQiBkEUaiICKAIAIgQNACAGQRBqIQIgBigCECIEDQALIAVBADYCAAsgB0UNAAJAAkAgAyADKAIcIgRBAnRBuNKAgABqIgIoAgBHDQAgAiAGNgIAIAYNAUEAQQAoAozQgIAAQX4gBHdxNgKM0ICAAAwCCyAHQRBBFCAHKAIQIANGG2ogBjYCACAGRQ0BCyAGIAc2AhgCQCADKAIQIgJFDQAgBiACNgIQIAIgBjYCGAsgAygCFCICRQ0AIAZBFGogAjYCACACIAY2AhgLIAEgAGogADYCACABIABBAXI2AgQgAUEAKAKc0ICAAEcNAUEAIAA2ApDQgIAADwsgAyACQX5xNgIEIAEgAGogADYCACABIABBAXI2AgQLAkAgAEH/AUsNACAAQXhxQbDQgIAAaiECAkACQEEAKAKI0ICAACIEQQEgAEEDdnQiAHENAEEA
IAQgAHI2AojQgIAAIAIhAAwBCyACKAIIIQALIAAgATYCDCACIAE2AgggASACNgIMIAEgADYCCA8LQR8hAgJAIABB////B0sNACAAQQh2IgIgAkGA/j9qQRB2QQhxIgJ0IgQgBEGA4B9qQRB2QQRxIgR0IgYgBkGAgA9qQRB2QQJxIgZ0QQ92IAIgBHIgBnJrIgJBAXQgACACQRVqdkEBcXJBHGohAgsgASACNgIcIAFCADcCECACQQJ0QbjSgIAAaiEEAkACQEEAKAKM0ICAACIGQQEgAnQiA3ENACAEIAE2AgBBACAGIANyNgKM0ICAACABIAQ2AhggASABNgIIIAEgATYCDAwBCyAAQQBBGSACQQF2ayACQR9GG3QhAiAEKAIAIQYCQANAIAYiBCgCBEF4cSAARg0BIAJBHXYhBiACQQF0IQIgBCAGQQRxakEQaiIDKAIAIgYNAAsgAyABNgIAIAEgBDYCGCABIAE2AgwgASABNgIIDAELIAQoAggiACABNgIMIAQgATYCCCABQQA2AhggASAENgIMIAEgADYCCAtBAEEAKAKo0ICAAEF/aiIBQX8gARs2AqjQgIAACwsEAAAAC04AAkAgAA0APwBBEHQPCwJAIABB//8DcQ0AIABBf0wNAAJAIABBEHZAACIAQX9HDQBBAEEwNgL404CAAEF/DwsgAEEQdA8LEMqAgIAAAAvyAgIDfwF+AkAgAkUNACAAIAE6AAAgAiAAaiIDQX9qIAE6AAAgAkEDSQ0AIAAgAToAAiAAIAE6AAEgA0F9aiABOgAAIANBfmogAToAACACQQdJDQAgACABOgADIANBfGogAToAACACQQlJDQAgAEEAIABrQQNxIgRqIgMgAUH/AXFBgYKECGwiATYCACADIAIgBGtBfHEiBGoiAkF8aiABNgIAIARBCUkNACADIAE2AgggAyABNgIEIAJBeGogATYCACACQXRqIAE2AgAgBEEZSQ0AIAMgATYCGCADIAE2AhQgAyABNgIQIAMgATYCDCACQXBqIAE2AgAgAkFsaiABNgIAIAJBaGogATYCACACQWRqIAE2AgAgBCADQQRxQRhyIgVrIgJBIEkNACABrUKBgICAEH4hBiADIAVqIQEDQCABIAY3AxggASAGNwMQIAEgBjcDCCABIAY3AwAgAUEgaiEBIAJBYGoiAkEfSw0ACwsgAAsLjkgBAEGACAuGSAEAAAACAAAAAwAAAAAAAAAAAAAABAAAAAUAAAAAAAAAAAAAAAYAAAAHAAAACAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAASW52YWxpZCBjaGFyIGluIHVybCBxdWVyeQBTcGFuIGNhbGxiYWNrIGVycm9yIGluIG9uX2JvZHkAQ29udGVudC1MZW5ndGggb3ZlcmZsb3cAQ2h1bmsgc2l6ZSBvdmVyZmxvdwBSZXNwb25zZSBvdmVyZmxvdwBJbnZhbGlkIG1ldGhvZCBmb3IgSFRUUC94LnggcmVxdWVzdABJbnZhbGlkIG1ldGhvZCBmb3IgUlRTUC94LnggcmVxdWVzdABFeHBlY3RlZCBTT1VSQ0UgbWV0aG9kIGZvciBJQ0UveC54IHJlcXVlc3QASW52YWxpZCBjaGFyIGluIHVybCBmcmFnbWVudCBzdGFydABFeHBlY3RlZCBkb3QAU3BhbiBjYWxsYmFjayBlcnJvciBpbiBvbl9zdGF0dXMASW52YWxpZCByZXNwb25zZSBzdGF0dXMASW52YWxpZCBjaGFyYWN0ZXIgaW4gY2h1bmsgZXh0ZW5zaW9ucwBVc2VyIGNhbGxiYWNrIGVycm9yAGBvbl9yZXNldGAgY2FsbGJhY2sgZXJyb3IAYG9uX2NodW5rX2hlYWRlcmAgY2FsbGJhY2sgZXJyb3IAYG9uX21lc3NhZ2VfYmVnaW5gIGNhbGxiYWNrIGVycm9yAGBvbl9jaHVua19leHRlbnNpb25fdmFsdWVgIGNhbGxiYWNrIGVycm9yAGBvbl9zdGF0dXNfY29tcGxldGVgIGNhbGxiYWNrIGVycm9yAGBvbl92ZXJzaW9uX2NvbXBsZXRlYCBjYWxsYmFjayBlcnJvcgBgb25fdXJsX2NvbXBsZXRlYCBjYWxsYmFjayBlcnJvcgBgb25fY2h1bmtfY29tcGxldGVgIGNhbGxiYWNrIGVycm9yAGBvbl9oZWFkZXJfdmFsdWVfY29tcGxldGVgIGNhbGxiYWNrIGVycm9yAGBvbl9tZXNzYWdlX2NvbXBsZXRlYCBjYWxsYmFjayBlcnJvcgBgb25fbWV0aG9kX2NvbXBsZXRlYCBjYWxsYmFjayBlcnJvcgBgb25faGVhZGVyX2ZpZWxkX2NvbXBsZXRlYCBjYWxsYmFjayBlcnJvcgBgb25fY2h1bmtfZXh0ZW5zaW9uX25hbWVgIGNhbGxiYWNrIGVycm9yAFVuZXhwZWN0ZWQgY2hhciBpbiB1cmwgc2VydmVyAEludmFsaWQgaGVhZGVyIHZhbHVlIGNoYXIASW52YWxpZCBoZWFkZXIgZmllbGQgY2hhcgBTcGFuIGNhbGxiYWNrIGVycm9yIGluIG9uX3ZlcnNpb24ASW52YWxpZCBtaW5vciB2ZXJzaW9uAEludmFsaWQgbWFqb3IgdmVyc2lvbgBFeHBlY3RlZCBzcGFjZSBhZnRlciB2ZXJzaW9uAEV4cGVjdGVkIENSTEYgYWZ0ZXIgdmVyc2lvbgBJbnZhbGlkIEhUVFAgdmVyc2lvbgBJbnZhbGlkIGhlYWRlciB0b2tlbgBTcGFuIGNhbGxiYWNrIGVycm9yIGluIG9uX3VybABJbnZhbGlkIGNoYXJhY3RlcnMgaW4gdXJsAFVuZXhwZWN0ZWQgc3RhcnQgY2hhciBpbiB1cmwARG91YmxlIEAgaW4gdXJsAEVtcHR5IENvbnRlbnQtTGVuZ3RoAEludmFsaWQgY2hhcmFjdGVyIGluIENvbnRlbnQtTGVuZ3RoAER1cGxpY2F0ZSBDb250ZW50LUxlbmd0aABJbnZhbGlkIGNoYXIgaW4gdXJsIHBhdGgAQ29udGVudC1MZW5ndGggY2FuJ3QgYmUgcHJlc2VudCB3aXRoIFRyYW5zZmVyLUVuY29kaW5nAEludmFsaWQgY2hhcmFjdGVyIGluIGNodW5rIHNpemUAU3BhbiBjYWxsYmFjayBlcnJvciBpbiBvbl9oZWFkZXJfdmFsdWUAU3BhbiBjYWxsYmFjayBlcnJvciBpbiBvbl9jaHVua19leHRlbnNpb25fdmFsdWUASW52YWxpZCBjaGFyYWN0ZXIgaW4gY2h1bmsgZXh0ZW5zaW9ucyB2YWx1ZQBNaXNzaW5nIGV4cGVjdGVkIExGIGFmdGVyIGhlYWRlciB2YWx1ZQBJbnZhbGlkIGBUcmFuc2Zlci1FbmNvZGluZ2AgaGVhZGVyIHZhbHVlAEludmFsaWQgY2hhcmFjdGVyIGluIGN
odW5rIGV4dGVuc2lvbnMgcXVvdGUgdmFsdWUASW52YWxpZCBjaGFyYWN0ZXIgaW4gY2h1bmsgZXh0ZW5zaW9ucyBxdW90ZWQgdmFsdWUAUGF1c2VkIGJ5IG9uX2hlYWRlcnNfY29tcGxldGUASW52YWxpZCBFT0Ygc3RhdGUAb25fcmVzZXQgcGF1c2UAb25fY2h1bmtfaGVhZGVyIHBhdXNlAG9uX21lc3NhZ2VfYmVnaW4gcGF1c2UAb25fY2h1bmtfZXh0ZW5zaW9uX3ZhbHVlIHBhdXNlAG9uX3N0YXR1c19jb21wbGV0ZSBwYXVzZQBvbl92ZXJzaW9uX2NvbXBsZXRlIHBhdXNlAG9uX3VybF9jb21wbGV0ZSBwYXVzZQBvbl9jaHVua19jb21wbGV0ZSBwYXVzZQBvbl9oZWFkZXJfdmFsdWVfY29tcGxldGUgcGF1c2UAb25fbWVzc2FnZV9jb21wbGV0ZSBwYXVzZQBvbl9tZXRob2RfY29tcGxldGUgcGF1c2UAb25faGVhZGVyX2ZpZWxkX2NvbXBsZXRlIHBhdXNlAG9uX2NodW5rX2V4dGVuc2lvbl9uYW1lIHBhdXNlAFVuZXhwZWN0ZWQgc3BhY2UgYWZ0ZXIgc3RhcnQgbGluZQBTcGFuIGNhbGxiYWNrIGVycm9yIGluIG9uX2NodW5rX2V4dGVuc2lvbl9uYW1lAEludmFsaWQgY2hhcmFjdGVyIGluIGNodW5rIGV4dGVuc2lvbnMgbmFtZQBQYXVzZSBvbiBDT05ORUNUL1VwZ3JhZGUAUGF1c2Ugb24gUFJJL1VwZ3JhZGUARXhwZWN0ZWQgSFRUUC8yIENvbm5lY3Rpb24gUHJlZmFjZQBTcGFuIGNhbGxiYWNrIGVycm9yIGluIG9uX21ldGhvZABFeHBlY3RlZCBzcGFjZSBhZnRlciBtZXRob2QAU3BhbiBjYWxsYmFjayBlcnJvciBpbiBvbl9oZWFkZXJfZmllbGQAUGF1c2VkAEludmFsaWQgd29yZCBlbmNvdW50ZXJlZABJbnZhbGlkIG1ldGhvZCBlbmNvdW50ZXJlZABVbmV4cGVjdGVkIGNoYXIgaW4gdXJsIHNjaGVtYQBSZXF1ZXN0IGhhcyBpbnZhbGlkIGBUcmFuc2Zlci1FbmNvZGluZ2AAU1dJVENIX1BST1hZAFVTRV9QUk9YWQBNS0FDVElWSVRZAFVOUFJPQ0VTU0FCTEVfRU5USVRZAENPUFkATU9WRURfUEVSTUFORU5UTFkAVE9PX0VBUkxZAE5PVElGWQBGQUlMRURfREVQRU5ERU5DWQBCQURfR0FURVdBWQBQTEFZAFBVVABDSEVDS09VVABHQVRFV0FZX1RJTUVPVVQAUkVRVUVTVF9USU1FT1VUAE5FVFdPUktfQ09OTkVDVF9USU1FT1VUAENPTk5FQ1RJT05fVElNRU9VVABMT0dJTl9USU1FT1VUAE5FVFdPUktfUkVBRF9USU1FT1VUAFBPU1QATUlTRElSRUNURURfUkVRVUVTVABDTElFTlRfQ0xPU0VEX1JFUVVFU1QAQ0xJRU5UX0NMT1NFRF9MT0FEX0JBTEFOQ0VEX1JFUVVFU1QAQkFEX1JFUVVFU1QASFRUUF9SRVFVRVNUX1NFTlRfVE9fSFRUUFNfUE9SVABSRVBPUlQASU1fQV9URUFQT1QAUkVTRVRfQ09OVEVOVABOT19DT05URU5UAFBBUlRJQUxfQ09OVEVOVABIUEVfSU5WQUxJRF9DT05TVEFOVABIUEVfQ0JfUkVTRVQAR0VUAEhQRV9TVFJJQ1QAQ09ORkxJQ1QAVEVNUE9SQVJZX1JFRElSRUNUAFBFUk1BTkVOVF9SRURJUkVDVABDT05ORUNUAE1VTFRJX1NUQVRVUwBIUEVfSU5WQUxJRF9TVEFUVVMAVE9PX01BTllfUkVRVUVTVFMARUFSTFlfSElOVFMAVU5BVkFJTEFCTEVfRk9SX0xFR0FMX1JFQVNPTlMAT1BUSU9OUwBTV0lUQ0hJTkdfUFJPVE9DT0xTAFZBUklBTlRfQUxTT19ORUdPVElBVEVTAE1VTFRJUExFX0NIT0lDRVMASU5URVJOQUxfU0VSVkVSX0VSUk9SAFdFQl9TRVJWRVJfVU5LTk9XTl9FUlJPUgBSQUlMR1VOX0VSUk9SAElERU5USVRZX1BST1ZJREVSX0FVVEhFTlRJQ0FUSU9OX0VSUk9SAFNTTF9DRVJUSUZJQ0FURV9FUlJPUgBJTlZBTElEX1hfRk9SV0FSREVEX0ZPUgBTRVRfUEFSQU1FVEVSAEdFVF9QQVJBTUVURVIASFBFX1VTRVIAU0VFX09USEVSAEhQRV9DQl9DSFVOS19IRUFERVIATUtDQUxFTkRBUgBTRVRVUABXRUJfU0VSVkVSX0lTX0RPV04AVEVBUkRPV04ASFBFX0NMT1NFRF9DT05ORUNUSU9OAEhFVVJJU1RJQ19FWFBJUkFUSU9OAERJU0NPTk5FQ1RFRF9PUEVSQVRJT04ATk9OX0FVVEhPUklUQVRJVkVfSU5GT1JNQVRJT04ASFBFX0lOVkFMSURfVkVSU0lPTgBIUEVfQ0JfTUVTU0FHRV9CRUdJTgBTSVRFX0lTX0ZST1pFTgBIUEVfSU5WQUxJRF9IRUFERVJfVE9LRU4ASU5WQUxJRF9UT0tFTgBGT1JCSURERU4ARU5IQU5DRV9ZT1VSX0NBTE0ASFBFX0lOVkFMSURfVVJMAEJMT0NLRURfQllfUEFSRU5UQUxfQ09OVFJPTABNS0NPTABBQ0wASFBFX0lOVEVSTkFMAFJFUVVFU1RfSEVBREVSX0ZJRUxEU19UT09fTEFSR0VfVU5PRkZJQ0lBTABIUEVfT0sAVU5MSU5LAFVOTE9DSwBQUkkAUkVUUllfV0lUSABIUEVfSU5WQUxJRF9DT05URU5UX0xFTkdUSABIUEVfVU5FWFBFQ1RFRF9DT05URU5UX0xFTkdUSABGTFVTSABQUk9QUEFUQ0gATS1TRUFSQ0gAVVJJX1RPT19MT05HAFBST0NFU1NJTkcATUlTQ0VMTEFORU9VU19QRVJTSVNURU5UX1dBUk5JTkcATUlTQ0VMTEFORU9VU19XQVJOSU5HAEhQRV9JTlZBTElEX1RSQU5TRkVSX0VOQ09ESU5HAEV4cGVjdGVkIENSTEYASFBFX0lOVkFMSURfQ0hVTktfU0laRQBNT1ZFAENPTlRJTlVFAEhQRV9DQl9TVEFUVVNfQ09NUExFVEUASFBFX0NCX0hFQURFUlNfQ09NUExFVEUASFBFX0NCX1ZFUlNJT05fQ09NUExFVEUASFBFX0NCX1VSTF9DT01QTEVURQBIUEVfQ0JfQ0hVTktfQ09NUExFVEUASFBFX0NCX0hFQURFUl9WQUxVRV9DT01QTEVURQBIUEVfQ0JfQ0hVTktfRVhURU5TSU9OX1ZBTFVFX0NPTVBMRVRFAEhQRV9DQl9DSFVOS19FWFRFTlNJT05fTk
FNRV9DT01QTEVURQBIUEVfQ0JfTUVTU0FHRV9DT01QTEVURQBIUEVfQ0JfTUVUSE9EX0NPTVBMRVRFAEhQRV9DQl9IRUFERVJfRklFTERfQ09NUExFVEUAREVMRVRFAEhQRV9JTlZBTElEX0VPRl9TVEFURQBJTlZBTElEX1NTTF9DRVJUSUZJQ0FURQBQQVVTRQBOT19SRVNQT05TRQBVTlNVUFBPUlRFRF9NRURJQV9UWVBFAEdPTkUATk9UX0FDQ0VQVEFCTEUAU0VSVklDRV9VTkFWQUlMQUJMRQBSQU5HRV9OT1RfU0FUSVNGSUFCTEUAT1JJR0lOX0lTX1VOUkVBQ0hBQkxFAFJFU1BPTlNFX0lTX1NUQUxFAFBVUkdFAE1FUkdFAFJFUVVFU1RfSEVBREVSX0ZJRUxEU19UT09fTEFSR0UAUkVRVUVTVF9IRUFERVJfVE9PX0xBUkdFAFBBWUxPQURfVE9PX0xBUkdFAElOU1VGRklDSUVOVF9TVE9SQUdFAEhQRV9QQVVTRURfVVBHUkFERQBIUEVfUEFVU0VEX0gyX1VQR1JBREUAU09VUkNFAEFOTk9VTkNFAFRSQUNFAEhQRV9VTkVYUEVDVEVEX1NQQUNFAERFU0NSSUJFAFVOU1VCU0NSSUJFAFJFQ09SRABIUEVfSU5WQUxJRF9NRVRIT0QATk9UX0ZPVU5EAFBST1BGSU5EAFVOQklORABSRUJJTkQAVU5BVVRIT1JJWkVEAE1FVEhPRF9OT1RfQUxMT1dFRABIVFRQX1ZFUlNJT05fTk9UX1NVUFBPUlRFRABBTFJFQURZX1JFUE9SVEVEAEFDQ0VQVEVEAE5PVF9JTVBMRU1FTlRFRABMT09QX0RFVEVDVEVEAEhQRV9DUl9FWFBFQ1RFRABIUEVfTEZfRVhQRUNURUQAQ1JFQVRFRABJTV9VU0VEAEhQRV9QQVVTRUQAVElNRU9VVF9PQ0NVUkVEAFBBWU1FTlRfUkVRVUlSRUQAUFJFQ09ORElUSU9OX1JFUVVJUkVEAFBST1hZX0FVVEhFTlRJQ0FUSU9OX1JFUVVJUkVEAE5FVFdPUktfQVVUSEVOVElDQVRJT05fUkVRVUlSRUQATEVOR1RIX1JFUVVJUkVEAFNTTF9DRVJUSUZJQ0FURV9SRVFVSVJFRABVUEdSQURFX1JFUVVJUkVEAFBBR0VfRVhQSVJFRABQUkVDT05ESVRJT05fRkFJTEVEAEVYUEVDVEFUSU9OX0ZBSUxFRABSRVZBTElEQVRJT05fRkFJTEVEAFNTTF9IQU5EU0hBS0VfRkFJTEVEAExPQ0tFRABUUkFOU0ZPUk1BVElPTl9BUFBMSUVEAE5PVF9NT0RJRklFRABOT1RfRVhURU5ERUQAQkFORFdJRFRIX0xJTUlUX0VYQ0VFREVEAFNJVEVfSVNfT1ZFUkxPQURFRABIRUFEAEV4cGVjdGVkIEhUVFAvAABeEwAAJhMAADAQAADwFwAAnRMAABUSAAA5FwAA8BIAAAoQAAB1EgAArRIAAIITAABPFAAAfxAAAKAVAAAjFAAAiRIAAIsUAABNFQAA1BEAAM8UAAAQGAAAyRYAANwWAADBEQAA4BcAALsUAAB0FAAAfBUAAOUUAAAIFwAAHxAAAGUVAACjFAAAKBUAAAIVAACZFQAALBAAAIsZAABPDwAA1A4AAGoQAADOEAAAAhcAAIkOAABuEwAAHBMAAGYUAABWFwAAwRMAAM0TAABsEwAAaBcAAGYXAABfFwAAIhMAAM4PAABpDgAA2A4AAGMWAADLEwAAqg4AACgXAAAmFwAAxRMAAF0WAADoEQAAZxMAAGUTAADyFgAAcxMAAB0XAAD5FgAA8xEAAM8OAADOFQAADBIAALMRAAClEQAAYRAAADIXAAC7EwAAAAAAAAAAAAAAAAAAAAAAAAABAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAEBAgEBAQEBAQEBAQEBAQEBAQEBAQEBAQEBAQEBAQEBAQEBAQEBAQEBAQEBAQEBAQEBAQEBAQEBAQEBAQABAQEBAQEBAQEBAQEBAQEBAQEBAQEBAQEBAQEBAQEBAQEBAQEBAQEBAQEBAQEBAQEBAQEBAQEBAQEBAQEBAQEBAQEBAQEBAQEBAQEBAQEBAQEBAQEBAQEBAQEBAQEBAQEBAQEBAQEBAQEBAQEBAQEBAQEBAQEBAQEBAQEBAQEBAQEBAQEBAQEBAQEBAQEBAQEBAQEBAQEBAQEBAQEBAQEBAQEBAAAAAAAAAAAAAAAAAAEAAAAAAAAAAAAAAAAAAAAAAAAAAgMCAgICAgAAAgIAAgIAAgICAgICAgICAgAEAAAAAAACAgICAgICAgICAgICAgICAgICAgICAgICAgAAAAICAgICAgICAgICAgICAgICAgICAgICAgICAgICAAIAAgAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAABAAAAAAAAAAAAAAAAAAAAAAAAAAIAAgICAgIAAAICAAICAAICAgICAgICAgIAAwAEAAAAAgICAgICAgICAgICAgICAgICAgICAgICAgIAAAACAgICAgICAgICAgICAgICAgICAgICAgICAgICAgACAAIAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAABsb3NlZWVwLWFsaXZlAAAAAAAAAAAAAAAAAQAAAAAAAAAAAAAAAAAAAAAAAAAAAAABAQEBAQEBAQEBAQEBAQEBAQEBAQEBAQEBAQEBAQEBAQEBAQEBAQEBAQEBAQEBAQEBAQEBAQEBAQEBAQEBAQEBAQEBAQEBAQEBAQEBAQEBAQEBAQEBAQEBAQEBAQEBAQABAQEBAQEBAQEBAQEBAQEBAQEBAQEBAQEBAQEBAQEBAQEBAQEBAQEBAQEBAQEBAQEBAQEBAQEBAQEBAQEBAQEBAQEBAQEBAQEBAQEBAQEBAQEBAQEBAQEBAQEBAQEBAQEBAQEBAQEBAQEBAQEBAQEBAQEBAQEBAQEBAQEBAQEBAQAAAAAAAAAAAAEAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAQEBAQEBAQEBAQEBAgEBAQEBAQEBAQEBAQEBAQEBAQEBAQEBAQEBAQEBAQEBAQEBAQEBAQEBAQEBAQEBAQEBAQEBAQEBAQEBAQEBAQEBAQEBAQEBAQEBAQEBAQEBAQEAAQEBAQEBAQEBAQEBAQEBAQEBAQEBAQEBAQEBAQEBAQEBAQEBAQEBAQEBA
QEBAQEBAQEBAQEBAQEBAQEBAQEBAQEBAQEBAQEBAQEBAQEBAQEBAQEBAQEBAQEBAQEBAQEBAQEBAQEBAQEBAQEBAQEBAQEBAQEBAQEBAQEBAQEBAQFjaHVua2VkAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAABAQABAQEBAQAAAQEAAQEAAQEBAQEBAQEBAQAAAAAAAAABAQEBAQEBAQEBAQEBAQEBAQEBAQEBAQEBAQAAAAEBAQEBAQEBAQEBAQEBAQEBAQEBAQEBAQEBAQEBAAEAAQAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAGVjdGlvbmVudC1sZW5ndGhvbnJveHktY29ubmVjdGlvbgAAAAAAAAAAAAAAAAAAAHJhbnNmZXItZW5jb2RpbmdwZ3JhZGUNCg0KDQpTTQ0KDQpUVFAvQ0UvVFNQLwAAAAAAAAAAAAAAAAECAAEDAAAAAAAAAAAAAAAAAAAAAAAABAEBAQEBAQEBAQEBAQEBAQEBAQEBAQEBAQEBAQEBAQEBAQEBAQEBAQEBAQEBAQEBAQEBAQEBAQEBAQEBAQEBAQEBAQEBAQEBAQEBAQEBAQEBAQEBAQEBAQEBAQEBAQEAAQEBAQEBAQEBAQEBAQEBAQEBAQEBAQEBAQEBAQEBAQEBAQEBAQEBAQEBAQEBAQEBAQEBAQEBAQEBAQEBAQEBAQEBAQEBAQEBAQEBAQEBAQEBAQEBAQEBAQEBAQEBAQEBAQEBAQEBAQEBAQEBAQEBAQEBAQEBAQEBAQEBAQEBAQEAAAAAAAAAAAABAgABAwAAAAAAAAAAAAAAAAAAAAAAAAQBAQUBAQEBAQEBAQEBAQEBAQEBAQEBAQEBAQEBAQEBAQEBAQEBAQEBAQEBAQEBAQEBAQEBAQEBAQEBAQEBAQEBAQEBAQEBAQEBAQEBAQEBAQEBAQEBAQEBAQEBAQEBAAEBAQEBAQEBAQEBAQEBAQEBAQEBAQEBAQEBAQEBAQEBAQEBAQEBAQEBAQEBAQEBAQEBAQEBAQEBAQEBAQEBAQEBAQEBAQEBAQEBAQEBAQEBAQEBAQEBAQEBAQEBAQEBAQEBAQEBAQEBAQEBAQEBAQEBAQEBAQEBAQEBAQEBAQEBAAAAAAAAAAAAAQAAAQAAAAAAAAAAAAAAAAAAAAAAAAAAAQEAAQEBAQEBAQEBAQEBAQEBAQEBAQEBAQEBAQEBAAEBAQEBAQEBAQEBAQEBAQEBAQEBAQEBAQEBAQEBAQEBAQEBAQEBAQEBAQEBAQEBAQEBAQEBAQEBAQEBAQEBAQABAQEBAQEBAQEBAQEBAQEBAQEBAQEBAQEBAQEBAQEBAQEBAQEBAQEBAQEBAQEBAQEBAQEBAQEBAQEBAQEBAQEBAQEBAQEBAQEBAQEBAQEBAQEBAQEBAQEBAQEBAQEBAQEBAQEBAQEBAQEBAQEBAQEBAQEBAQEBAQEBAQEBAQEBAQAAAAAAAAAAAAABAAACAAAAAAAAAAAAAAAAAAAAAAAAAwQAAAQEBAQEBAQEBAQEBQQEBAQEBAQEBAQEBAAEAAYHBAQEBAQEBAQEBAQEBAQEBAQEBAQEBAQEBAQEAAQABAAEBAQEBAQEBAQEBAQEBAQEBAQEBAQEBAQEBAAAAAQAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAQAAAQAAAAAAAAAAAAAAAAAAAAAAAAEAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAgAAAAAAAAMDAwMDAwMDAwMDAwMDAwMDAwMDAwMDAwMDAAAAAAAAAwMDAwMDAwMDAwMDAwMDAwMDAwMDAwMDAwMAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAEAAAEAAAAAAAAAAAAAAAAAAAAAAAABAAAAAAAAAAAAAgAAAAACAAAAAAAAAAAAAAAAAAAAAAADAwMDAwMDAwMDAwMDAwMDAwMDAwMDAwMDAwAAAAAAAAMDAwMDAwMDAwMDAwMDAwMDAwMDAwMDAwMDAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAE5PVU5DRUVDS09VVE5FQ1RFVEVDUklCRUxVU0hFVEVBRFNFQVJDSFJHRUNUSVZJVFlMRU5EQVJWRU9USUZZUFRJT05TQ0hTRUFZU1RBVENIR0VPUkRJUkVDVE9SVFJDSFBBUkFNRVRFUlVSQ0VCU0NSSUJFQVJET1dOQUNFSU5ETktDS1VCU0NSSUJFSFRUUC9BRFRQLw=='
+
+
+/***/ }),
+
+/***/ 9619:
+/***/ ((__unused_webpack_module, exports) => {
+
+
+Object.defineProperty(exports, "__esModule", ({ value: true }));
+exports.enumToMap = void 0;
+function enumToMap(obj) {
+    const res = {};
+    Object.keys(obj).forEach((key) => {
+        const value = obj[key];
+        if (typeof value === 'number') {
+            res[key] = value;
+        }
+    });
+    return res;
+}
+exports.enumToMap = enumToMap;
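+// Example (illustrative input): for a TypeScript-style numeric enum object such as
+// { PAUSED: 0, 0: 'PAUSED' }, enumToMap keeps only the name -> number entries:
+//   enumToMap({ PAUSED: 0, 0: 'PAUSED' }); // => { PAUSED: 0 }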
+//# sourceMappingURL=utils.js.map
+
+/***/ }),
+
+/***/ 5332:
+/***/ ((module, __unused_webpack_exports, __nccwpck_require__) => {
+
+
+
+const { kClients } = __nccwpck_require__(4856)
+const Agent = __nccwpck_require__(9072)
+const {
+  kAgent,
+  kMockAgentSet,
+  kMockAgentGet,
+  kDispatches,
+  kIsMockActive,
+  kNetConnect,
+  kGetNetConnect,
+  kOptions,
+  kFactory
+} = __nccwpck_require__(8592)
+const MockClient = __nccwpck_require__(962)
+const MockPool = __nccwpck_require__(2975)
+const { matchValue, buildMockOptions } = __nccwpck_require__(2020)
+const { InvalidArgumentError, UndiciError } = __nccwpck_require__(3862)
+const Dispatcher = __nccwpck_require__(9724)
+const Pluralizer = __nccwpck_require__(7012)
+const PendingInterceptorsFormatter = __nccwpck_require__(5507)
+
+class FakeWeakRef {
+  constructor (value) {
+    this.value = value
+  }
+
+  deref () {
+    return this.value
+  }
+}
+
+class MockAgent extends Dispatcher {
+  constructor (opts) {
+    super(opts)
+
+    this[kNetConnect] = true
+    this[kIsMockActive] = true
+
+    // Instantiate Agent and encapsulate
+    if (opts && opts.agent && typeof opts.agent.dispatch !== 'function') {
+      throw new InvalidArgumentError('Argument opts.agent must implement Agent')
+    }
+    const agent = opts && opts.agent ? opts.agent : new Agent(opts)
+    this[kAgent] = agent
+
+    this[kClients] = agent[kClients]
+    this[kOptions] = buildMockOptions(opts)
+  }
+
+  get (origin) {
+    let dispatcher = this[kMockAgentGet](origin)
+
+    if (!dispatcher) {
+      dispatcher = this[kFactory](origin)
+      this[kMockAgentSet](origin, dispatcher)
+    }
+    return dispatcher
+  }
+
+  dispatch (opts, handler) {
+    // Call MockAgent.get to perform additional setup before dispatching as normal
+    this.get(opts.origin)
+    return this[kAgent].dispatch(opts, handler)
+  }
+
+  async close () {
+    await this[kAgent].close()
+    this[kClients].clear()
+  }
+
+  deactivate () {
+    this[kIsMockActive] = false
+  }
+
+  activate () {
+    this[kIsMockActive] = true
+  }
+
+  enableNetConnect (matcher) {
+    if (typeof matcher === 'string' || typeof matcher === 'function' || matcher instanceof RegExp) {
+      if (Array.isArray(this[kNetConnect])) {
+        this[kNetConnect].push(matcher)
+      } else {
+        this[kNetConnect] = [matcher]
+      }
+    } else if (typeof matcher === 'undefined') {
+      this[kNetConnect] = true
+    } else {
+      throw new InvalidArgumentError('Unsupported matcher. Must be one of String|Function|RegExp.')
+    }
+  }
+
+  disableNetConnect () {
+    this[kNetConnect] = false
+  }
+
+  // This is required to bypass issues caused by using global symbols - see:
+  // https://github.com/nodejs/undici/issues/1447
+  get isMockActive () {
+    return this[kIsMockActive]
+  }
+
+  [kMockAgentSet] (origin, dispatcher) {
+    this[kClients].set(origin, new FakeWeakRef(dispatcher))
+  }
+
+  [kFactory] (origin) {
+    const mockOptions = Object.assign({ agent: this }, this[kOptions])
+    return this[kOptions] && this[kOptions].connections === 1
+      ? new MockClient(origin, mockOptions)
+      : new MockPool(origin, mockOptions)
+  }
+
+  [kMockAgentGet] (origin) {
+    // First check if we can immediately find it
+    const ref = this[kClients].get(origin)
+    if (ref) {
+      return ref.deref()
+    }
+
+    // If the origin is not a string, create a dummy parent pool and return it to the user
+    if (typeof origin !== 'string') {
+      const dispatcher = this[kFactory]('http://localhost:9999')
+      this[kMockAgentSet](origin, dispatcher)
+      return dispatcher
+    }
+
+    // If we match, create a pool and assign the same dispatches
+    for (const [keyMatcher, nonExplicitRef] of Array.from(this[kClients])) {
+      const nonExplicitDispatcher = nonExplicitRef.deref()
+      if (nonExplicitDispatcher && typeof keyMatcher !== 'string' && matchValue(keyMatcher, origin)) {
+        const dispatcher = this[kFactory](origin)
+        this[kMockAgentSet](origin, dispatcher)
+        dispatcher[kDispatches] = nonExplicitDispatcher[kDispatches]
+        return dispatcher
+      }
+    }
+  }
+
+  [kGetNetConnect] () {
+    return this[kNetConnect]
+  }
+
+  pendingInterceptors () {
+    const mockAgentClients = this[kClients]
+
+    return Array.from(mockAgentClients.entries())
+      .flatMap(([origin, scope]) => scope.deref()[kDispatches].map(dispatch => ({ ...dispatch, origin })))
+      .filter(({ pending }) => pending)
+  }
+
+  assertNoPendingInterceptors ({ pendingInterceptorsFormatter = new PendingInterceptorsFormatter() } = {}) {
+    const pending = this.pendingInterceptors()
+
+    if (pending.length === 0) {
+      return
+    }
+
+    const pluralizer = new Pluralizer('interceptor', 'interceptors').pluralize(pending.length)
+
+    throw new UndiciError(`
+${pluralizer.count} ${pluralizer.noun} ${pluralizer.is} pending:
+
+${pendingInterceptorsFormatter.format(pending)}
+`.trim())
+  }
+}
+
+module.exports = MockAgent
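+// Usage sketch (illustrative origin, path and reply values; `setGlobalDispatcher` is
+// provided by the main undici entry point rather than by this module):
+//
+//   const mockAgent = new MockAgent()
+//   setGlobalDispatcher(mockAgent)
+//   mockAgent.disableNetConnect()
+//   mockAgent
+//     .get('http://example.local')
+//     .intercept({ path: '/users', method: 'GET' })
+//     .reply(200, [{ id: 1 }])
+//   // A request to http://example.local/users now resolves from the mock, and
+//   // mockAgent.assertNoPendingInterceptors() verifies every interceptor was consumed.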
+
+
+/***/ }),
+
+/***/ 962:
+/***/ ((module, __unused_webpack_exports, __nccwpck_require__) => {
+
+
+
+const { promisify } = __nccwpck_require__(9023)
+const Client = __nccwpck_require__(6646)
+const { buildMockDispatch } = __nccwpck_require__(2020)
+const {
+  kDispatches,
+  kMockAgent,
+  kClose,
+  kOriginalClose,
+  kOrigin,
+  kOriginalDispatch,
+  kConnected
+} = __nccwpck_require__(8592)
+const { MockInterceptor } = __nccwpck_require__(1470)
+const Symbols = __nccwpck_require__(4856)
+const { InvalidArgumentError } = __nccwpck_require__(3862)
+
+/**
+ * MockClient provides an API that extends the Client to influence the mockDispatches.
+ */
+class MockClient extends Client {
+  constructor (origin, opts) {
+    super(origin, opts)
+
+    if (!opts || !opts.agent || typeof opts.agent.dispatch !== 'function') {
+      throw new InvalidArgumentError('Argument opts.agent must implement Agent')
+    }
+
+    this[kMockAgent] = opts.agent
+    this[kOrigin] = origin
+    this[kDispatches] = []
+    this[kConnected] = 1
+    this[kOriginalDispatch] = this.dispatch
+    this[kOriginalClose] = this.close.bind(this)
+
+    this.dispatch = buildMockDispatch.call(this)
+    this.close = this[kClose]
+  }
+
+  get [Symbols.kConnected] () {
+    return this[kConnected]
+  }
+
+  /**
+   * Sets up the base interceptor for mocking replies from undici.
+   */
+  intercept (opts) {
+    return new MockInterceptor(opts, this[kDispatches])
+  }
+
+  async [kClose] () {
+    await promisify(this[kOriginalClose])()
+    this[kConnected] = 0
+    this[kMockAgent][Symbols.kClients].delete(this[kOrigin])
+  }
+}
+
+module.exports = MockClient
+
+
+/***/ }),
+
+/***/ 4222:
+/***/ ((module, __unused_webpack_exports, __nccwpck_require__) => {
+
+
+
+const { UndiciError } = __nccwpck_require__(3862)
+
+class MockNotMatchedError extends UndiciError {
+  constructor (message) {
+    super(message)
+    Error.captureStackTrace(this, MockNotMatchedError)
+    this.name = 'MockNotMatchedError'
+    this.message = message || 'The request does not match any registered mock dispatches'
+    this.code = 'UND_MOCK_ERR_MOCK_NOT_MATCHED'
+  }
+}
+
+module.exports = {
+  MockNotMatchedError
+}
+
+
+/***/ }),
+
+/***/ 1470:
+/***/ ((module, __unused_webpack_exports, __nccwpck_require__) => {
+
+
+
+const { getResponseData, buildKey, addMockDispatch } = __nccwpck_require__(2020)
+const {
+  kDispatches,
+  kDispatchKey,
+  kDefaultHeaders,
+  kDefaultTrailers,
+  kContentLength,
+  kMockDispatch
+} = __nccwpck_require__(8592)
+const { InvalidArgumentError } = __nccwpck_require__(3862)
+const { buildURL } = __nccwpck_require__(3465)
+
+/**
+ * Defines the scope API for an interceptor reply
+ */
+class MockScope {
+  constructor (mockDispatch) {
+    this[kMockDispatch] = mockDispatch
+  }
+
+  /**
+   * Delay a reply by a set amount in ms.
+   */
+  delay (waitInMs) {
+    if (typeof waitInMs !== 'number' || !Number.isInteger(waitInMs) || waitInMs <= 0) {
+      throw new InvalidArgumentError('waitInMs must be a valid integer > 0')
+    }
+
+    this[kMockDispatch].delay = waitInMs
+    return this
+  }
+
+  /**
+   * For a defined reply, never mark as consumed.
+   */
+  persist () {
+    this[kMockDispatch].persist = true
+    return this
+  }
+
+  /**
+   * Allow one to define a reply for a set number of matching requests.
+   */
+  times (repeatTimes) {
+    if (typeof repeatTimes !== 'number' || !Number.isInteger(repeatTimes) || repeatTimes <= 0) {
+      throw new InvalidArgumentError('repeatTimes must be a valid integer > 0')
+    }
+
+    this[kMockDispatch].times = repeatTimes
+    return this
+  }
+}
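+// Usage sketch (illustrative values): the scope returned by an interceptor's `reply`
+// can be chained, e.g. `interceptor.reply(200, 'ok').delay(50).times(3)` answers the
+// next three matching requests after a 50 ms delay, while `.persist()` keeps the
+// reply active indefinitely.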
+
+/**
+ * Defines an interceptor for a Mock
+ */
+class MockInterceptor {
+  constructor (opts, mockDispatches) {
+    if (typeof opts !== 'object') {
+      throw new InvalidArgumentError('opts must be an object')
+    }
+    if (typeof opts.path === 'undefined') {
+      throw new InvalidArgumentError('opts.path must be defined')
+    }
+    if (typeof opts.method === 'undefined') {
+      opts.method = 'GET'
+    }
+    // See https://github.com/nodejs/undici/issues/1245
+    // As per RFC 3986, clients are not supposed to send URI
+    // fragments to servers when they retrieve a document, so any
+    // fragment is dropped when the path is rebuilt below.
+    if (typeof opts.path === 'string') {
+      if (opts.query) {
+        opts.path = buildURL(opts.path, opts.query)
+      } else {
+        // Matches https://github.com/nodejs/undici/blob/main/lib/fetch/index.js#L1811
+        const parsedURL = new URL(opts.path, 'data://')
+        opts.path = parsedURL.pathname + parsedURL.search
+      }
+    }
+    if (typeof opts.method === 'string') {
+      opts.method = opts.method.toUpperCase()
+    }
+
+    this[kDispatchKey] = buildKey(opts)
+    this[kDispatches] = mockDispatches
+    this[kDefaultHeaders] = {}
+    this[kDefaultTrailers] = {}
+    this[kContentLength] = false
+  }
+
+  createMockScopeDispatchData (statusCode, data, responseOptions = {}) {
+    const responseData = getResponseData(data)
+    const contentLength = this[kContentLength] ? { 'content-length': responseData.length } : {}
+    const headers = { ...this[kDefaultHeaders], ...contentLength, ...responseOptions.headers }
+    const trailers = { ...this[kDefaultTrailers], ...responseOptions.trailers }
+
+    return { statusCode, data, headers, trailers }
+  }
+
+  validateReplyParameters (statusCode, data, responseOptions) {
+    if (typeof statusCode === 'undefined') {
+      throw new InvalidArgumentError('statusCode must be defined')
+    }
+    if (typeof data === 'undefined') {
+      throw new InvalidArgumentError('data must be defined')
+    }
+    if (typeof responseOptions !== 'object') {
+      throw new InvalidArgumentError('responseOptions must be an object')
+    }
+  }
+
+  /**
+   * Mock an undici request with a defined reply.
+   */
+  reply (replyData) {
+    // The reply values aren't available yet; they only become
+    // available when the reply callback is invoked.
+    if (typeof replyData === 'function') {
+      // We first wrap the provided callback in another function that
+      // resolves the data from the callback when invoked.
+      const wrappedDefaultsCallback = (opts) => {
+        // The reply options callback resolves the statusCode, data and options.
+        const resolvedData = replyData(opts)
+
+        // Check if it is in the right format
+        if (typeof resolvedData !== 'object') {
+          throw new InvalidArgumentError('reply options callback must return an object')
+        }
+
+        const { statusCode, data = '', responseOptions = {} } = resolvedData
+        this.validateReplyParameters(statusCode, data, responseOptions)
+        // The values are resolved at call time, so return them from this
+        // higher-order function; it is only invoked later by the mock dispatch.
+        return {
+          ...this.createMockScopeDispatchData(statusCode, data, responseOptions)
+        }
+      }
+
+      // Add the usual dispatch data, but this time set the data parameter to a function that will eventually provide the data.
+      const newMockDispatch = addMockDispatch(this[kDispatches], this[kDispatchKey], wrappedDefaultsCallback)
+      return new MockScope(newMockDispatch)
+    }
+
+    // If we get here, reply was called with 1-3 positional parameters
+    // rather than a callback, so we spread the arguments of this function
+    // to obtain them; replyData is then just the statusCode.
+    const [statusCode, data = '', responseOptions = {}] = [...arguments]
+    this.validateReplyParameters(statusCode, data, responseOptions)
+
+    // Send the already-provided data as usual
+    const dispatchData = this.createMockScopeDispatchData(statusCode, data, responseOptions)
+    const newMockDispatch = addMockDispatch(this[kDispatches], this[kDispatchKey], dispatchData)
+    return new MockScope(newMockDispatch)
+  }
+
+  /**
+   * Mock an undici request with a defined error.
+   */
+  replyWithError (error) {
+    if (typeof error === 'undefined') {
+      throw new InvalidArgumentError('error must be defined')
+    }
+
+    const newMockDispatch = addMockDispatch(this[kDispatches], this[kDispatchKey], { error })
+    return new MockScope(newMockDispatch)
+  }
+
+  /**
+   * Set default reply headers on the interceptor for subsequent replies
+   */
+  defaultReplyHeaders (headers) {
+    if (typeof headers === 'undefined') {
+      throw new InvalidArgumentError('headers must be defined')
+    }
+
+    this[kDefaultHeaders] = headers
+    return this
+  }
+
+  /**
+   * Set default reply trailers on the interceptor for subsequent replies
+   */
+  defaultReplyTrailers (trailers) {
+    if (typeof trailers === 'undefined') {
+      throw new InvalidArgumentError('trailers must be defined')
+    }
+
+    this[kDefaultTrailers] = trailers
+    return this
+  }
+
+  /**
+   * Set reply content length header for replies on the interceptor
+   */
+  replyContentLength () {
+    this[kContentLength] = true
+    return this
+  }
+}
+
+module.exports.MockInterceptor = MockInterceptor
+module.exports.MockScope = MockScope
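+// Usage sketch (illustrative handler body): `reply` also accepts a callback that
+// receives the dispatch options and must return { statusCode, data, responseOptions },
+// and `replyWithError` makes the matching request fail instead of respond:
+//
+//   interceptor.reply((opts) => ({ statusCode: 200, data: { echoedPath: opts.path } }))
+//   interceptor.replyWithError(new Error('connection reset'))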
+
+
+/***/ }),
+
+/***/ 2975:
+/***/ ((module, __unused_webpack_exports, __nccwpck_require__) => {
+
+
+
+const { promisify } = __nccwpck_require__(9023)
+const Pool = __nccwpck_require__(7787)
+const { buildMockDispatch } = __nccwpck_require__(2020)
+const {
+  kDispatches,
+  kMockAgent,
+  kClose,
+  kOriginalClose,
+  kOrigin,
+  kOriginalDispatch,
+  kConnected
+} = __nccwpck_require__(8592)
+const { MockInterceptor } = __nccwpck_require__(1470)
+const Symbols = __nccwpck_require__(4856)
+const { InvalidArgumentError } = __nccwpck_require__(3862)
+
+/**
+ * MockPool provides an API that extends the Pool to influence the mockDispatches.
+ */
+class MockPool extends Pool {
+  constructor (origin, opts) {
+    super(origin, opts)
+
+    if (!opts || !opts.agent || typeof opts.agent.dispatch !== 'function') {
+      throw new InvalidArgumentError('Argument opts.agent must implement Agent')
+    }
+
+    this[kMockAgent] = opts.agent
+    this[kOrigin] = origin
+    this[kDispatches] = []
+    this[kConnected] = 1
+    this[kOriginalDispatch] = this.dispatch
+    this[kOriginalClose] = this.close.bind(this)
+
+    this.dispatch = buildMockDispatch.call(this)
+    this.close = this[kClose]
+  }
+
+  get [Symbols.kConnected] () {
+    return this[kConnected]
+  }
+
+  /**
+   * Sets up the base interceptor for mocking replies from undici.
+   */
+  intercept (opts) {
+    return new MockInterceptor(opts, this[kDispatches])
+  }
+
+  async [kClose] () {
+    await promisify(this[kOriginalClose])()
+    this[kConnected] = 0
+    this[kMockAgent][Symbols.kClients].delete(this[kOrigin])
+  }
+}
+
+module.exports = MockPool
+
+
+/***/ }),
+
+/***/ 8592:
+/***/ ((module) => {
+
+
+
+module.exports = {
+  kAgent: Symbol('agent'),
+  kOptions: Symbol('options'),
+  kFactory: Symbol('factory'),
+  kDispatches: Symbol('dispatches'),
+  kDispatchKey: Symbol('dispatch key'),
+  kDefaultHeaders: Symbol('default headers'),
+  kDefaultTrailers: Symbol('default trailers'),
+  kContentLength: Symbol('content length'),
+  kMockAgent: Symbol('mock agent'),
+  kMockAgentSet: Symbol('mock agent set'),
+  kMockAgentGet: Symbol('mock agent get'),
+  kMockDispatch: Symbol('mock dispatch'),
+  kClose: Symbol('close'),
+  kOriginalClose: Symbol('original agent close'),
+  kOrigin: Symbol('origin'),
+  kIsMockActive: Symbol('is mock active'),
+  kNetConnect: Symbol('net connect'),
+  kGetNetConnect: Symbol('get net connect'),
+  kConnected: Symbol('connected')
+}
+
+
+/***/ }),
+
+/***/ 2020:
+/***/ ((module, __unused_webpack_exports, __nccwpck_require__) => {
+
+
+
+const { MockNotMatchedError } = __nccwpck_require__(4222)
+const {
+  kDispatches,
+  kMockAgent,
+  kOriginalDispatch,
+  kOrigin,
+  kGetNetConnect
+} = __nccwpck_require__(8592)
+const { buildURL, nop } = __nccwpck_require__(3465)
+const { STATUS_CODES } = __nccwpck_require__(8611)
+const {
+  types: {
+    isPromise
+  }
+} = __nccwpck_require__(9023)
+
+function matchValue (match, value) {
+  if (typeof match === 'string') {
+    return match === value
+  }
+  if (match instanceof RegExp) {
+    return match.test(value)
+  }
+  if (typeof match === 'function') {
+    return match(value) === true
+  }
+  return false
+}
+
+function lowerCaseEntries (headers) {
+  return Object.fromEntries(
+    Object.entries(headers).map(([headerName, headerValue]) => {
+      return [headerName.toLocaleLowerCase(), headerValue]
+    })
+  )
+}
+
+/**
+ * @param {import('../../index').Headers|string[]|Record<string, string>} headers
+ * @param {string} key
+ */
+function getHeaderByName (headers, key) {
+  if (Array.isArray(headers)) {
+    for (let i = 0; i < headers.length; i += 2) {
+      if (headers[i].toLocaleLowerCase() === key.toLocaleLowerCase()) {
+        return headers[i + 1]
+      }
+    }
+
+    return undefined
+  } else if (typeof headers.get === 'function') {
+    return headers.get(key)
+  } else {
+    return lowerCaseEntries(headers)[key.toLocaleLowerCase()]
+  }
+}
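+// Example: for a flat fetch-style header list such as ['Content-Type', 'text/plain'],
+// getHeaderByName(headers, 'content-type') returns 'text/plain'; a plain object falls
+// through to the case-insensitive lookup via lowerCaseEntries.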
+
+/** @param {string[]} headers */
+function buildHeadersFromArray (headers) { // fetch HeadersList
+  const clone = headers.slice()
+  const entries = []
+  for (let index = 0; index < clone.length; index += 2) {
+    entries.push([clone[index], clone[index + 1]])
+  }
+  return Object.fromEntries(entries)
+}
+
+function matchHeaders (mockDispatch, headers) {
+  if (typeof mockDispatch.headers === 'function') {
+    if (Array.isArray(headers)) { // fetch HeadersList
+      headers = buildHeadersFromArray(headers)
+    }
+    return mockDispatch.headers(headers ? lowerCaseEntries(headers) : {})
+  }
+  if (typeof mockDispatch.headers === 'undefined') {
+    return true
+  }
+  if (typeof headers !== 'object' || typeof mockDispatch.headers !== 'object') {
+    return false
+  }
+
+  for (const [matchHeaderName, matchHeaderValue] of Object.entries(mockDispatch.headers)) {
+    const headerValue = getHeaderByName(headers, matchHeaderName)
+
+    if (!matchValue(matchHeaderValue, headerValue)) {
+      return false
+    }
+  }
+  return true
+}
+
+function safeUrl (path) {
+  if (typeof path !== 'string') {
+    return path
+  }
+
+  const pathSegments = path.split('?')
+
+  if (pathSegments.length !== 2) {
+    return path
+  }
+
+  const qp = new URLSearchParams(pathSegments.pop())
+  qp.sort()
+  return [...pathSegments, qp.toString()].join('?')
+}
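+// Example: safeUrl('/search?b=2&a=1') returns '/search?a=1&b=2', so query-string
+// matching is insensitive to parameter order; paths without exactly one '?' are
+// returned unchanged.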
+
+function matchKey (mockDispatch, { path, method, body, headers }) {
+  const pathMatch = matchValue(mockDispatch.path, path)
+  const methodMatch = matchValue(mockDispatch.method, method)
+  const bodyMatch = typeof mockDispatch.body !== 'undefined' ? matchValue(mockDispatch.body, body) : true
+  const headersMatch = matchHeaders(mockDispatch, headers)
+  return pathMatch && methodMatch && bodyMatch && headersMatch
+}
+
+function getResponseData (data) {
+  if (Buffer.isBuffer(data)) {
+    return data
+  } else if (typeof data === 'object') {
+    return JSON.stringify(data)
+  } else {
+    return data.toString()
+  }
+}
+
+function getMockDispatch (mockDispatches, key) {
+  const basePath = key.query ? buildURL(key.path, key.query) : key.path
+  const resolvedPath = typeof basePath === 'string' ? safeUrl(basePath) : basePath
+
+  // Match path
+  let matchedMockDispatches = mockDispatches.filter(({ consumed }) => !consumed).filter(({ path }) => matchValue(safeUrl(path), resolvedPath))
+  if (matchedMockDispatches.length === 0) {
+    throw new MockNotMatchedError(`Mock dispatch not matched for path '${resolvedPath}'`)
+  }
+
+  // Match method
+  matchedMockDispatches = matchedMockDispatches.filter(({ method }) => matchValue(method, key.method))
+  if (matchedMockDispatches.length === 0) {
+    throw new MockNotMatchedError(`Mock dispatch not matched for method '${key.method}'`)
+  }
+
+  // Match body
+  matchedMockDispatches = matchedMockDispatches.filter(({ body }) => typeof body !== 'undefined' ? matchValue(body, key.body) : true)
+  if (matchedMockDispatches.length === 0) {
+    throw new MockNotMatchedError(`Mock dispatch not matched for body '${key.body}'`)
+  }
+
+  // Match headers
+  matchedMockDispatches = matchedMockDispatches.filter((mockDispatch) => matchHeaders(mockDispatch, key.headers))
+  if (matchedMockDispatches.length === 0) {
+    throw new MockNotMatchedError(`Mock dispatch not matched for headers '${typeof key.headers === 'object' ? JSON.stringify(key.headers) : key.headers}'`)
+  }
+
+  return matchedMockDispatches[0]
+}
+
+function addMockDispatch (mockDispatches, key, data) {
+  const baseData = { timesInvoked: 0, times: 1, persist: false, consumed: false }
+  const replyData = typeof data === 'function' ? { callback: data } : { ...data }
+  const newMockDispatch = { ...baseData, ...key, pending: true, data: { error: null, ...replyData } }
+  mockDispatches.push(newMockDispatch)
+  return newMockDispatch
+}
+
+function deleteMockDispatch (mockDispatches, key) {
+  const index = mockDispatches.findIndex(dispatch => {
+    if (!dispatch.consumed) {
+      return false
+    }
+    return matchKey(dispatch, key)
+  })
+  if (index !== -1) {
+    mockDispatches.splice(index, 1)
+  }
+}
+
+function buildKey (opts) {
+  const { path, method, body, headers, query } = opts
+  return {
+    path,
+    method,
+    body,
+    headers,
+    query
+  }
+}
+
+function generateKeyValues (data) {
+  return Object.entries(data).reduce((keyValuePairs, [key, value]) => [
+    ...keyValuePairs,
+    Buffer.from(`${key}`),
+    Array.isArray(value) ? value.map(x => Buffer.from(`${x}`)) : Buffer.from(`${value}`)
+  ], [])
+}
+
+/**
+ * @see https://developer.mozilla.org/en-US/docs/Web/HTTP/Status
+ * @param {number} statusCode
+ */
+function getStatusText (statusCode) {
+  return STATUS_CODES[statusCode] || 'unknown'
+}
+
+async function getResponse (body) {
+  const buffers = []
+  for await (const data of body) {
+    buffers.push(data)
+  }
+  return Buffer.concat(buffers).toString('utf8')
+}
+
+/**
+ * Mock dispatch function used to simulate undici dispatches
+ */
+function mockDispatch (opts, handler) {
+  // Get mock dispatch from built key
+  const key = buildKey(opts)
+  const mockDispatch = getMockDispatch(this[kDispatches], key)
+
+  mockDispatch.timesInvoked++
+
+  // Here's where we resolve a callback if a callback is present for the dispatch data.
+  if (mockDispatch.data.callback) {
+    mockDispatch.data = { ...mockDispatch.data, ...mockDispatch.data.callback(opts) }
+  }
+
+  // Parse mockDispatch data
+  const { data: { statusCode, data, headers, trailers, error }, delay, persist } = mockDispatch
+  const { timesInvoked, times } = mockDispatch
+
+  // If it's used up and not persistent, mark as consumed
+  mockDispatch.consumed = !persist && timesInvoked >= times
+  mockDispatch.pending = timesInvoked < times
+
+  // If specified, trigger dispatch error
+  if (error !== null) {
+    deleteMockDispatch(this[kDispatches], key)
+    handler.onError(error)
+    return true
+  }
+
+  // Handle the request with a delay if necessary
+  if (typeof delay === 'number' && delay > 0) {
+    setTimeout(() => {
+      handleReply(this[kDispatches])
+    }, delay)
+  } else {
+    handleReply(this[kDispatches])
+  }
+
+  function handleReply (mockDispatches, _data = data) {
+    // fetch's HeadersList is a 1D string array
+    const optsHeaders = Array.isArray(opts.headers)
+      ? buildHeadersFromArray(opts.headers)
+      : opts.headers
+    const body = typeof _data === 'function'
+      ? _data({ ...opts, headers: optsHeaders })
+      : _data
+
+    // util.types.isPromise is likely needed for jest.
+    if (isPromise(body)) {
+      // If handleReply is asynchronous, throwing an error in the callback
+      // rejects the promise rather than throwing synchronously, which
+      // breaks some tests. Instead, we wait for the callback to resolve
+      // if it is a promise, and then re-run handleReply with the new body.
+      body.then((newData) => handleReply(mockDispatches, newData))
+      return
+    }
+
+    const responseData = getResponseData(body)
+    const responseHeaders = generateKeyValues(headers)
+    const responseTrailers = generateKeyValues(trailers)
+
+    handler.abort = nop
+    handler.onHeaders(statusCode, responseHeaders, resume, getStatusText(statusCode))
+    handler.onData(Buffer.from(responseData))
+    handler.onComplete(responseTrailers)
+    deleteMockDispatch(mockDispatches, key)
+  }
+
+  function resume () {}
+
+  return true
+}
+
+function buildMockDispatch () {
+  const agent = this[kMockAgent]
+  const origin = this[kOrigin]
+  const originalDispatch = this[kOriginalDispatch]
+
+  return function dispatch (opts, handler) {
+    if (agent.isMockActive) {
+      try {
+        mockDispatch.call(this, opts, handler)
+      } catch (error) {
+        if (error instanceof MockNotMatchedError) {
+          const netConnect = agent[kGetNetConnect]()
+          if (netConnect === false) {
+            throw new MockNotMatchedError(`${error.message}: subsequent request to origin ${origin} was not allowed (net.connect disabled)`)
+          }
+          if (checkNetConnect(netConnect, origin)) {
+            originalDispatch.call(this, opts, handler)
+          } else {
+            throw new MockNotMatchedError(`${error.message}: subsequent request to origin ${origin} was not allowed (net.connect is not enabled for this origin)`)
+          }
+        } else {
+          throw error
+        }
+      }
+    } else {
+      originalDispatch.call(this, opts, handler)
+    }
+  }
+}
+
+function checkNetConnect (netConnect, origin) {
+  const url = new URL(origin)
+  if (netConnect === true) {
+    return true
+  } else if (Array.isArray(netConnect) && netConnect.some((matcher) => matchValue(matcher, url.host))) {
+    return true
+  }
+  return false
+}
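+// Example: checkNetConnect(true, 'http://localhost:3000') is true (all origins allowed),
+// and checkNetConnect(['localhost:3000'], 'http://localhost:3000') is true because each
+// matcher is compared against url.host; anything else returns false.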
+
+function buildMockOptions (opts) {
+  if (opts) {
+    const { agent, ...mockOptions } = opts
+    return mockOptions
+  }
+}
+
+module.exports = {
+  getResponseData,
+  getMockDispatch,
+  addMockDispatch,
+  deleteMockDispatch,
+  buildKey,
+  generateKeyValues,
+  matchValue,
+  getResponse,
+  getStatusText,
+  mockDispatch,
+  buildMockDispatch,
+  checkNetConnect,
+  buildMockOptions,
+  getHeaderByName
+}
+
+
+/***/ }),
+
+/***/ 5507:
+/***/ ((module, __unused_webpack_exports, __nccwpck_require__) => {
+
+
+
+const { Transform } = __nccwpck_require__(2203)
+const { Console } = __nccwpck_require__(4236)
+
+/**
+ * Gets the output of `console.table(…)` as a string.
+ */
+module.exports = class PendingInterceptorsFormatter {
+  constructor ({ disableColors } = {}) {
+    this.transform = new Transform({
+      transform (chunk, _enc, cb) {
+        cb(null, chunk)
+      }
+    })
+
+    this.logger = new Console({
+      stdout: this.transform,
+      inspectOptions: {
+        colors: !disableColors && !process.env.CI
+      }
+    })
+  }
+
+  format (pendingInterceptors) {
+    const withPrettyHeaders = pendingInterceptors.map(
+      ({ method, path, data: { statusCode }, persist, times, timesInvoked, origin }) => ({
+        Method: method,
+        Origin: origin,
+        Path: path,
+        'Status code': statusCode,
+        Persistent: persist ? '✅' : '❌',
+        Invocations: timesInvoked,
+        Remaining: persist ? Infinity : times - timesInvoked
+      }))
+
+    this.logger.table(withPrettyHeaders)
+    return this.transform.read().toString()
+  }
+}
+
+
+/***/ }),
+
+/***/ 7012:
+/***/ ((module) => {
+
+
+
+const singulars = {
+  pronoun: 'it',
+  is: 'is',
+  was: 'was',
+  this: 'this'
+}
+
+const plurals = {
+  pronoun: 'they',
+  is: 'are',
+  was: 'were',
+  this: 'these'
+}
+
+module.exports = class Pluralizer {
+  constructor (singular, plural) {
+    this.singular = singular
+    this.plural = plural
+  }
+
+  pluralize (count) {
+    const one = count === 1
+    const keys = one ? singulars : plurals
+    const noun = one ? this.singular : this.plural
+    return { ...keys, count, noun }
+  }
+}
+
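+// Illustrative sketch (not part of the upstream code): Pluralizer picks the
+// pronoun and verb forms that agree with a count, so callers can build messages
+// such as "1 interceptor is pending" vs. "2 interceptors are pending".
+//
+//   const pluralizer = new Pluralizer('interceptor', 'interceptors')
+//   pluralizer.pluralize(1)
+//   // => { pronoun: 'it', is: 'is', was: 'was', this: 'this', count: 1, noun: 'interceptor' }
+//   pluralizer.pluralize(2)
+//   // => { pronoun: 'they', is: 'are', was: 'were', this: 'these', count: 2, noun: 'interceptors' }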
+
+/***/ }),
+
+/***/ 9414:
+/***/ ((module) => {
+
+/* eslint-disable */
+
+
+
+// Extracted from node/lib/internal/fixed_queue.js
+
+// Currently optimal queue size, tested on V8 6.0 - 6.6. Must be power of two.
+const kSize = 2048;
+const kMask = kSize - 1;
+
+// The FixedQueue is implemented as a singly-linked list of fixed-size
+// circular buffers. It looks something like this:
+//
+//  head                                                       tail
+//    |                                                          |
+//    v                                                          v
+// +-----------+ <-----\       +-----------+ <------\         +-----------+
+// |  [null]   |        \----- |   next    |         \------- |   next    |
+// +-----------+               +-----------+                  +-----------+
+// |   item    | <-- bottom    |   item    | <-- bottom       |  [empty]  |
+// |   item    |               |   item    |                  |  [empty]  |
+// |   item    |               |   item    |                  |  [empty]  |
+// |   item    |               |   item    |                  |  [empty]  |
+// |   item    |               |   item    |       bottom --> |   item    |
+// |   item    |               |   item    |                  |   item    |
+// |    ...    |               |    ...    |                  |    ...    |
+// |   item    |               |   item    |                  |   item    |
+// |   item    |               |   item    |                  |   item    |
+// |  [empty]  | <-- top       |   item    |                  |   item    |
+// |  [empty]  |               |   item    |                  |   item    |
+// |  [empty]  |               |  [empty]  | <-- top  top --> |  [empty]  |
+// +-----------+               +-----------+                  +-----------+
+//
+// Or, if there is only one circular buffer, it looks something
+// like either of these:
+//
+//  head   tail                                 head   tail
+//    |     |                                     |     |
+//    v     v                                     v     v
+// +-----------+                               +-----------+
+// |  [null]   |                               |  [null]   |
+// +-----------+                               +-----------+
+// |  [empty]  |                               |   item    |
+// |  [empty]  |                               |   item    |
+// |   item    | <-- bottom            top --> |  [empty]  |
+// |   item    |                               |  [empty]  |
+// |  [empty]  | <-- top            bottom --> |   item    |
+// |  [empty]  |                               |   item    |
+// +-----------+                               +-----------+
+//
+// Adding a value means moving `top` forward by one, removing means
+// moving `bottom` forward by one. After reaching the end, the queue
+// wraps around.
+//
+// When `top === bottom` the current queue is empty and when
+// `top + 1 === bottom` it's full. This wastes a single space of storage
+// but allows much quicker checks.
+
+class FixedCircularBuffer {
+  constructor() {
+    this.bottom = 0;
+    this.top = 0;
+    this.list = new Array(kSize);
+    this.next = null;
+  }
+
+  isEmpty() {
+    return this.top === this.bottom;
+  }
+
+  isFull() {
+    return ((this.top + 1) & kMask) === this.bottom;
+  }
+
+  push(data) {
+    this.list[this.top] = data;
+    this.top = (this.top + 1) & kMask;
+  }
+
+  shift() {
+    const nextItem = this.list[this.bottom];
+    if (nextItem === undefined)
+      return null;
+    this.list[this.bottom] = undefined;
+    this.bottom = (this.bottom + 1) & kMask;
+    return nextItem;
+  }
+}
+
+module.exports = class FixedQueue {
+  constructor() {
+    this.head = this.tail = new FixedCircularBuffer();
+  }
+
+  isEmpty() {
+    return this.head.isEmpty();
+  }
+
+  push(data) {
+    if (this.head.isFull()) {
+      // Head is full: Creates a new queue, sets the old queue's `.next` to it,
+      // and sets it as the new main queue.
+      this.head = this.head.next = new FixedCircularBuffer();
+    }
+    this.head.push(data);
+  }
+
+  shift() {
+    const tail = this.tail;
+    const next = tail.shift();
+    if (tail.isEmpty() && tail.next !== null) {
+      // If there is another queue, it forms the new tail.
+      this.tail = tail.next;
+    }
+    return next;
+  }
+};
+
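+// Illustrative sketch (not part of the upstream code): FixedQueue is a plain
+// FIFO built from linked 2048-slot circular buffers; a new buffer is linked in
+// only when the current head fills up, so push/shift stay O(1) without
+// reallocating a growing array.
+//
+//   const queue = new FixedQueue()
+//   queue.push({ opts: { path: '/' }, handler: {} })
+//   while (!queue.isEmpty()) {
+//     const item = queue.shift() // items come back in insertion order
+//   }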
+
+/***/ }),
+
+/***/ 9769:
+/***/ ((module, __unused_webpack_exports, __nccwpck_require__) => {
+
+
+
+const DispatcherBase = __nccwpck_require__(9368)
+const FixedQueue = __nccwpck_require__(9414)
+const { kConnected, kSize, kRunning, kPending, kQueued, kBusy, kFree, kUrl, kClose, kDestroy, kDispatch } = __nccwpck_require__(4856)
+const PoolStats = __nccwpck_require__(9721)
+
+const kClients = Symbol('clients')
+const kNeedDrain = Symbol('needDrain')
+const kQueue = Symbol('queue')
+const kClosedResolve = Symbol('closed resolve')
+const kOnDrain = Symbol('onDrain')
+const kOnConnect = Symbol('onConnect')
+const kOnDisconnect = Symbol('onDisconnect')
+const kOnConnectionError = Symbol('onConnectionError')
+const kGetDispatcher = Symbol('get dispatcher')
+const kAddClient = Symbol('add client')
+const kRemoveClient = Symbol('remove client')
+const kStats = Symbol('stats')
+
+class PoolBase extends DispatcherBase {
+  constructor () {
+    super()
+
+    this[kQueue] = new FixedQueue()
+    this[kClients] = []
+    this[kQueued] = 0
+
+    const pool = this
+
+    this[kOnDrain] = function onDrain (origin, targets) {
+      const queue = pool[kQueue]
+
+      let needDrain = false
+
+      while (!needDrain) {
+        const item = queue.shift()
+        if (!item) {
+          break
+        }
+        pool[kQueued]--
+        needDrain = !this.dispatch(item.opts, item.handler)
+      }
+
+      this[kNeedDrain] = needDrain
+
+      if (!this[kNeedDrain] && pool[kNeedDrain]) {
+        pool[kNeedDrain] = false
+        pool.emit('drain', origin, [pool, ...targets])
+      }
+
+      if (pool[kClosedResolve] && queue.isEmpty()) {
+        Promise
+          .all(pool[kClients].map(c => c.close()))
+          .then(pool[kClosedResolve])
+      }
+    }
+
+    this[kOnConnect] = (origin, targets) => {
+      pool.emit('connect', origin, [pool, ...targets])
+    }
+
+    this[kOnDisconnect] = (origin, targets, err) => {
+      pool.emit('disconnect', origin, [pool, ...targets], err)
+    }
+
+    this[kOnConnectionError] = (origin, targets, err) => {
+      pool.emit('connectionError', origin, [pool, ...targets], err)
+    }
+
+    this[kStats] = new PoolStats(this)
+  }
+
+  get [kBusy] () {
+    return this[kNeedDrain]
+  }
+
+  get [kConnected] () {
+    return this[kClients].filter(client => client[kConnected]).length
+  }
+
+  get [kFree] () {
+    return this[kClients].filter(client => client[kConnected] && !client[kNeedDrain]).length
+  }
+
+  get [kPending] () {
+    let ret = this[kQueued]
+    for (const { [kPending]: pending } of this[kClients]) {
+      ret += pending
+    }
+    return ret
+  }
+
+  get [kRunning] () {
+    let ret = 0
+    for (const { [kRunning]: running } of this[kClients]) {
+      ret += running
+    }
+    return ret
+  }
+
+  get [kSize] () {
+    let ret = this[kQueued]
+    for (const { [kSize]: size } of this[kClients]) {
+      ret += size
+    }
+    return ret
+  }
+
+  get stats () {
+    return this[kStats]
+  }
+
+  async [kClose] () {
+    if (this[kQueue].isEmpty()) {
+      return Promise.all(this[kClients].map(c => c.close()))
+    } else {
+      return new Promise((resolve) => {
+        this[kClosedResolve] = resolve
+      })
+    }
+  }
+
+  async [kDestroy] (err) {
+    while (true) {
+      const item = this[kQueue].shift()
+      if (!item) {
+        break
+      }
+      item.handler.onError(err)
+    }
+
+    return Promise.all(this[kClients].map(c => c.destroy(err)))
+  }
+
+  [kDispatch] (opts, handler) {
+    const dispatcher = this[kGetDispatcher]()
+
+    if (!dispatcher) {
+      this[kNeedDrain] = true
+      this[kQueue].push({ opts, handler })
+      this[kQueued]++
+    } else if (!dispatcher.dispatch(opts, handler)) {
+      dispatcher[kNeedDrain] = true
+      this[kNeedDrain] = !this[kGetDispatcher]()
+    }
+
+    return !this[kNeedDrain]
+  }
+
+  [kAddClient] (client) {
+    client
+      .on('drain', this[kOnDrain])
+      .on('connect', this[kOnConnect])
+      .on('disconnect', this[kOnDisconnect])
+      .on('connectionError', this[kOnConnectionError])
+
+    this[kClients].push(client)
+
+    if (this[kNeedDrain]) {
+      process.nextTick(() => {
+        if (this[kNeedDrain]) {
+          this[kOnDrain](client[kUrl], [this, client])
+        }
+      })
+    }
+
+    return this
+  }
+
+  [kRemoveClient] (client) {
+    client.close(() => {
+      const idx = this[kClients].indexOf(client)
+      if (idx !== -1) {
+        this[kClients].splice(idx, 1)
+      }
+    })
+
+    this[kNeedDrain] = this[kClients].some(dispatcher => (
+      !dispatcher[kNeedDrain] &&
+      dispatcher.closed !== true &&
+      dispatcher.destroyed !== true
+    ))
+  }
+}
+
+module.exports = {
+  PoolBase,
+  kClients,
+  kNeedDrain,
+  kAddClient,
+  kRemoveClient,
+  kGetDispatcher
+}
+
+
+/***/ }),
+
+/***/ 9721:
+/***/ ((module, __unused_webpack_exports, __nccwpck_require__) => {
+
+const { kFree, kConnected, kPending, kQueued, kRunning, kSize } = __nccwpck_require__(4856)
+const kPool = Symbol('pool')
+
+class PoolStats {
+  constructor (pool) {
+    this[kPool] = pool
+  }
+
+  get connected () {
+    return this[kPool][kConnected]
+  }
+
+  get free () {
+    return this[kPool][kFree]
+  }
+
+  get pending () {
+    return this[kPool][kPending]
+  }
+
+  get queued () {
+    return this[kPool][kQueued]
+  }
+
+  get running () {
+    return this[kPool][kRunning]
+  }
+
+  get size () {
+    return this[kPool][kSize]
+  }
+}
+
+module.exports = PoolStats
+
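+// Illustrative sketch (not part of the upstream code): PoolStats is a read-only
+// view over a pool's symbol-keyed counters; it backs the public `stats` getter
+// on PoolBase. Assuming a `pool` instance:
+//
+//   const { connected, free, pending, queued, running, size } = pool.stats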
+
+/***/ }),
+
+/***/ 7787:
+/***/ ((module, __unused_webpack_exports, __nccwpck_require__) => {
+
+
+
+const {
+  PoolBase,
+  kClients,
+  kNeedDrain,
+  kAddClient,
+  kGetDispatcher
+} = __nccwpck_require__(9769)
+const Client = __nccwpck_require__(6646)
+const {
+  InvalidArgumentError
+} = __nccwpck_require__(3862)
+const util = __nccwpck_require__(3465)
+const { kUrl, kInterceptors } = __nccwpck_require__(4856)
+const buildConnector = __nccwpck_require__(2559)
+
+const kOptions = Symbol('options')
+const kConnections = Symbol('connections')
+const kFactory = Symbol('factory')
+
+function defaultFactory (origin, opts) {
+  return new Client(origin, opts)
+}
+
+class Pool extends PoolBase {
+  constructor (origin, {
+    connections,
+    factory = defaultFactory,
+    connect,
+    connectTimeout,
+    tls,
+    maxCachedSessions,
+    socketPath,
+    autoSelectFamily,
+    autoSelectFamilyAttemptTimeout,
+    allowH2,
+    ...options
+  } = {}) {
+    super()
+
+    if (connections != null && (!Number.isFinite(connections) || connections < 0)) {
+      throw new InvalidArgumentError('invalid connections')
+    }
+
+    if (typeof factory !== 'function') {
+      throw new InvalidArgumentError('factory must be a function.')
+    }
+
+    if (connect != null && typeof connect !== 'function' && typeof connect !== 'object') {
+      throw new InvalidArgumentError('connect must be a function or an object')
+    }
+
+    if (typeof connect !== 'function') {
+      connect = buildConnector({
+        ...tls,
+        maxCachedSessions,
+        allowH2,
+        socketPath,
+        timeout: connectTimeout,
+        ...(util.nodeHasAutoSelectFamily && autoSelectFamily ? { autoSelectFamily, autoSelectFamilyAttemptTimeout } : undefined),
+        ...connect
+      })
+    }
+
+    this[kInterceptors] = options.interceptors && options.interceptors.Pool && Array.isArray(options.interceptors.Pool)
+      ? options.interceptors.Pool
+      : []
+    this[kConnections] = connections || null
+    this[kUrl] = util.parseOrigin(origin)
+    this[kOptions] = { ...util.deepClone(options), connect, allowH2 }
+    this[kOptions].interceptors = options.interceptors
+      ? { ...options.interceptors }
+      : undefined
+    this[kFactory] = factory
+  }
+
+  [kGetDispatcher] () {
+    let dispatcher = this[kClients].find(dispatcher => !dispatcher[kNeedDrain])
+
+    if (dispatcher) {
+      return dispatcher
+    }
+
+    if (!this[kConnections] || this[kClients].length < this[kConnections]) {
+      dispatcher = this[kFactory](this[kUrl], this[kOptions])
+      this[kAddClient](dispatcher)
+    }
+
+    return dispatcher
+  }
+}
+
+module.exports = Pool
+
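+// Illustrative usage sketch (not part of the upstream code), following undici's
+// public Pool API: a Pool lazily creates up to `connections` Client instances
+// for a single origin and queues dispatches once every client is busy.
+//
+//   const pool = new Pool('http://localhost:3000', { connections: 10 })
+//   const { statusCode, body } = await pool.request({ path: '/', method: 'GET' })
+//   console.log(statusCode, await body.text())
+//   await pool.close()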
+
+/***/ }),
+
+/***/ 2145:
+/***/ ((module, __unused_webpack_exports, __nccwpck_require__) => {
+
+
+
+const { kProxy, kClose, kDestroy, kInterceptors } = __nccwpck_require__(4856)
+const { URL } = __nccwpck_require__(7016)
+const Agent = __nccwpck_require__(9072)
+const Pool = __nccwpck_require__(7787)
+const DispatcherBase = __nccwpck_require__(9368)
+const { InvalidArgumentError, RequestAbortedError } = __nccwpck_require__(3862)
+const buildConnector = __nccwpck_require__(2559)
+
+const kAgent = Symbol('proxy agent')
+const kClient = Symbol('proxy client')
+const kProxyHeaders = Symbol('proxy headers')
+const kRequestTls = Symbol('request tls settings')
+const kProxyTls = Symbol('proxy tls settings')
+const kConnectEndpoint = Symbol('connect endpoint function')
+
+function defaultProtocolPort (protocol) {
+  return protocol === 'https:' ? 443 : 80
+}
+
+function buildProxyOptions (opts) {
+  if (typeof opts === 'string') {
+    opts = { uri: opts }
+  }
+
+  if (!opts || !opts.uri) {
+    throw new InvalidArgumentError('Proxy opts.uri is mandatory')
+  }
+
+  return {
+    uri: opts.uri,
+    protocol: opts.protocol || 'https'
+  }
+}
+
+function defaultFactory (origin, opts) {
+  return new Pool(origin, opts)
+}
+
+class ProxyAgent extends DispatcherBase {
+  constructor (opts) {
+    super(opts)
+    this[kProxy] = buildProxyOptions(opts)
+    this[kAgent] = new Agent(opts)
+    this[kInterceptors] = opts.interceptors && opts.interceptors.ProxyAgent && Array.isArray(opts.interceptors.ProxyAgent)
+      ? opts.interceptors.ProxyAgent
+      : []
+
+    if (typeof opts === 'string') {
+      opts = { uri: opts }
+    }
+
+    if (!opts || !opts.uri) {
+      throw new InvalidArgumentError('Proxy opts.uri is mandatory')
+    }
+
+    const { clientFactory = defaultFactory } = opts
+
+    if (typeof clientFactory !== 'function') {
+      throw new InvalidArgumentError('Proxy opts.clientFactory must be a function.')
+    }
+
+    this[kRequestTls] = opts.requestTls
+    this[kProxyTls] = opts.proxyTls
+    this[kProxyHeaders] = opts.headers || {}
+
+    const resolvedUrl = new URL(opts.uri)
+    const { origin, port, host, username, password } = resolvedUrl
+
+    if (opts.auth && opts.token) {
+      throw new InvalidArgumentError('opts.auth cannot be used in combination with opts.token')
+    } else if (opts.auth) {
+      /* @deprecated in favour of opts.token */
+      this[kProxyHeaders]['proxy-authorization'] = `Basic ${opts.auth}`
+    } else if (opts.token) {
+      this[kProxyHeaders]['proxy-authorization'] = opts.token
+    } else if (username && password) {
+      this[kProxyHeaders]['proxy-authorization'] = `Basic ${Buffer.from(`${decodeURIComponent(username)}:${decodeURIComponent(password)}`).toString('base64')}`
+    }
+
+    const connect = buildConnector({ ...opts.proxyTls })
+    this[kConnectEndpoint] = buildConnector({ ...opts.requestTls })
+    this[kClient] = clientFactory(resolvedUrl, { connect })
+    this[kAgent] = new Agent({
+      ...opts,
+      connect: async (opts, callback) => {
+        let requestedHost = opts.host
+        if (!opts.port) {
+          requestedHost += `:${defaultProtocolPort(opts.protocol)}`
+        }
+        try {
+          const { socket, statusCode } = await this[kClient].connect({
+            origin,
+            port,
+            path: requestedHost,
+            signal: opts.signal,
+            headers: {
+              ...this[kProxyHeaders],
+              host
+            }
+          })
+          if (statusCode !== 200) {
+            socket.on('error', () => {}).destroy()
+            callback(new RequestAbortedError(`Proxy response (${statusCode}) !== 200 when HTTP Tunneling`))
+            return
+          }
+          if (opts.protocol !== 'https:') {
+            callback(null, socket)
+            return
+          }
+          let servername
+          if (this[kRequestTls]) {
+            servername = this[kRequestTls].servername
+          } else {
+            servername = opts.servername
+          }
+          this[kConnectEndpoint]({ ...opts, servername, httpSocket: socket }, callback)
+        } catch (err) {
+          callback(err)
+        }
+      }
+    })
+  }
+
+  dispatch (opts, handler) {
+    const { host } = new URL(opts.origin)
+    const headers = buildHeaders(opts.headers)
+    throwIfProxyAuthIsSent(headers)
+    return this[kAgent].dispatch(
+      {
+        ...opts,
+        headers: {
+          ...headers,
+          host
+        }
+      },
+      handler
+    )
+  }
+
+  async [kClose] () {
+    await this[kAgent].close()
+    await this[kClient].close()
+  }
+
+  async [kDestroy] () {
+    await this[kAgent].destroy()
+    await this[kClient].destroy()
+  }
+}
+
+/**
+ * @param {string[] | Record<string, string>} headers
+ * @returns {Record<string, string>}
+ */
+function buildHeaders (headers) {
+  // When using undici.fetch, the headers list is stored
+  // as an array.
+  if (Array.isArray(headers)) {
+    /** @type {Record<string, string>} */
+    const headersPair = {}
+
+    for (let i = 0; i < headers.length; i += 2) {
+      headersPair[headers[i]] = headers[i + 1]
+    }
+
+    return headersPair
+  }
+
+  return headers
+}
+
+/**
+ * @param {Record<string, string>} headers
+ *
+ * Previous versions of ProxyAgent allowed the Proxy-Authorization header to be
+ * passed in per-request headers. This was changed, and this check was added to
+ * protect end users from a security vulnerability.
+ * It should be removed in the next major version for performance reasons.
+ */
+function throwIfProxyAuthIsSent (headers) {
+  const existProxyAuth = headers && Object.keys(headers)
+    .find((key) => key.toLowerCase() === 'proxy-authorization')
+  if (existProxyAuth) {
+    throw new InvalidArgumentError('Proxy-Authorization should be sent in ProxyAgent constructor')
+  }
+}
+
+module.exports = ProxyAgent
+
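+// Illustrative usage sketch (not part of the upstream code), following undici's
+// public ProxyAgent API: requests are tunneled through the proxy with CONNECT,
+// and proxy credentials belong in the constructor (opts.auth/opts.token or the
+// proxy URL's userinfo), never in a per-request Proxy-Authorization header.
+//
+//   const proxyAgent = new ProxyAgent({
+//     uri: 'http://proxy.internal:8080', // hypothetical proxy endpoint
+//     token: `Basic ${Buffer.from('user:pass').toString('base64')}`
+//   })
+//   const { statusCode } = await proxyAgent.request({
+//     origin: 'http://example.com',
+//     path: '/',
+//     method: 'GET'
+//   })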
+
+/***/ }),
+
+/***/ 2907:
+/***/ ((module) => {
+
+
+
+let fastNow = Date.now()
+let fastNowTimeout
+
+const fastTimers = []
+
+function onTimeout () {
+  fastNow = Date.now()
+
+  let len = fastTimers.length
+  let idx = 0
+  while (idx < len) {
+    const timer = fastTimers[idx]
+
+    if (timer.state === 0) {
+      timer.state = fastNow + timer.delay
+    } else if (timer.state > 0 && fastNow >= timer.state) {
+      timer.state = -1
+      timer.callback(timer.opaque)
+    }
+
+    if (timer.state === -1) {
+      timer.state = -2
+      if (idx !== len - 1) {
+        fastTimers[idx] = fastTimers.pop()
+      } else {
+        fastTimers.pop()
+      }
+      len -= 1
+    } else {
+      idx += 1
+    }
+  }
+
+  if (fastTimers.length > 0) {
+    refreshTimeout()
+  }
+}
+
+function refreshTimeout () {
+  if (fastNowTimeout && fastNowTimeout.refresh) {
+    fastNowTimeout.refresh()
+  } else {
+    clearTimeout(fastNowTimeout)
+    fastNowTimeout = setTimeout(onTimeout, 1e3)
+    if (fastNowTimeout.unref) {
+      fastNowTimeout.unref()
+    }
+  }
+}
+
+class Timeout {
+  constructor (callback, delay, opaque) {
+    this.callback = callback
+    this.delay = delay
+    this.opaque = opaque
+
+    //  -2 not in the timer list
+    //  -1 in the timer list but inactive
+    //   0 in the timer list, waiting to be assigned an absolute expiry time on the next tick
+    // > 0 in the timer list, waiting for the stored expiry time to pass
+    this.state = -2
+
+    this.refresh()
+  }
+
+  refresh () {
+    if (this.state === -2) {
+      fastTimers.push(this)
+      if (!fastNowTimeout || fastTimers.length === 1) {
+        refreshTimeout()
+      }
+    }
+
+    this.state = 0
+  }
+
+  clear () {
+    this.state = -1
+  }
+}
+
+module.exports = {
+  setTimeout (callback, delay, opaque) {
+    return delay < 1e3
+      ? setTimeout(callback, delay, opaque)
+      : new Timeout(callback, delay, opaque)
+  },
+  clearTimeout (timeout) {
+    if (timeout instanceof Timeout) {
+      timeout.clear()
+    } else {
+      clearTimeout(timeout)
+    }
+  }
+}
+
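+// Illustrative sketch (not part of the upstream code): these timers trade
+// precision for speed. Delays below one second fall through to the native
+// setTimeout, while longer delays share a single ~1 s "fast now" tick and may
+// therefore fire up to about a second late.
+//
+//   const timers = module.exports
+//   const handle = timers.setTimeout(() => console.log('coarse timeout'), 30e3)
+//   timers.clearTimeout(handle) // for long delays this just marks the Timeout inactive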
+
+/***/ }),
+
+/***/ 3097:
+/***/ ((module, __unused_webpack_exports, __nccwpck_require__) => {
+
+
+
+const diagnosticsChannel = __nccwpck_require__(1637)
+const { uid, states } = __nccwpck_require__(3464)
+const {
+  kReadyState,
+  kSentClose,
+  kByteParser,
+  kReceivedClose
+} = __nccwpck_require__(5832)
+const { fireEvent, failWebsocketConnection } = __nccwpck_require__(3385)
+const { CloseEvent } = __nccwpck_require__(3164)
+const { makeRequest } = __nccwpck_require__(5767)
+const { fetching } = __nccwpck_require__(8358)
+const { Headers } = __nccwpck_require__(2908)
+const { getGlobalDispatcher } = __nccwpck_require__(7882)
+const { kHeadersList } = __nccwpck_require__(4856)
+
+const channels = {}
+channels.open = diagnosticsChannel.channel('undici:websocket:open')
+channels.close = diagnosticsChannel.channel('undici:websocket:close')
+channels.socketError = diagnosticsChannel.channel('undici:websocket:socket_error')
+
+/** @type {import('crypto')} */
+let crypto
+try {
+  crypto = __nccwpck_require__(6982)
+} catch {
+
+}
+
+/**
+ * @see https://websockets.spec.whatwg.org/#concept-websocket-establish
+ * @param {URL} url
+ * @param {string|string[]} protocols
+ * @param {import('./websocket').WebSocket} ws
+ * @param {(response: any) => void} onEstablish
+ * @param {Partial<import('../../types/websocket').WebSocketInit>} options
+ */
+function establishWebSocketConnection (url, protocols, ws, onEstablish, options) {
+  // 1. Let requestURL be a copy of url, with its scheme set to "http", if url’s
+  //    scheme is "ws", and to "https" otherwise.
+  const requestURL = url
+
+  requestURL.protocol = url.protocol === 'ws:' ? 'http:' : 'https:'
+
+  // 2. Let request be a new request, whose URL is requestURL, client is client,
+  //    service-workers mode is "none", referrer is "no-referrer", mode is
+  //    "websocket", credentials mode is "include", cache mode is "no-store" ,
+  //    and redirect mode is "error".
+  const request = makeRequest({
+    urlList: [requestURL],
+    serviceWorkers: 'none',
+    referrer: 'no-referrer',
+    mode: 'websocket',
+    credentials: 'include',
+    cache: 'no-store',
+    redirect: 'error'
+  })
+
+  // Note: undici extension, allow setting custom headers.
+  if (options.headers) {
+    const headersList = new Headers(options.headers)[kHeadersList]
+
+    request.headersList = headersList
+  }
+
+  // 3. Append (`Upgrade`, `websocket`) to request’s header list.
+  // 4. Append (`Connection`, `Upgrade`) to request’s header list.
+  // Note: both of these are handled by undici currently.
+  // https://github.com/nodejs/undici/blob/68c269c4144c446f3f1220951338daef4a6b5ec4/lib/client.js#L1397
+
+  // 5. Let keyValue be a nonce consisting of a randomly selected
+  //    16-byte value that has been forgiving-base64-encoded and
+  //    isomorphic encoded.
+  const keyValue = crypto.randomBytes(16).toString('base64')
+
+  // 6. Append (`Sec-WebSocket-Key`, keyValue) to request’s
+  //    header list.
+  request.headersList.append('sec-websocket-key', keyValue)
+
+  // 7. Append (`Sec-WebSocket-Version`, `13`) to request’s
+  //    header list.
+  request.headersList.append('sec-websocket-version', '13')
+
+  // 8. For each protocol in protocols, combine
+  //    (`Sec-WebSocket-Protocol`, protocol) in request’s header
+  //    list.
+  for (const protocol of protocols) {
+    request.headersList.append('sec-websocket-protocol', protocol)
+  }
+
+  // 9. Let permessageDeflate be a user-agent defined
+  //    "permessage-deflate" extension header value.
+  // https://github.com/mozilla/gecko-dev/blob/ce78234f5e653a5d3916813ff990f053510227bc/netwerk/protocol/websocket/WebSocketChannel.cpp#L2673
+  // TODO: enable once permessage-deflate is supported
+  const permessageDeflate = '' // 'permessage-deflate; 15'
+
+  // 10. Append (`Sec-WebSocket-Extensions`, permessageDeflate) to
+  //     request’s header list.
+  // request.headersList.append('sec-websocket-extensions', permessageDeflate)
+
+  // 11. Fetch request with useParallelQueue set to true, and
+  //     processResponse given response being these steps:
+  const controller = fetching({
+    request,
+    useParallelQueue: true,
+    dispatcher: options.dispatcher ?? getGlobalDispatcher(),
+    processResponse (response) {
+      // 1. If response is a network error or its status is not 101,
+      //    fail the WebSocket connection.
+      if (response.type === 'error' || response.status !== 101) {
+        failWebsocketConnection(ws, 'Received network error or non-101 status code.')
+        return
+      }
+
+      // 2. If protocols is not the empty list and extracting header
+      //    list values given `Sec-WebSocket-Protocol` and response’s
+      //    header list results in null, failure, or the empty byte
+      //    sequence, then fail the WebSocket connection.
+      if (protocols.length !== 0 && !response.headersList.get('Sec-WebSocket-Protocol')) {
+        failWebsocketConnection(ws, 'Server did not respond with sent protocols.')
+        return
+      }
+
+      // 3. Follow the requirements stated step 2 to step 6, inclusive,
+      //    of the last set of steps in section 4.1 of The WebSocket
+      //    Protocol to validate response. This either results in fail
+      //    the WebSocket connection or the WebSocket connection is
+      //    established.
+
+      // 2. If the response lacks an |Upgrade| header field or the |Upgrade|
+      //    header field contains a value that is not an ASCII case-
+      //    insensitive match for the value "websocket", the client MUST
+      //    _Fail the WebSocket Connection_.
+      if (response.headersList.get('Upgrade')?.toLowerCase() !== 'websocket') {
+        failWebsocketConnection(ws, 'Server did not set Upgrade header to "websocket".')
+        return
+      }
+
+      // 3. If the response lacks a |Connection| header field or the
+      //    |Connection| header field doesn't contain a token that is an
+      //    ASCII case-insensitive match for the value "Upgrade", the client
+      //    MUST _Fail the WebSocket Connection_.
+      if (response.headersList.get('Connection')?.toLowerCase() !== 'upgrade') {
+        failWebsocketConnection(ws, 'Server did not set Connection header to "upgrade".')
+        return
+      }
+
+      // 4. If the response lacks a |Sec-WebSocket-Accept| header field or
+      //    the |Sec-WebSocket-Accept| contains a value other than the
+      //    base64-encoded SHA-1 of the concatenation of the |Sec-WebSocket-
+      //    Key| (as a string, not base64-decoded) with the string "258EAFA5-
+      //    E914-47DA-95CA-C5AB0DC85B11" but ignoring any leading and
+      //    trailing whitespace, the client MUST _Fail the WebSocket
+      //    Connection_.
+      const secWSAccept = response.headersList.get('Sec-WebSocket-Accept')
+      const digest = crypto.createHash('sha1').update(keyValue + uid).digest('base64')
+      if (secWSAccept !== digest) {
+        failWebsocketConnection(ws, 'Incorrect hash received in Sec-WebSocket-Accept header.')
+        return
+      }
+
+      // 5. If the response includes a |Sec-WebSocket-Extensions| header
+      //    field and this header field indicates the use of an extension
+      //    that was not present in the client's handshake (the server has
+      //    indicated an extension not requested by the client), the client
+      //    MUST _Fail the WebSocket Connection_.  (The parsing of this
+      //    header field to determine which extensions are requested is
+      //    discussed in Section 9.1.)
+      const secExtension = response.headersList.get('Sec-WebSocket-Extensions')
+
+      if (secExtension !== null && secExtension !== permessageDeflate) {
+        failWebsocketConnection(ws, 'Received different permessage-deflate than the one set.')
+        return
+      }
+
+      // 6. If the response includes a |Sec-WebSocket-Protocol| header field
+      //    and this header field indicates the use of a subprotocol that was
+      //    not present in the client's handshake (the server has indicated a
+      //    subprotocol not requested by the client), the client MUST _Fail
+      //    the WebSocket Connection_.
+      const secProtocol = response.headersList.get('Sec-WebSocket-Protocol')
+
+      if (secProtocol !== null && secProtocol !== request.headersList.get('Sec-WebSocket-Protocol')) {
+        failWebsocketConnection(ws, 'Protocol was not set in the opening handshake.')
+        return
+      }
+
+      response.socket.on('data', onSocketData)
+      response.socket.on('close', onSocketClose)
+      response.socket.on('error', onSocketError)
+
+      if (channels.open.hasSubscribers) {
+        channels.open.publish({
+          address: response.socket.address(),
+          protocol: secProtocol,
+          extensions: secExtension
+        })
+      }
+
+      onEstablish(response)
+    }
+  })
+
+  return controller
+}
+
+/**
+ * @param {Buffer} chunk
+ */
+function onSocketData (chunk) {
+  if (!this.ws[kByteParser].write(chunk)) {
+    this.pause()
+  }
+}
+
+/**
+ * @see https://websockets.spec.whatwg.org/#feedback-from-the-protocol
+ * @see https://datatracker.ietf.org/doc/html/rfc6455#section-7.1.4
+ */
+function onSocketClose () {
+  const { ws } = this
+
+  // If the TCP connection was closed after the
+  // WebSocket closing handshake was completed, the WebSocket connection
+  // is said to have been closed _cleanly_.
+  const wasClean = ws[kSentClose] && ws[kReceivedClose]
+
+  let code = 1005
+  let reason = ''
+
+  const result = ws[kByteParser].closingInfo
+
+  if (result) {
+    code = result.code ?? 1005
+    reason = result.reason
+  } else if (!ws[kSentClose]) {
+    // If _The WebSocket
+    // Connection is Closed_ and no Close control frame was received by the
+    // endpoint (such as could occur if the underlying transport connection
+    // is lost), _The WebSocket Connection Close Code_ is considered to be
+    // 1006.
+    code = 1006
+  }
+
+  // 1. Change the ready state to CLOSED (3).
+  ws[kReadyState] = states.CLOSED
+
+  // 2. If the user agent was required to fail the WebSocket
+  //    connection, or if the WebSocket connection was closed
+  //    after being flagged as full, fire an event named error
+  //    at the WebSocket object.
+  // TODO
+
+  // 3. Fire an event named close at the WebSocket object,
+  //    using CloseEvent, with the wasClean attribute
+  //    initialized to true if the connection closed cleanly
+  //    and false otherwise, the code attribute initialized to
+  //    the WebSocket connection close code, and the reason
+  //    attribute initialized to the result of applying UTF-8
+  //    decode without BOM to the WebSocket connection close
+  //    reason.
+  fireEvent('close', ws, CloseEvent, {
+    wasClean, code, reason
+  })
+
+  if (channels.close.hasSubscribers) {
+    channels.close.publish({
+      websocket: ws,
+      code,
+      reason
+    })
+  }
+}
+
+function onSocketError (error) {
+  const { ws } = this
+
+  ws[kReadyState] = states.CLOSING
+
+  if (channels.socketError.hasSubscribers) {
+    channels.socketError.publish(error)
+  }
+
+  this.destroy()
+}
+
+module.exports = {
+  establishWebSocketConnection
+}
+
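+// Illustrative sketch (not part of the upstream code): the Sec-WebSocket-Accept
+// check in processResponse above expects base64(SHA-1(key + uid)). With the
+// sample key from RFC 6455 section 1.3:
+//
+//   const crypto = require('crypto')
+//   const key = 'dGhlIHNhbXBsZSBub25jZQ=='
+//   crypto.createHash('sha1')
+//     .update(key + '258EAFA5-E914-47DA-95CA-C5AB0DC85B11')
+//     .digest('base64')
+//   // => 's3pPLMBiTxaQ9kYGzzhZRbK+xOo='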
+
+/***/ }),
+
+/***/ 3464:
+/***/ ((module) => {
+
+
+
+// This is a Globally Unique Identifier (GUID) used
+// to validate that the endpoint accepts websocket
+// connections.
+// See https://www.rfc-editor.org/rfc/rfc6455.html#section-1.3
+const uid = '258EAFA5-E914-47DA-95CA-C5AB0DC85B11'
+
+/** @type {PropertyDescriptor} */
+const staticPropertyDescriptors = {
+  enumerable: true,
+  writable: false,
+  configurable: false
+}
+
+const states = {
+  CONNECTING: 0,
+  OPEN: 1,
+  CLOSING: 2,
+  CLOSED: 3
+}
+
+const opcodes = {
+  CONTINUATION: 0x0,
+  TEXT: 0x1,
+  BINARY: 0x2,
+  CLOSE: 0x8,
+  PING: 0x9,
+  PONG: 0xA
+}
+
+const maxUnsigned16Bit = 2 ** 16 - 1 // 65535
+
+const parserStates = {
+  INFO: 0,
+  PAYLOADLENGTH_16: 2,
+  PAYLOADLENGTH_64: 3,
+  READ_DATA: 4
+}
+
+const emptyBuffer = Buffer.allocUnsafe(0)
+
+module.exports = {
+  uid,
+  staticPropertyDescriptors,
+  states,
+  opcodes,
+  maxUnsigned16Bit,
+  parserStates,
+  emptyBuffer
+}
+
+
+/***/ }),
+
+/***/ 3164:
+/***/ ((module, __unused_webpack_exports, __nccwpck_require__) => {
+
+
+
+const { webidl } = __nccwpck_require__(29)
+const { kEnumerableProperty } = __nccwpck_require__(3465)
+const { MessagePort } = __nccwpck_require__(8167)
+
+/**
+ * @see https://html.spec.whatwg.org/multipage/comms.html#messageevent
+ */
+class MessageEvent extends Event {
+  #eventInit
+
+  constructor (type, eventInitDict = {}) {
+    webidl.argumentLengthCheck(arguments, 1, { header: 'MessageEvent constructor' })
+
+    type = webidl.converters.DOMString(type)
+    eventInitDict = webidl.converters.MessageEventInit(eventInitDict)
+
+    super(type, eventInitDict)
+
+    this.#eventInit = eventInitDict
+  }
+
+  get data () {
+    webidl.brandCheck(this, MessageEvent)
+
+    return this.#eventInit.data
+  }
+
+  get origin () {
+    webidl.brandCheck(this, MessageEvent)
+
+    return this.#eventInit.origin
+  }
+
+  get lastEventId () {
+    webidl.brandCheck(this, MessageEvent)
+
+    return this.#eventInit.lastEventId
+  }
+
+  get source () {
+    webidl.brandCheck(this, MessageEvent)
+
+    return this.#eventInit.source
+  }
+
+  get ports () {
+    webidl.brandCheck(this, MessageEvent)
+
+    if (!Object.isFrozen(this.#eventInit.ports)) {
+      Object.freeze(this.#eventInit.ports)
+    }
+
+    return this.#eventInit.ports
+  }
+
+  initMessageEvent (
+    type,
+    bubbles = false,
+    cancelable = false,
+    data = null,
+    origin = '',
+    lastEventId = '',
+    source = null,
+    ports = []
+  ) {
+    webidl.brandCheck(this, MessageEvent)
+
+    webidl.argumentLengthCheck(arguments, 1, { header: 'MessageEvent.initMessageEvent' })
+
+    return new MessageEvent(type, {
+      bubbles, cancelable, data, origin, lastEventId, source, ports
+    })
+  }
+}
+
+/**
+ * @see https://websockets.spec.whatwg.org/#the-closeevent-interface
+ */
+class CloseEvent extends Event {
+  #eventInit
+
+  constructor (type, eventInitDict = {}) {
+    webidl.argumentLengthCheck(arguments, 1, { header: 'CloseEvent constructor' })
+
+    type = webidl.converters.DOMString(type)
+    eventInitDict = webidl.converters.CloseEventInit(eventInitDict)
+
+    super(type, eventInitDict)
+
+    this.#eventInit = eventInitDict
+  }
+
+  get wasClean () {
+    webidl.brandCheck(this, CloseEvent)
+
+    return this.#eventInit.wasClean
+  }
+
+  get code () {
+    webidl.brandCheck(this, CloseEvent)
+
+    return this.#eventInit.code
+  }
+
+  get reason () {
+    webidl.brandCheck(this, CloseEvent)
+
+    return this.#eventInit.reason
+  }
+}
+
+// https://html.spec.whatwg.org/multipage/webappapis.html#the-errorevent-interface
+class ErrorEvent extends Event {
+  #eventInit
+
+  constructor (type, eventInitDict) {
+    webidl.argumentLengthCheck(arguments, 1, { header: 'ErrorEvent constructor' })
+
+    super(type, eventInitDict)
+
+    type = webidl.converters.DOMString(type)
+    eventInitDict = webidl.converters.ErrorEventInit(eventInitDict ?? {})
+
+    this.#eventInit = eventInitDict
+  }
+
+  get message () {
+    webidl.brandCheck(this, ErrorEvent)
+
+    return this.#eventInit.message
+  }
+
+  get filename () {
+    webidl.brandCheck(this, ErrorEvent)
+
+    return this.#eventInit.filename
+  }
+
+  get lineno () {
+    webidl.brandCheck(this, ErrorEvent)
+
+    return this.#eventInit.lineno
+  }
+
+  get colno () {
+    webidl.brandCheck(this, ErrorEvent)
+
+    return this.#eventInit.colno
+  }
+
+  get error () {
+    webidl.brandCheck(this, ErrorEvent)
+
+    return this.#eventInit.error
+  }
+}
+
+Object.defineProperties(MessageEvent.prototype, {
+  [Symbol.toStringTag]: {
+    value: 'MessageEvent',
+    configurable: true
+  },
+  data: kEnumerableProperty,
+  origin: kEnumerableProperty,
+  lastEventId: kEnumerableProperty,
+  source: kEnumerableProperty,
+  ports: kEnumerableProperty,
+  initMessageEvent: kEnumerableProperty
+})
+
+Object.defineProperties(CloseEvent.prototype, {
+  [Symbol.toStringTag]: {
+    value: 'CloseEvent',
+    configurable: true
+  },
+  reason: kEnumerableProperty,
+  code: kEnumerableProperty,
+  wasClean: kEnumerableProperty
+})
+
+Object.defineProperties(ErrorEvent.prototype, {
+  [Symbol.toStringTag]: {
+    value: 'ErrorEvent',
+    configurable: true
+  },
+  message: kEnumerableProperty,
+  filename: kEnumerableProperty,
+  lineno: kEnumerableProperty,
+  colno: kEnumerableProperty,
+  error: kEnumerableProperty
+})
+
+webidl.converters.MessagePort = webidl.interfaceConverter(MessagePort)
+
+webidl.converters['sequence<MessagePort>'] = webidl.sequenceConverter(
+  webidl.converters.MessagePort
+)
+
+const eventInit = [
+  {
+    key: 'bubbles',
+    converter: webidl.converters.boolean,
+    defaultValue: false
+  },
+  {
+    key: 'cancelable',
+    converter: webidl.converters.boolean,
+    defaultValue: false
+  },
+  {
+    key: 'composed',
+    converter: webidl.converters.boolean,
+    defaultValue: false
+  }
+]
+
+webidl.converters.MessageEventInit = webidl.dictionaryConverter([
+  ...eventInit,
+  {
+    key: 'data',
+    converter: webidl.converters.any,
+    defaultValue: null
+  },
+  {
+    key: 'origin',
+    converter: webidl.converters.USVString,
+    defaultValue: ''
+  },
+  {
+    key: 'lastEventId',
+    converter: webidl.converters.DOMString,
+    defaultValue: ''
+  },
+  {
+    key: 'source',
+    // Node doesn't implement WindowProxy or ServiceWorker, so the only
+    // valid value for source is a MessagePort.
+    converter: webidl.nullableConverter(webidl.converters.MessagePort),
+    defaultValue: null
+  },
+  {
+    key: 'ports',
+    converter: webidl.converters['sequence<MessagePort>'],
+    get defaultValue () {
+      return []
+    }
+  }
+])
+
+webidl.converters.CloseEventInit = webidl.dictionaryConverter([
+  ...eventInit,
+  {
+    key: 'wasClean',
+    converter: webidl.converters.boolean,
+    defaultValue: false
+  },
+  {
+    key: 'code',
+    converter: webidl.converters['unsigned short'],
+    defaultValue: 0
+  },
+  {
+    key: 'reason',
+    converter: webidl.converters.USVString,
+    defaultValue: ''
+  }
+])
+
+webidl.converters.ErrorEventInit = webidl.dictionaryConverter([
+  ...eventInit,
+  {
+    key: 'message',
+    converter: webidl.converters.DOMString,
+    defaultValue: ''
+  },
+  {
+    key: 'filename',
+    converter: webidl.converters.USVString,
+    defaultValue: ''
+  },
+  {
+    key: 'lineno',
+    converter: webidl.converters['unsigned long'],
+    defaultValue: 0
+  },
+  {
+    key: 'colno',
+    converter: webidl.converters['unsigned long'],
+    defaultValue: 0
+  },
+  {
+    key: 'error',
+    converter: webidl.converters.any
+  }
+])
+
+module.exports = {
+  MessageEvent,
+  CloseEvent,
+  ErrorEvent
+}
+
+
+/***/ }),
+
+/***/ 3272:
+/***/ ((module, __unused_webpack_exports, __nccwpck_require__) => {
+
+
+
+const { maxUnsigned16Bit } = __nccwpck_require__(3464)
+
+/** @type {import('crypto')} */
+let crypto
+try {
+  crypto = __nccwpck_require__(6982)
+} catch {
+
+}
+
+class WebsocketFrameSend {
+  /**
+   * @param {Buffer|undefined} data
+   */
+  constructor (data) {
+    this.frameData = data
+    this.maskKey = crypto.randomBytes(4)
+  }
+
+  createFrame (opcode) {
+    const bodyLength = this.frameData?.byteLength ?? 0
+
+    /** @type {number} */
+    let payloadLength = bodyLength // 0-125
+    let offset = 6
+
+    if (bodyLength > maxUnsigned16Bit) {
+      offset += 8 // payload length is next 8 bytes
+      payloadLength = 127
+    } else if (bodyLength > 125) {
+      offset += 2 // payload length is next 2 bytes
+      payloadLength = 126
+    }
+
+    const buffer = Buffer.allocUnsafe(bodyLength + offset)
+
+    // Clear first 2 bytes, everything else is overwritten
+    buffer[0] = buffer[1] = 0
+    buffer[0] |= 0x80 // FIN
+    buffer[0] = (buffer[0] & 0xF0) + opcode // opcode
+
+    /*! ws. MIT License. Einar Otto Stangvik <einaros@gmail.com> */
+    buffer[offset - 4] = this.maskKey[0]
+    buffer[offset - 3] = this.maskKey[1]
+    buffer[offset - 2] = this.maskKey[2]
+    buffer[offset - 1] = this.maskKey[3]
+
+    buffer[1] = payloadLength
+
+    if (payloadLength === 126) {
+      buffer.writeUInt16BE(bodyLength, 2)
+    } else if (payloadLength === 127) {
+      // Clear extended payload length
+      buffer[2] = buffer[3] = 0
+      buffer.writeUIntBE(bodyLength, 4, 6)
+    }
+
+    buffer[1] |= 0x80 // MASK
+
+    // mask body
+    for (let i = 0; i < bodyLength; i++) {
+      buffer[offset + i] = this.frameData[i] ^ this.maskKey[i % 4]
+    }
+
+    return buffer
+  }
+}
+
+module.exports = {
+  WebsocketFrameSend
+}
+
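+// Illustrative sketch (not part of the upstream code): createFrame builds a
+// client-to-server frame, so FIN is always set, the payload is always masked,
+// and the length is encoded in 1, 1+2 or 1+8 bytes depending on its size.
+// For a 2-byte text payload (opcode 0x1 from the constants module):
+//
+//   const frame = new WebsocketFrameSend(Buffer.from('hi'))
+//   const buf = frame.createFrame(0x1)
+//   // buf[0] === 0x81        -> FIN set, opcode 0x1 (text)
+//   // (buf[1] & 0x80) !== 0  -> MASK bit set
+//   // (buf[1] & 0x7F) === 2  -> 2-byte payload, no extended length bytes
+//   // buf[2..5] hold the mask key, buf[6..7] the masked "hi"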
+
+/***/ }),
+
+/***/ 1084:
+/***/ ((module, __unused_webpack_exports, __nccwpck_require__) => {
+
+
+
+const { Writable } = __nccwpck_require__(2203)
+const diagnosticsChannel = __nccwpck_require__(1637)
+const { parserStates, opcodes, states, emptyBuffer } = __nccwpck_require__(3464)
+const { kReadyState, kSentClose, kResponse, kReceivedClose } = __nccwpck_require__(5832)
+const { isValidStatusCode, failWebsocketConnection, websocketMessageReceived } = __nccwpck_require__(3385)
+const { WebsocketFrameSend } = __nccwpck_require__(3272)
+
+// This code was influenced by ws released under the MIT license.
+// Copyright (c) 2011 Einar Otto Stangvik <einaros@gmail.com>
+// Copyright (c) 2013 Arnout Kazemier and contributors
+// Copyright (c) 2016 Luigi Pinca and contributors
+
+const channels = {}
+channels.ping = diagnosticsChannel.channel('undici:websocket:ping')
+channels.pong = diagnosticsChannel.channel('undici:websocket:pong')
+
+class ByteParser extends Writable {
+  #buffers = []
+  #byteOffset = 0
+
+  #state = parserStates.INFO
+
+  #info = {}
+  #fragments = []
+
+  constructor (ws) {
+    super()
+
+    this.ws = ws
+  }
+
+  /**
+   * @param {Buffer} chunk
+   * @param {() => void} callback
+   */
+  _write (chunk, _, callback) {
+    this.#buffers.push(chunk)
+    this.#byteOffset += chunk.length
+
+    this.run(callback)
+  }
+
+  /**
+   * Runs whenever a new chunk is received.
+   * The callback is invoked once there are no more buffered chunks,
+   * or once not enough bytes are buffered to continue parsing.
+   */
+  run (callback) {
+    while (true) {
+      if (this.#state === parserStates.INFO) {
+        // If there aren't enough bytes to parse the payload length, etc.
+        if (this.#byteOffset < 2) {
+          return callback()
+        }
+
+        const buffer = this.consume(2)
+
+        this.#info.fin = (buffer[0] & 0x80) !== 0
+        this.#info.opcode = buffer[0] & 0x0F
+
+        // If we receive a fragmented message, we use the type of the first
+        // frame to parse the full message as binary/text, when it's terminated
+        this.#info.originalOpcode ??= this.#info.opcode
+
+        this.#info.fragmented = !this.#info.fin && this.#info.opcode !== opcodes.CONTINUATION
+
+        if (this.#info.fragmented && this.#info.opcode !== opcodes.BINARY && this.#info.opcode !== opcodes.TEXT) {
+          // Only text and binary frames can be fragmented
+          failWebsocketConnection(this.ws, 'Invalid frame type was fragmented.')
+          return
+        }
+
+        const payloadLength = buffer[1] & 0x7F
+
+        if (payloadLength <= 125) {
+          this.#info.payloadLength = payloadLength
+          this.#state = parserStates.READ_DATA
+        } else if (payloadLength === 126) {
+          this.#state = parserStates.PAYLOADLENGTH_16
+        } else if (payloadLength === 127) {
+          this.#state = parserStates.PAYLOADLENGTH_64
+        }
+
+        if (this.#info.fragmented && payloadLength > 125) {
+          // A fragmented frame can't be fragmented itself
+          failWebsocketConnection(this.ws, 'Fragmented frame exceeded 125 bytes.')
+          return
+        } else if (
+          (this.#info.opcode === opcodes.PING ||
+            this.#info.opcode === opcodes.PONG ||
+            this.#info.opcode === opcodes.CLOSE) &&
+          payloadLength > 125
+        ) {
+          // Control frames can have a payload length of 125 bytes MAX
+          failWebsocketConnection(this.ws, 'Payload length for control frame exceeded 125 bytes.')
+          return
+        } else if (this.#info.opcode === opcodes.CLOSE) {
+          if (payloadLength === 1) {
+            failWebsocketConnection(this.ws, 'Received close frame with a 1-byte body.')
+            return
+          }
+
+          const body = this.consume(payloadLength)
+
+          this.#info.closeInfo = this.parseCloseBody(false, body)
+
+          if (!this.ws[kSentClose]) {
+            // If an endpoint receives a Close frame and did not previously send a
+            // Close frame, the endpoint MUST send a Close frame in response.  (When
+            // sending a Close frame in response, the endpoint typically echos the
+            // status code it received.)
+            const body = Buffer.allocUnsafe(2)
+            body.writeUInt16BE(this.#info.closeInfo.code, 0)
+            const closeFrame = new WebsocketFrameSend(body)
+
+            this.ws[kResponse].socket.write(
+              closeFrame.createFrame(opcodes.CLOSE),
+              (err) => {
+                if (!err) {
+                  this.ws[kSentClose] = true
+                }
+              }
+            )
+          }
+
+          // Upon either sending or receiving a Close control frame, it is said
+          // that _The WebSocket Closing Handshake is Started_ and that the
+          // WebSocket connection is in the CLOSING state.
+          this.ws[kReadyState] = states.CLOSING
+          this.ws[kReceivedClose] = true
+
+          this.end()
+
+          return
+        } else if (this.#info.opcode === opcodes.PING) {
+          // Upon receipt of a Ping frame, an endpoint MUST send a Pong frame in
+          // response, unless it already received a Close frame.
+          // A Pong frame sent in response to a Ping frame must have identical
+          // "Application data"
+
+          const body = this.consume(payloadLength)
+
+          if (!this.ws[kReceivedClose]) {
+            const frame = new WebsocketFrameSend(body)
+
+            this.ws[kResponse].socket.write(frame.createFrame(opcodes.PONG))
+
+            if (channels.ping.hasSubscribers) {
+              channels.ping.publish({
+                payload: body
+              })
+            }
+          }
+
+          this.#state = parserStates.INFO
+
+          if (this.#byteOffset > 0) {
+            continue
+          } else {
+            callback()
+            return
+          }
+        } else if (this.#info.opcode === opcodes.PONG) {
+          // A Pong frame MAY be sent unsolicited.  This serves as a
+          // unidirectional heartbeat.  A response to an unsolicited Pong frame is
+          // not expected.
+
+          const body = this.consume(payloadLength)
+
+          if (channels.pong.hasSubscribers) {
+            channels.pong.publish({
+              payload: body
+            })
+          }
+
+          if (this.#byteOffset > 0) {
+            continue
+          } else {
+            callback()
+            return
+          }
+        }
+      } else if (this.#state === parserStates.PAYLOADLENGTH_16) {
+        if (this.#byteOffset < 2) {
+          return callback()
+        }
+
+        const buffer = this.consume(2)
+
+        this.#info.payloadLength = buffer.readUInt16BE(0)
+        this.#state = parserStates.READ_DATA
+      } else if (this.#state === parserStates.PAYLOADLENGTH_64) {
+        if (this.#byteOffset < 8) {
+          return callback()
+        }
+
+        const buffer = this.consume(8)
+        const upper = buffer.readUInt32BE(0)
+
+        // 2^31 is the maximum number of bytes an ArrayBuffer can contain
+        // on 32-bit systems; on 64-bit systems, the limit is
+        // 2^53 - 1 bytes.
+        // https://developer.mozilla.org/en-US/docs/Web/JavaScript/Reference/Errors/Invalid_array_length
+        // https://source.chromium.org/chromium/chromium/src/+/main:v8/src/common/globals.h;drc=1946212ac0100668f14eb9e2843bdd846e510a1e;bpv=1;bpt=1;l=1275
+        // https://source.chromium.org/chromium/chromium/src/+/main:v8/src/objects/js-array-buffer.h;l=34;drc=1946212ac0100668f14eb9e2843bdd846e510a1e
+        if (upper > 2 ** 31 - 1) {
+          failWebsocketConnection(this.ws, 'Received payload length > 2^31 bytes.')
+          return
+        }
+
+        const lower = buffer.readUInt32BE(4)
+
+        // Combine the two big-endian halves of the 64-bit extended length.
+        this.#info.payloadLength = (upper * 2 ** 32) + lower
+        this.#state = parserStates.READ_DATA
+      } else if (this.#state === parserStates.READ_DATA) {
+        if (this.#byteOffset < this.#info.payloadLength) {
+          // If there is still more data in this chunk that needs to be read
+          return callback()
+        } else if (this.#byteOffset >= this.#info.payloadLength) {
+          // If the server sent multiple frames in a single chunk
+
+          const body = this.consume(this.#info.payloadLength)
+
+          this.#fragments.push(body)
+
+          // If the frame is unfragmented, or a fragmented frame was terminated,
+          // a message was received
+          if (!this.#info.fragmented || (this.#info.fin && this.#info.opcode === opcodes.CONTINUATION)) {
+            const fullMessage = Buffer.concat(this.#fragments)
+
+            websocketMessageReceived(this.ws, this.#info.originalOpcode, fullMessage)
+
+            this.#info = {}
+            this.#fragments.length = 0
+          }
+
+          this.#state = parserStates.INFO
+        }
+      }
+
+      if (this.#byteOffset > 0) {
+        continue
+      } else {
+        callback()
+        break
+      }
+    }
+  }
+
+  /**
+   * Take n bytes from the buffered Buffers
+   * @param {number} n
+   * @returns {Buffer|null}
+   */
+  consume (n) {
+    if (n > this.#byteOffset) {
+      return null
+    } else if (n === 0) {
+      return emptyBuffer
+    }
+
+    if (this.#buffers[0].length === n) {
+      this.#byteOffset -= this.#buffers[0].length
+      return this.#buffers.shift()
+    }
+
+    const buffer = Buffer.allocUnsafe(n)
+    let offset = 0
+
+    while (offset !== n) {
+      const next = this.#buffers[0]
+      const { length } = next
+
+      if (length + offset === n) {
+        buffer.set(this.#buffers.shift(), offset)
+        break
+      } else if (length + offset > n) {
+        buffer.set(next.subarray(0, n - offset), offset)
+        this.#buffers[0] = next.subarray(n - offset)
+        break
+      } else {
+        buffer.set(this.#buffers.shift(), offset)
+        offset += next.length
+      }
+    }
+
+    this.#byteOffset -= n
+
+    return buffer
+  }
+
+  parseCloseBody (onlyCode, data) {
+    // https://datatracker.ietf.org/doc/html/rfc6455#section-7.1.5
+    /** @type {number|undefined} */
+    let code
+
+    if (data.length >= 2) {
+      // _The WebSocket Connection Close Code_ is
+      // defined as the status code (Section 7.4) contained in the first Close
+      // control frame received by the application
+      code = data.readUInt16BE(0)
+    }
+
+    if (onlyCode) {
+      if (!isValidStatusCode(code)) {
+        return null
+      }
+
+      return { code }
+    }
+
+    // https://datatracker.ietf.org/doc/html/rfc6455#section-7.1.6
+    /** @type {Buffer} */
+    let reason = data.subarray(2)
+
+    // Remove BOM
+    if (reason[0] === 0xEF && reason[1] === 0xBB && reason[2] === 0xBF) {
+      reason = reason.subarray(3)
+    }
+
+    if (code !== undefined && !isValidStatusCode(code)) {
+      return null
+    }
+
+    try {
+      // TODO: optimize this
+      reason = new TextDecoder('utf-8', { fatal: true }).decode(reason)
+    } catch {
+      return null
+    }
+
+    return { code, reason }
+  }
+
+  get closingInfo () {
+    return this.#info.closeInfo
+  }
+}
+
+module.exports = {
+  ByteParser
+}
+
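+// Illustrative sketch (not part of the upstream code): a Close frame body is an
+// optional 2-byte big-endian status code followed by a UTF-8 reason, which is
+// exactly what parseCloseBody extracts. Assuming `parser` is a ByteParser:
+//
+//   const body = Buffer.concat([Buffer.from([0x03, 0xE8]), Buffer.from('bye')])
+//   parser.parseCloseBody(false, body)
+//   // => { code: 1000, reason: 'bye' }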
+
+/***/ }),
+
+/***/ 5832:
+/***/ ((module) => {
+
+
+
+module.exports = {
+  kWebSocketURL: Symbol('url'),
+  kReadyState: Symbol('ready state'),
+  kController: Symbol('controller'),
+  kResponse: Symbol('response'),
+  kBinaryType: Symbol('binary type'),
+  kSentClose: Symbol('sent close'),
+  kReceivedClose: Symbol('received close'),
+  kByteParser: Symbol('byte parser')
+}
+
+
+/***/ }),
+
+/***/ 3385:
+/***/ ((module, __unused_webpack_exports, __nccwpck_require__) => {
+
+
+
+const { kReadyState, kController, kResponse, kBinaryType, kWebSocketURL } = __nccwpck_require__(5832)
+const { states, opcodes } = __nccwpck_require__(3464)
+const { MessageEvent, ErrorEvent } = __nccwpck_require__(3164)
+
+/* globals Blob */
+
+/**
+ * @param {import('./websocket').WebSocket} ws
+ */
+function isEstablished (ws) {
+  // If the server's response is validated as provided for above, it is
+  // said that _The WebSocket Connection is Established_ and that the
+  // WebSocket Connection is in the OPEN state.
+  return ws[kReadyState] === states.OPEN
+}
+
+/**
+ * @param {import('./websocket').WebSocket} ws
+ */
+function isClosing (ws) {
+  // Upon either sending or receiving a Close control frame, it is said
+  // that _The WebSocket Closing Handshake is Started_ and that the
+  // WebSocket connection is in the CLOSING state.
+  return ws[kReadyState] === states.CLOSING
+}
+
+/**
+ * @param {import('./websocket').WebSocket} ws
+ */
+function isClosed (ws) {
+  return ws[kReadyState] === states.CLOSED
+}
+
+/**
+ * @see https://dom.spec.whatwg.org/#concept-event-fire
+ * @param {string} e
+ * @param {EventTarget} target
+ * @param {EventInit | undefined} eventInitDict
+ */
+function fireEvent (e, target, eventConstructor = Event, eventInitDict) {
+  // 1. If eventConstructor is not given, then let eventConstructor be Event.
+
+  // 2. Let event be the result of creating an event given eventConstructor,
+  //    in the relevant realm of target.
+  // 3. Initialize event’s type attribute to e.
+  const event = new eventConstructor(e, eventInitDict) // eslint-disable-line new-cap
+
+  // 4. Initialize any other IDL attributes of event as described in the
+  //    invocation of this algorithm.
+
+  // 5. Return the result of dispatching event at target, with legacy target
+  //    override flag set if set.
+  target.dispatchEvent(event)
+}
+
+/**
+ * @see https://websockets.spec.whatwg.org/#feedback-from-the-protocol
+ * @param {import('./websocket').WebSocket} ws
+ * @param {number} type Opcode
+ * @param {Buffer} data application data
+ */
+function websocketMessageReceived (ws, type, data) {
+  // 1. If ready state is not OPEN (1), then return.
+  if (ws[kReadyState] !== states.OPEN) {
+    return
+  }
+
+  // 2. Let dataForEvent be determined by switching on type and binary type:
+  let dataForEvent
+
+  if (type === opcodes.TEXT) {
+    // -> type indicates that the data is Text
+    //      a new DOMString containing data
+    try {
+      dataForEvent = new TextDecoder('utf-8', { fatal: true }).decode(data)
+    } catch {
+      failWebsocketConnection(ws, 'Received invalid UTF-8 in text frame.')
+      return
+    }
+  } else if (type === opcodes.BINARY) {
+    if (ws[kBinaryType] === 'blob') {
+      // -> type indicates that the data is Binary and binary type is "blob"
+      //      a new Blob object, created in the relevant Realm of the WebSocket
+      //      object, that represents data as its raw data
+      dataForEvent = new Blob([data])
+    } else {
+      // -> type indicates that the data is Binary and binary type is "arraybuffer"
+      //      a new ArrayBuffer object, created in the relevant Realm of the
+      //      WebSocket object, whose contents are data
+      dataForEvent = new Uint8Array(data).buffer
+    }
+  }
+
+  // 3. Fire an event named message at the WebSocket object, using MessageEvent,
+  //    with the origin attribute initialized to the serialization of the WebSocket
+  //    object’s url's origin, and the data attribute initialized to dataForEvent.
+  fireEvent('message', ws, MessageEvent, {
+    origin: ws[kWebSocketURL].origin,
+    data: dataForEvent
+  })
+}
+
+/**
+ * @see https://datatracker.ietf.org/doc/html/rfc6455
+ * @see https://datatracker.ietf.org/doc/html/rfc2616
+ * @see https://bugs.chromium.org/p/chromium/issues/detail?id=398407
+ * @param {string} protocol
+ */
+function isValidSubprotocol (protocol) {
+  // If present, this value indicates one
+  // or more comma-separated subprotocol the client wishes to speak,
+  // ordered by preference.  The elements that comprise this value
+  // MUST be non-empty strings with characters in the range U+0021 to
+  // U+007E not including separator characters as defined in
+  // [RFC2616] and MUST all be unique strings.
+  if (protocol.length === 0) {
+    return false
+  }
+
+  for (const char of protocol) {
+    const code = char.charCodeAt(0)
+
+    if (
+      code < 0x21 ||
+      code > 0x7E ||
+      char === '(' ||
+      char === ')' ||
+      char === '<' ||
+      char === '>' ||
+      char === '@' ||
+      char === ',' ||
+      char === ';' ||
+      char === ':' ||
+      char === '\\' ||
+      char === '"' ||
+      char === '/' ||
+      char === '[' ||
+      char === ']' ||
+      char === '?' ||
+      char === '=' ||
+      char === '{' ||
+      char === '}' ||
+      code === 32 || // SP
+      code === 9 // HT
+    ) {
+      return false
+    }
+  }
+
+  return true
+}
+
+/**
+ * @see https://datatracker.ietf.org/doc/html/rfc6455#section-7.4
+ * @param {number} code
+ */
+function isValidStatusCode (code) {
+  if (code >= 1000 && code < 1015) {
+    return (
+      code !== 1004 && // reserved
+      code !== 1005 && // "MUST NOT be set as a status code"
+      code !== 1006 // "MUST NOT be set as a status code"
+    )
+  }
+
+  return code >= 3000 && code <= 4999
+}
+
+/**
+ * @param {import('./websocket').WebSocket} ws
+ * @param {string|undefined} reason
+ */
+function failWebsocketConnection (ws, reason) {
+  const { [kController]: controller, [kResponse]: response } = ws
+
+  controller.abort()
+
+  if (response?.socket && !response.socket.destroyed) {
+    response.socket.destroy()
+  }
+
+  if (reason) {
+    fireEvent('error', ws, ErrorEvent, {
+      error: new Error(reason)
+    })
+  }
+}
+
+module.exports = {
+  isEstablished,
+  isClosing,
+  isClosed,
+  fireEvent,
+  isValidSubprotocol,
+  isValidStatusCode,
+  failWebsocketConnection,
+  websocketMessageReceived
+}
+
+
+/***/ }),
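+
+// A minimal usage sketch of the close-code validation exported by the module above,
+// assuming its bundled id (3385) is resolvable here; it follows the RFC 6455 §7.4
+// rules encoded in isValidStatusCode: 1000–1014 minus the reserved 1004/1005/1006,
+// plus the registered/private 3000–4999 range.
+//
+//   const { isValidStatusCode } = __nccwpck_require__(3385)
+//   isValidStatusCode(1000) // true  – normal closure
+//   isValidStatusCode(1005) // false – "MUST NOT be set as a status code"
+//   isValidStatusCode(3500) // true  – registered/private-use range
+//   isValidStatusCode(5000) // false – outside every allowed range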
+
+/***/ 6102:
+/***/ ((module, __unused_webpack_exports, __nccwpck_require__) => {
+
+
+
+const { webidl } = __nccwpck_require__(29)
+const { DOMException } = __nccwpck_require__(4135)
+const { URLSerializer } = __nccwpck_require__(3911)
+const { getGlobalOrigin } = __nccwpck_require__(9963)
+const { staticPropertyDescriptors, states, opcodes, emptyBuffer } = __nccwpck_require__(3464)
+const {
+  kWebSocketURL,
+  kReadyState,
+  kController,
+  kBinaryType,
+  kResponse,
+  kSentClose,
+  kByteParser
+} = __nccwpck_require__(5832)
+const { isEstablished, isClosing, isValidSubprotocol, failWebsocketConnection, fireEvent } = __nccwpck_require__(3385)
+const { establishWebSocketConnection } = __nccwpck_require__(3097)
+const { WebsocketFrameSend } = __nccwpck_require__(3272)
+const { ByteParser } = __nccwpck_require__(1084)
+const { kEnumerableProperty, isBlobLike } = __nccwpck_require__(3465)
+const { getGlobalDispatcher } = __nccwpck_require__(7882)
+const { types } = __nccwpck_require__(9023)
+
+let experimentalWarned = false
+
+// https://websockets.spec.whatwg.org/#interface-definition
+class WebSocket extends EventTarget {
+  #events = {
+    open: null,
+    error: null,
+    close: null,
+    message: null
+  }
+
+  #bufferedAmount = 0
+  #protocol = ''
+  #extensions = ''
+
+  /**
+   * @param {string} url
+   * @param {string|string[]} protocols
+   */
+  constructor (url, protocols = []) {
+    super()
+
+    webidl.argumentLengthCheck(arguments, 1, { header: 'WebSocket constructor' })
+
+    if (!experimentalWarned) {
+      experimentalWarned = true
+      process.emitWarning('WebSockets are experimental, expect them to change at any time.', {
+        code: 'UNDICI-WS'
+      })
+    }
+
+    const options = webidl.converters['DOMString or sequence<DOMString> or WebSocketInit'](protocols)
+
+    url = webidl.converters.USVString(url)
+    protocols = options.protocols
+
+    // 1. Let baseURL be this's relevant settings object's API base URL.
+    const baseURL = getGlobalOrigin()
+
+    // 1. Let urlRecord be the result of applying the URL parser to url with baseURL.
+    let urlRecord
+
+    try {
+      urlRecord = new URL(url, baseURL)
+    } catch (e) {
+      // 3. If urlRecord is failure, then throw a "SyntaxError" DOMException.
+      throw new DOMException(e, 'SyntaxError')
+    }
+
+    // 4. If urlRecord’s scheme is "http", then set urlRecord’s scheme to "ws".
+    if (urlRecord.protocol === 'http:') {
+      urlRecord.protocol = 'ws:'
+    } else if (urlRecord.protocol === 'https:') {
+      // 5. Otherwise, if urlRecord’s scheme is "https", set urlRecord’s scheme to "wss".
+      urlRecord.protocol = 'wss:'
+    }
+
+    // 6. If urlRecord’s scheme is not "ws" or "wss", then throw a "SyntaxError" DOMException.
+    if (urlRecord.protocol !== 'ws:' && urlRecord.protocol !== 'wss:') {
+      throw new DOMException(
+        `Expected a ws: or wss: protocol, got ${urlRecord.protocol}`,
+        'SyntaxError'
+      )
+    }
+
+    // 7. If urlRecord’s fragment is non-null, then throw a "SyntaxError"
+    //    DOMException.
+    if (urlRecord.hash || urlRecord.href.endsWith('#')) {
+      throw new DOMException('Got fragment', 'SyntaxError')
+    }
+
+    // 8. If protocols is a string, set protocols to a sequence consisting
+    //    of just that string.
+    if (typeof protocols === 'string') {
+      protocols = [protocols]
+    }
+
+    // 9. If any of the values in protocols occur more than once or otherwise
+    //    fail to match the requirements for elements that comprise the value
+    //    of `Sec-WebSocket-Protocol` fields as defined by The WebSocket
+    //    protocol, then throw a "SyntaxError" DOMException.
+    if (protocols.length !== new Set(protocols.map(p => p.toLowerCase())).size) {
+      throw new DOMException('Invalid Sec-WebSocket-Protocol value', 'SyntaxError')
+    }
+
+    if (protocols.length > 0 && !protocols.every(p => isValidSubprotocol(p))) {
+      throw new DOMException('Invalid Sec-WebSocket-Protocol value', 'SyntaxError')
+    }
+
+    // 10. Set this's url to urlRecord.
+    this[kWebSocketURL] = new URL(urlRecord.href)
+
+    // 11. Let client be this's relevant settings object.
+
+    // 12. Run this step in parallel:
+
+    //    1. Establish a WebSocket connection given urlRecord, protocols,
+    //       and client.
+    this[kController] = establishWebSocketConnection(
+      urlRecord,
+      protocols,
+      this,
+      (response) => this.#onConnectionEstablished(response),
+      options
+    )
+
+    // Each WebSocket object has an associated ready state, which is a
+    // number representing the state of the connection. Initially it must
+    // be CONNECTING (0).
+    this[kReadyState] = WebSocket.CONNECTING
+
+    // The extensions attribute must initially return the empty string.
+
+    // The protocol attribute must initially return the empty string.
+
+    // Each WebSocket object has an associated binary type, which is a
+    // BinaryType. Initially it must be "blob".
+    this[kBinaryType] = 'blob'
+  }
+
+  /**
+   * @see https://websockets.spec.whatwg.org/#dom-websocket-close
+   * @param {number|undefined} code
+   * @param {string|undefined} reason
+   */
+  close (code = undefined, reason = undefined) {
+    webidl.brandCheck(this, WebSocket)
+
+    if (code !== undefined) {
+      code = webidl.converters['unsigned short'](code, { clamp: true })
+    }
+
+    if (reason !== undefined) {
+      reason = webidl.converters.USVString(reason)
+    }
+
+    // 1. If code is present, but is neither an integer equal to 1000 nor an
+    //    integer in the range 3000 to 4999, inclusive, throw an
+    //    "InvalidAccessError" DOMException.
+    if (code !== undefined) {
+      if (code !== 1000 && (code < 3000 || code > 4999)) {
+        throw new DOMException('invalid code', 'InvalidAccessError')
+      }
+    }
+
+    let reasonByteLength = 0
+
+    // 2. If reason is present, then run these substeps:
+    if (reason !== undefined) {
+      // 1. Let reasonBytes be the result of encoding reason.
+      // 2. If reasonBytes is longer than 123 bytes, then throw a
+      //    "SyntaxError" DOMException.
+      reasonByteLength = Buffer.byteLength(reason)
+
+      if (reasonByteLength > 123) {
+        throw new DOMException(
+          `Reason must not be greater than 123 bytes; received ${reasonByteLength}`,
+          'SyntaxError'
+        )
+      }
+    }
+
+    // 3. Run the first matching steps from the following list:
+    if (this[kReadyState] === WebSocket.CLOSING || this[kReadyState] === WebSocket.CLOSED) {
+      // If this's ready state is CLOSING (2) or CLOSED (3)
+      // Do nothing.
+    } else if (!isEstablished(this)) {
+      // If the WebSocket connection is not yet established
+      // Fail the WebSocket connection and set this's ready state
+      // to CLOSING (2).
+      failWebsocketConnection(this, 'Connection was closed before it was established.')
+      this[kReadyState] = WebSocket.CLOSING
+    } else if (!isClosing(this)) {
+      // If the WebSocket closing handshake has not yet been started
+      // Start the WebSocket closing handshake and set this's ready
+      // state to CLOSING (2).
+      // - If neither code nor reason is present, the WebSocket Close
+      //   message must not have a body.
+      // - If code is present, then the status code to use in the
+      //   WebSocket Close message must be the integer given by code.
+      // - If reason is also present, then reasonBytes must be
+      //   provided in the Close message after the status code.
+
+      const frame = new WebsocketFrameSend()
+
+      // If neither code nor reason is present, the WebSocket Close
+      // message must not have a body.
+
+      // If code is present, then the status code to use in the
+      // WebSocket Close message must be the integer given by code.
+      if (code !== undefined && reason === undefined) {
+        frame.frameData = Buffer.allocUnsafe(2)
+        frame.frameData.writeUInt16BE(code, 0)
+      } else if (code !== undefined && reason !== undefined) {
+        // If reason is also present, then reasonBytes must be
+        // provided in the Close message after the status code.
+        frame.frameData = Buffer.allocUnsafe(2 + reasonByteLength)
+        frame.frameData.writeUInt16BE(code, 0)
+        // the body MAY contain UTF-8-encoded data with value /reason/
+        frame.frameData.write(reason, 2, 'utf-8')
+      } else {
+        frame.frameData = emptyBuffer
+      }
+
+      /** @type {import('stream').Duplex} */
+      const socket = this[kResponse].socket
+
+      socket.write(frame.createFrame(opcodes.CLOSE), (err) => {
+        if (!err) {
+          this[kSentClose] = true
+        }
+      })
+
+      // Upon either sending or receiving a Close control frame, it is said
+      // that _The WebSocket Closing Handshake is Started_ and that the
+      // WebSocket connection is in the CLOSING state.
+      this[kReadyState] = states.CLOSING
+    } else {
+      // Otherwise
+      // Set this's ready state to CLOSING (2).
+      this[kReadyState] = WebSocket.CLOSING
+    }
+  }
+
+  /**
+   * @see https://websockets.spec.whatwg.org/#dom-websocket-send
+   * @param {NodeJS.TypedArray|ArrayBuffer|Blob|string} data
+   */
+  send (data) {
+    webidl.brandCheck(this, WebSocket)
+
+    webidl.argumentLengthCheck(arguments, 1, { header: 'WebSocket.send' })
+
+    data = webidl.converters.WebSocketSendData(data)
+
+    // 1. If this's ready state is CONNECTING, then throw an
+    //    "InvalidStateError" DOMException.
+    if (this[kReadyState] === WebSocket.CONNECTING) {
+      throw new DOMException('Sent before connected.', 'InvalidStateError')
+    }
+
+    // 2. Run the appropriate set of steps from the following list:
+    // https://datatracker.ietf.org/doc/html/rfc6455#section-6.1
+    // https://datatracker.ietf.org/doc/html/rfc6455#section-5.2
+
+    if (!isEstablished(this) || isClosing(this)) {
+      return
+    }
+
+    /** @type {import('stream').Duplex} */
+    const socket = this[kResponse].socket
+
+    // If data is a string
+    if (typeof data === 'string') {
+      // If the WebSocket connection is established and the WebSocket
+      // closing handshake has not yet started, then the user agent
+      // must send a WebSocket Message comprised of the data argument
+      // using a text frame opcode; if the data cannot be sent, e.g.
+      // because it would need to be buffered but the buffer is full,
+      // the user agent must flag the WebSocket as full and then close
+      // the WebSocket connection. Any invocation of this method with a
+      // string argument that does not throw an exception must increase
+      // the bufferedAmount attribute by the number of bytes needed to
+      // express the argument as UTF-8.
+
+      const value = Buffer.from(data)
+      const frame = new WebsocketFrameSend(value)
+      const buffer = frame.createFrame(opcodes.TEXT)
+
+      this.#bufferedAmount += value.byteLength
+      socket.write(buffer, () => {
+        this.#bufferedAmount -= value.byteLength
+      })
+    } else if (types.isArrayBuffer(data)) {
+      // If the WebSocket connection is established, and the WebSocket
+      // closing handshake has not yet started, then the user agent must
+      // send a WebSocket Message comprised of data using a binary frame
+      // opcode; if the data cannot be sent, e.g. because it would need
+      // to be buffered but the buffer is full, the user agent must flag
+      // the WebSocket as full and then close the WebSocket connection.
+      // The data to be sent is the data stored in the buffer described
+      // by the ArrayBuffer object. Any invocation of this method with an
+      // ArrayBuffer argument that does not throw an exception must
+      // increase the bufferedAmount attribute by the length of the
+      // ArrayBuffer in bytes.
+
+      const value = Buffer.from(data)
+      const frame = new WebsocketFrameSend(value)
+      const buffer = frame.createFrame(opcodes.BINARY)
+
+      this.#bufferedAmount += value.byteLength
+      socket.write(buffer, () => {
+        this.#bufferedAmount -= value.byteLength
+      })
+    } else if (ArrayBuffer.isView(data)) {
+      // If the WebSocket connection is established, and the WebSocket
+      // closing handshake has not yet started, then the user agent must
+      // send a WebSocket Message comprised of data using a binary frame
+      // opcode; if the data cannot be sent, e.g. because it would need to
+      // be buffered but the buffer is full, the user agent must flag the
+      // WebSocket as full and then close the WebSocket connection. The
+      // data to be sent is the data stored in the section of the buffer
+      // described by the ArrayBuffer object that data references. Any
+      // invocation of this method with this kind of argument that does
+      // not throw an exception must increase the bufferedAmount attribute
+      // by the length of data’s buffer in bytes.
+
+      const ab = Buffer.from(data, data.byteOffset, data.byteLength)
+
+      const frame = new WebsocketFrameSend(ab)
+      const buffer = frame.createFrame(opcodes.BINARY)
+
+      this.#bufferedAmount += ab.byteLength
+      socket.write(buffer, () => {
+        this.#bufferedAmount -= ab.byteLength
+      })
+    } else if (isBlobLike(data)) {
+      // If the WebSocket connection is established, and the WebSocket
+      // closing handshake has not yet started, then the user agent must
+      // send a WebSocket Message comprised of data using a binary frame
+      // opcode; if the data cannot be sent, e.g. because it would need to
+      // be buffered but the buffer is full, the user agent must flag the
+      // WebSocket as full and then close the WebSocket connection. The data
+      // to be sent is the raw data represented by the Blob object. Any
+      // invocation of this method with a Blob argument that does not throw
+      // an exception must increase the bufferedAmount attribute by the size
+      // of the Blob object’s raw data, in bytes.
+
+      const frame = new WebsocketFrameSend()
+
+      data.arrayBuffer().then((ab) => {
+        const value = Buffer.from(ab)
+        frame.frameData = value
+        const buffer = frame.createFrame(opcodes.BINARY)
+
+        this.#bufferedAmount += value.byteLength
+        socket.write(buffer, () => {
+          this.#bufferedAmount -= value.byteLength
+        })
+      })
+    }
+  }
+
+  get readyState () {
+    webidl.brandCheck(this, WebSocket)
+
+    // The readyState getter steps are to return this's ready state.
+    return this[kReadyState]
+  }
+
+  get bufferedAmount () {
+    webidl.brandCheck(this, WebSocket)
+
+    return this.#bufferedAmount
+  }
+
+  get url () {
+    webidl.brandCheck(this, WebSocket)
+
+    // The url getter steps are to return this's url, serialized.
+    return URLSerializer(this[kWebSocketURL])
+  }
+
+  get extensions () {
+    webidl.brandCheck(this, WebSocket)
+
+    return this.#extensions
+  }
+
+  get protocol () {
+    webidl.brandCheck(this, WebSocket)
+
+    return this.#protocol
+  }
+
+  get onopen () {
+    webidl.brandCheck(this, WebSocket)
+
+    return this.#events.open
+  }
+
+  set onopen (fn) {
+    webidl.brandCheck(this, WebSocket)
+
+    if (this.#events.open) {
+      this.removeEventListener('open', this.#events.open)
+    }
+
+    if (typeof fn === 'function') {
+      this.#events.open = fn
+      this.addEventListener('open', fn)
+    } else {
+      this.#events.open = null
+    }
+  }
+
+  get onerror () {
+    webidl.brandCheck(this, WebSocket)
+
+    return this.#events.error
+  }
+
+  set onerror (fn) {
+    webidl.brandCheck(this, WebSocket)
+
+    if (this.#events.error) {
+      this.removeEventListener('error', this.#events.error)
+    }
+
+    if (typeof fn === 'function') {
+      this.#events.error = fn
+      this.addEventListener('error', fn)
+    } else {
+      this.#events.error = null
+    }
+  }
+
+  get onclose () {
+    webidl.brandCheck(this, WebSocket)
+
+    return this.#events.close
+  }
+
+  set onclose (fn) {
+    webidl.brandCheck(this, WebSocket)
+
+    if (this.#events.close) {
+      this.removeEventListener('close', this.#events.close)
+    }
+
+    if (typeof fn === 'function') {
+      this.#events.close = fn
+      this.addEventListener('close', fn)
+    } else {
+      this.#events.close = null
+    }
+  }
+
+  get onmessage () {
+    webidl.brandCheck(this, WebSocket)
+
+    return this.#events.message
+  }
+
+  set onmessage (fn) {
+    webidl.brandCheck(this, WebSocket)
+
+    if (this.#events.message) {
+      this.removeEventListener('message', this.#events.message)
+    }
+
+    if (typeof fn === 'function') {
+      this.#events.message = fn
+      this.addEventListener('message', fn)
+    } else {
+      this.#events.message = null
+    }
+  }
+
+  get binaryType () {
+    webidl.brandCheck(this, WebSocket)
+
+    return this[kBinaryType]
+  }
+
+  set binaryType (type) {
+    webidl.brandCheck(this, WebSocket)
+
+    if (type !== 'blob' && type !== 'arraybuffer') {
+      this[kBinaryType] = 'blob'
+    } else {
+      this[kBinaryType] = type
+    }
+  }
+
+  /**
+   * @see https://websockets.spec.whatwg.org/#feedback-from-the-protocol
+   */
+  #onConnectionEstablished (response) {
+    // processResponse is called when the "response’s header list has been received and initialized."
+    // once this happens, the connection is open
+    this[kResponse] = response
+
+    const parser = new ByteParser(this)
+    parser.on('drain', function onParserDrain () {
+      this.ws[kResponse].socket.resume()
+    })
+
+    response.socket.ws = this
+    this[kByteParser] = parser
+
+    // 1. Change the ready state to OPEN (1).
+    this[kReadyState] = states.OPEN
+
+    // 2. Change the extensions attribute’s value to the extensions in use, if
+    //    it is not the null value.
+    // https://datatracker.ietf.org/doc/html/rfc6455#section-9.1
+    const extensions = response.headersList.get('sec-websocket-extensions')
+
+    if (extensions !== null) {
+      this.#extensions = extensions
+    }
+
+    // 3. Change the protocol attribute’s value to the subprotocol in use, if
+    //    it is not the null value.
+    // https://datatracker.ietf.org/doc/html/rfc6455#section-1.9
+    const protocol = response.headersList.get('sec-websocket-protocol')
+
+    if (protocol !== null) {
+      this.#protocol = protocol
+    }
+
+    // 4. Fire an event named open at the WebSocket object.
+    fireEvent('open', this)
+  }
+}
+
+// https://websockets.spec.whatwg.org/#dom-websocket-connecting
+WebSocket.CONNECTING = WebSocket.prototype.CONNECTING = states.CONNECTING
+// https://websockets.spec.whatwg.org/#dom-websocket-open
+WebSocket.OPEN = WebSocket.prototype.OPEN = states.OPEN
+// https://websockets.spec.whatwg.org/#dom-websocket-closing
+WebSocket.CLOSING = WebSocket.prototype.CLOSING = states.CLOSING
+// https://websockets.spec.whatwg.org/#dom-websocket-closed
+WebSocket.CLOSED = WebSocket.prototype.CLOSED = states.CLOSED
+
+Object.defineProperties(WebSocket.prototype, {
+  CONNECTING: staticPropertyDescriptors,
+  OPEN: staticPropertyDescriptors,
+  CLOSING: staticPropertyDescriptors,
+  CLOSED: staticPropertyDescriptors,
+  url: kEnumerableProperty,
+  readyState: kEnumerableProperty,
+  bufferedAmount: kEnumerableProperty,
+  onopen: kEnumerableProperty,
+  onerror: kEnumerableProperty,
+  onclose: kEnumerableProperty,
+  close: kEnumerableProperty,
+  onmessage: kEnumerableProperty,
+  binaryType: kEnumerableProperty,
+  send: kEnumerableProperty,
+  extensions: kEnumerableProperty,
+  protocol: kEnumerableProperty,
+  [Symbol.toStringTag]: {
+    value: 'WebSocket',
+    writable: false,
+    enumerable: false,
+    configurable: true
+  }
+})
+
+Object.defineProperties(WebSocket, {
+  CONNECTING: staticPropertyDescriptors,
+  OPEN: staticPropertyDescriptors,
+  CLOSING: staticPropertyDescriptors,
+  CLOSED: staticPropertyDescriptors
+})
+
+webidl.converters['sequence<DOMString>'] = webidl.sequenceConverter(
+  webidl.converters.DOMString
+)
+
+webidl.converters['DOMString or sequence<DOMString>'] = function (V) {
+  if (webidl.util.Type(V) === 'Object' && Symbol.iterator in V) {
+    return webidl.converters['sequence<DOMString>'](V)
+  }
+
+  return webidl.converters.DOMString(V)
+}
+
+// This implements the proposal made in https://github.com/whatwg/websockets/issues/42
+webidl.converters.WebSocketInit = webidl.dictionaryConverter([
+  {
+    key: 'protocols',
+    converter: webidl.converters['DOMString or sequence<DOMString>'],
+    get defaultValue () {
+      return []
+    }
+  },
+  {
+    key: 'dispatcher',
+    converter: (V) => V,
+    get defaultValue () {
+      return getGlobalDispatcher()
+    }
+  },
+  {
+    key: 'headers',
+    converter: webidl.nullableConverter(webidl.converters.HeadersInit)
+  }
+])
+
+webidl.converters['DOMString or sequence<DOMString> or WebSocketInit'] = function (V) {
+  if (webidl.util.Type(V) === 'Object' && !(Symbol.iterator in V)) {
+    return webidl.converters.WebSocketInit(V)
+  }
+
+  return { protocols: webidl.converters['DOMString or sequence<DOMString>'](V) }
+}
+
+webidl.converters.WebSocketSendData = function (V) {
+  if (webidl.util.Type(V) === 'Object') {
+    if (isBlobLike(V)) {
+      return webidl.converters.Blob(V, { strict: false })
+    }
+
+    if (ArrayBuffer.isView(V) || types.isAnyArrayBuffer(V)) {
+      return webidl.converters.BufferSource(V)
+    }
+  }
+
+  return webidl.converters.USVString(V)
+}
+
+module.exports = {
+  WebSocket
+}
+
+
+/***/ }),
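+
+// A minimal usage sketch of the WebSocket class defined above, assuming it is
+// loaded through its bundled id (6102); the URL and subprotocol are placeholders.
+//
+//   const { WebSocket } = __nccwpck_require__(6102)
+//   const ws = new WebSocket('wss://example.invalid/socket', ['chat'])
+//   ws.binaryType = 'arraybuffer'            // otherwise binary frames arrive as Blob
+//   ws.onopen = () => ws.send('hello')
+//   ws.onmessage = (event) => console.log(event.data)
+//   ws.onclose = () => console.log('closed')
+//   // close(code, reason): code must be 1000 or 3000–4999, reason at most 123 UTF-8 bytes
+//   setTimeout(() => ws.close(1000, 'done'), 1000)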
+
+/***/ 2613:
+/***/ ((module) => {
+
+module.exports = __WEBPACK_EXTERNAL_createRequire(import.meta.url)("assert");
+
+/***/ }),
+
+/***/ 290:
+/***/ ((module) => {
+
+module.exports = __WEBPACK_EXTERNAL_createRequire(import.meta.url)("async_hooks");
+
+/***/ }),
+
+/***/ 181:
+/***/ ((module) => {
+
+module.exports = __WEBPACK_EXTERNAL_createRequire(import.meta.url)("buffer");
+
+/***/ }),
+
+/***/ 5317:
+/***/ ((module) => {
+
+module.exports = __WEBPACK_EXTERNAL_createRequire(import.meta.url)("child_process");
+
+/***/ }),
+
+/***/ 4236:
+/***/ ((module) => {
+
+module.exports = __WEBPACK_EXTERNAL_createRequire(import.meta.url)("console");
+
+/***/ }),
+
+/***/ 6982:
+/***/ ((module) => {
+
+module.exports = __WEBPACK_EXTERNAL_createRequire(import.meta.url)("crypto");
+
+/***/ }),
+
+/***/ 1637:
+/***/ ((module) => {
+
+module.exports = __WEBPACK_EXTERNAL_createRequire(import.meta.url)("diagnostics_channel");
+
+/***/ }),
+
+/***/ 4434:
+/***/ ((module) => {
+
+module.exports = __WEBPACK_EXTERNAL_createRequire(import.meta.url)("events");
+
+/***/ }),
+
+/***/ 9896:
+/***/ ((module) => {
+
+module.exports = __WEBPACK_EXTERNAL_createRequire(import.meta.url)("fs");
+
+/***/ }),
+
+/***/ 8611:
+/***/ ((module) => {
+
+module.exports = __WEBPACK_EXTERNAL_createRequire(import.meta.url)("http");
+
+/***/ }),
+
+/***/ 5675:
+/***/ ((module) => {
+
+module.exports = __WEBPACK_EXTERNAL_createRequire(import.meta.url)("http2");
+
+/***/ }),
+
+/***/ 5692:
+/***/ ((module) => {
+
+module.exports = __WEBPACK_EXTERNAL_createRequire(import.meta.url)("https");
+
+/***/ }),
+
+/***/ 9278:
+/***/ ((module) => {
+
+module.exports = __WEBPACK_EXTERNAL_createRequire(import.meta.url)("net");
+
+/***/ }),
+
+/***/ 8474:
+/***/ ((module) => {
+
+module.exports = __WEBPACK_EXTERNAL_createRequire(import.meta.url)("node:events");
+
+/***/ }),
+
+/***/ 7067:
+/***/ ((module) => {
+
+module.exports = __WEBPACK_EXTERNAL_createRequire(import.meta.url)("node:http");
+
+/***/ }),
+
+/***/ 4708:
+/***/ ((module) => {
+
+module.exports = __WEBPACK_EXTERNAL_createRequire(import.meta.url)("node:https");
+
+/***/ }),
+
+/***/ 8161:
+/***/ ((module) => {
+
+module.exports = __WEBPACK_EXTERNAL_createRequire(import.meta.url)("node:os");
+
+/***/ }),
+
+/***/ 1708:
+/***/ ((module) => {
+
+module.exports = __WEBPACK_EXTERNAL_createRequire(import.meta.url)("node:process");
+
+/***/ }),
+
+/***/ 7075:
+/***/ ((module) => {
+
+module.exports = __WEBPACK_EXTERNAL_createRequire(import.meta.url)("node:stream");
+
+/***/ }),
+
+/***/ 7975:
+/***/ ((module) => {
+
+module.exports = __WEBPACK_EXTERNAL_createRequire(import.meta.url)("node:util");
+
+/***/ }),
+
+/***/ 8522:
+/***/ ((module) => {
+
+module.exports = __WEBPACK_EXTERNAL_createRequire(import.meta.url)("node:zlib");
+
+/***/ }),
+
+/***/ 857:
+/***/ ((module) => {
+
+module.exports = __WEBPACK_EXTERNAL_createRequire(import.meta.url)("os");
+
+/***/ }),
+
+/***/ 6928:
+/***/ ((module) => {
+
+module.exports = __WEBPACK_EXTERNAL_createRequire(import.meta.url)("path");
+
+/***/ }),
+
+/***/ 2987:
+/***/ ((module) => {
+
+module.exports = __WEBPACK_EXTERNAL_createRequire(import.meta.url)("perf_hooks");
+
+/***/ }),
+
+/***/ 3480:
+/***/ ((module) => {
+
+module.exports = __WEBPACK_EXTERNAL_createRequire(import.meta.url)("querystring");
+
+/***/ }),
+
+/***/ 2203:
+/***/ ((module) => {
+
+module.exports = __WEBPACK_EXTERNAL_createRequire(import.meta.url)("stream");
+
+/***/ }),
+
+/***/ 3774:
+/***/ ((module) => {
+
+module.exports = __WEBPACK_EXTERNAL_createRequire(import.meta.url)("stream/web");
+
+/***/ }),
+
+/***/ 3193:
+/***/ ((module) => {
+
+module.exports = __WEBPACK_EXTERNAL_createRequire(import.meta.url)("string_decoder");
+
+/***/ }),
+
+/***/ 3557:
+/***/ ((module) => {
+
+module.exports = __WEBPACK_EXTERNAL_createRequire(import.meta.url)("timers");
+
+/***/ }),
+
+/***/ 4756:
+/***/ ((module) => {
+
+module.exports = __WEBPACK_EXTERNAL_createRequire(import.meta.url)("tls");
+
+/***/ }),
+
+/***/ 2018:
+/***/ ((module) => {
+
+module.exports = __WEBPACK_EXTERNAL_createRequire(import.meta.url)("tty");
+
+/***/ }),
+
+/***/ 7016:
+/***/ ((module) => {
+
+module.exports = __WEBPACK_EXTERNAL_createRequire(import.meta.url)("url");
+
+/***/ }),
+
+/***/ 9023:
+/***/ ((module) => {
+
+module.exports = __WEBPACK_EXTERNAL_createRequire(import.meta.url)("util");
+
+/***/ }),
+
+/***/ 8253:
+/***/ ((module) => {
+
+module.exports = __WEBPACK_EXTERNAL_createRequire(import.meta.url)("util/types");
+
+/***/ }),
+
+/***/ 8167:
+/***/ ((module) => {
+
+module.exports = __WEBPACK_EXTERNAL_createRequire(import.meta.url)("worker_threads");
+
+/***/ }),
+
+/***/ 3106:
+/***/ ((module) => {
+
+module.exports = __WEBPACK_EXTERNAL_createRequire(import.meta.url)("zlib");
+
+/***/ }),
+
+/***/ 4346:
+/***/ ((__unused_webpack_module, exports) => {
+
+
+// Copyright (c) Microsoft Corporation.
+// Licensed under the MIT license.
+Object.defineProperty(exports, "__esModule", ({ value: true }));
+exports.AbortError = void 0;
+/**
+ * This error is thrown when an asynchronous operation has been aborted.
+ * Check for this error by testing that the `name` property of the error
+ * matches `"AbortError"`.
+ *
+ * @example
+ * ```ts
+ * const controller = new AbortController();
+ * controller.abort();
+ * try {
+ *   doAsyncWork(controller.signal)
+ * } catch (e) {
+ *   if (e.name === 'AbortError') {
+ *     // handle abort error here.
+ *   }
+ * }
+ * ```
+ */
+class AbortError extends Error {
+    constructor(message) {
+        super(message);
+        this.name = "AbortError";
+    }
+}
+exports.AbortError = AbortError;
+//# sourceMappingURL=AbortError.js.map
+
+/***/ }),
+
+/***/ 764:
+/***/ ((__unused_webpack_module, exports, __nccwpck_require__) => {
+
+
+// Copyright (c) Microsoft Corporation.
+// Licensed under the MIT license.
+Object.defineProperty(exports, "__esModule", ({ value: true }));
+exports.AbortError = void 0;
+var AbortError_js_1 = __nccwpck_require__(4346);
+Object.defineProperty(exports, "AbortError", ({ enumerable: true, get: function () { return AbortError_js_1.AbortError; } }));
+//# sourceMappingURL=index.js.map
+
+/***/ }),
+
+/***/ 6268:
+/***/ ((__unused_webpack_module, exports) => {
+
+
+// Copyright (c) Microsoft Corporation.
+// Licensed under the MIT License.
+Object.defineProperty(exports, "__esModule", ({ value: true }));
+exports.AzureKeyCredential = void 0;
+/**
+ * A static-key-based credential that supports updating
+ * the underlying key value.
+ */
+class AzureKeyCredential {
+    /**
+     * The value of the key to be used in authentication
+     */
+    get key() {
+        return this._key;
+    }
+    /**
+     * Create an instance of an AzureKeyCredential for use
+     * with a service client.
+     *
+     * @param key - The initial value of the key to use in authentication
+     */
+    constructor(key) {
+        if (!key) {
+            throw new Error("key must be a non-empty string");
+        }
+        this._key = key;
+    }
+    /**
+     * Change the value of the key.
+     *
+     * Updates will take effect upon the next request after
+     * updating the key value.
+     *
+     * @param newKey - The new key value to be used
+     */
+    update(newKey) {
+        this._key = newKey;
+    }
+}
+exports.AzureKeyCredential = AzureKeyCredential;
+//# sourceMappingURL=azureKeyCredential.js.map
+
+/***/ }),
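+
+// A minimal usage sketch of AzureKeyCredential above, assuming it is loaded through
+// its bundled id (6268); the key values are placeholders.
+//
+//   const { AzureKeyCredential } = __nccwpck_require__(6268)
+//   const credential = new AzureKeyCredential('initial-key')
+//   credential.key                    // 'initial-key'
+//   credential.update('rotated-key')  // takes effect on the next request
+//   credential.key                    // 'rotated-key'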
+
+/***/ 1421:
+/***/ ((__unused_webpack_module, exports, __nccwpck_require__) => {
+
+
+// Copyright (c) Microsoft Corporation.
+// Licensed under the MIT License.
+Object.defineProperty(exports, "__esModule", ({ value: true }));
+exports.AzureNamedKeyCredential = void 0;
+exports.isNamedKeyCredential = isNamedKeyCredential;
+const core_util_1 = __nccwpck_require__(402);
+/**
+ * A static name/key-based credential that supports updating
+ * the underlying name and key values.
+ */
+class AzureNamedKeyCredential {
+    /**
+     * The value of the key to be used in authentication.
+     */
+    get key() {
+        return this._key;
+    }
+    /**
+     * The value of the name to be used in authentication.
+     */
+    get name() {
+        return this._name;
+    }
+    /**
+     * Create an instance of an AzureNamedKeyCredential for use
+     * with a service client.
+     *
+     * @param name - The initial value of the name to use in authentication.
+     * @param key - The initial value of the key to use in authentication.
+     */
+    constructor(name, key) {
+        if (!name || !key) {
+            throw new TypeError("name and key must be non-empty strings");
+        }
+        this._name = name;
+        this._key = key;
+    }
+    /**
+     * Change the value of the key.
+     *
+     * Updates will take effect upon the next request after
+     * updating the key value.
+     *
+     * @param newName - The new name value to be used.
+     * @param newKey - The new key value to be used.
+     */
+    update(newName, newKey) {
+        if (!newName || !newKey) {
+            throw new TypeError("newName and newKey must be non-empty strings");
+        }
+        this._name = newName;
+        this._key = newKey;
+    }
+}
+exports.AzureNamedKeyCredential = AzureNamedKeyCredential;
+/**
+ * Tests an object to determine whether it implements NamedKeyCredential.
+ *
+ * @param credential - The assumed NamedKeyCredential to be tested.
+ */
+function isNamedKeyCredential(credential) {
+    return ((0, core_util_1.isObjectWithProperties)(credential, ["name", "key"]) &&
+        typeof credential.key === "string" &&
+        typeof credential.name === "string");
+}
+//# sourceMappingURL=azureNamedKeyCredential.js.map
+
+/***/ }),
+
+/***/ 9690:
+/***/ ((__unused_webpack_module, exports, __nccwpck_require__) => {
+
+
+// Copyright (c) Microsoft Corporation.
+// Licensed under the MIT License.
+Object.defineProperty(exports, "__esModule", ({ value: true }));
+exports.AzureSASCredential = void 0;
+exports.isSASCredential = isSASCredential;
+const core_util_1 = __nccwpck_require__(402);
+/**
+ * A static-signature-based credential that supports updating
+ * the underlying signature value.
+ */
+class AzureSASCredential {
+    /**
+     * The value of the shared access signature to be used in authentication
+     */
+    get signature() {
+        return this._signature;
+    }
+    /**
+     * Create an instance of an AzureSASCredential for use
+     * with a service client.
+     *
+     * @param signature - The initial value of the shared access signature to use in authentication
+     */
+    constructor(signature) {
+        if (!signature) {
+            throw new Error("shared access signature must be a non-empty string");
+        }
+        this._signature = signature;
+    }
+    /**
+     * Change the value of the signature.
+     *
+     * Updates will take effect upon the next request after
+     * updating the signature value.
+     *
+     * @param newSignature - The new shared access signature value to be used
+     */
+    update(newSignature) {
+        if (!newSignature) {
+            throw new Error("shared access signature must be a non-empty string");
+        }
+        this._signature = newSignature;
+    }
+}
+exports.AzureSASCredential = AzureSASCredential;
+/**
+ * Tests an object to determine whether it implements SASCredential.
+ *
+ * @param credential - The assumed SASCredential to be tested.
+ */
+function isSASCredential(credential) {
+    return ((0, core_util_1.isObjectWithProperties)(credential, ["signature"]) && typeof credential.signature === "string");
+}
+//# sourceMappingURL=azureSASCredential.js.map
+
+/***/ }),
+
+/***/ 9967:
+/***/ ((__unused_webpack_module, exports, __nccwpck_require__) => {
+
+
+Object.defineProperty(exports, "__esModule", ({ value: true }));
+exports.isTokenCredential = exports.isSASCredential = exports.AzureSASCredential = exports.isNamedKeyCredential = exports.AzureNamedKeyCredential = exports.isKeyCredential = exports.AzureKeyCredential = void 0;
+var azureKeyCredential_js_1 = __nccwpck_require__(6268);
+Object.defineProperty(exports, "AzureKeyCredential", ({ enumerable: true, get: function () { return azureKeyCredential_js_1.AzureKeyCredential; } }));
+var keyCredential_js_1 = __nccwpck_require__(1117);
+Object.defineProperty(exports, "isKeyCredential", ({ enumerable: true, get: function () { return keyCredential_js_1.isKeyCredential; } }));
+var azureNamedKeyCredential_js_1 = __nccwpck_require__(1421);
+Object.defineProperty(exports, "AzureNamedKeyCredential", ({ enumerable: true, get: function () { return azureNamedKeyCredential_js_1.AzureNamedKeyCredential; } }));
+Object.defineProperty(exports, "isNamedKeyCredential", ({ enumerable: true, get: function () { return azureNamedKeyCredential_js_1.isNamedKeyCredential; } }));
+var azureSASCredential_js_1 = __nccwpck_require__(9690);
+Object.defineProperty(exports, "AzureSASCredential", ({ enumerable: true, get: function () { return azureSASCredential_js_1.AzureSASCredential; } }));
+Object.defineProperty(exports, "isSASCredential", ({ enumerable: true, get: function () { return azureSASCredential_js_1.isSASCredential; } }));
+var tokenCredential_js_1 = __nccwpck_require__(4547);
+Object.defineProperty(exports, "isTokenCredential", ({ enumerable: true, get: function () { return tokenCredential_js_1.isTokenCredential; } }));
+//# sourceMappingURL=index.js.map
+
+/***/ }),
+
+/***/ 1117:
+/***/ ((__unused_webpack_module, exports, __nccwpck_require__) => {
+
+
+// Copyright (c) Microsoft Corporation.
+// Licensed under the MIT License.
+Object.defineProperty(exports, "__esModule", ({ value: true }));
+exports.isKeyCredential = isKeyCredential;
+const core_util_1 = __nccwpck_require__(402);
+/**
+ * Tests an object to determine whether it implements KeyCredential.
+ *
+ * @param credential - The assumed KeyCredential to be tested.
+ */
+function isKeyCredential(credential) {
+    return (0, core_util_1.isObjectWithProperties)(credential, ["key"]) && typeof credential.key === "string";
+}
+//# sourceMappingURL=keyCredential.js.map
+
+/***/ }),
+
+/***/ 4547:
+/***/ ((__unused_webpack_module, exports) => {
+
+
+// Copyright (c) Microsoft Corporation.
+// Licensed under the MIT License.
+Object.defineProperty(exports, "__esModule", ({ value: true }));
+exports.isBearerToken = isBearerToken;
+exports.isPopToken = isPopToken;
+exports.isTokenCredential = isTokenCredential;
+/**
+ * @internal
+ * @param accessToken - Access token
+ * @returns Whether a token is bearer type or not
+ */
+function isBearerToken(accessToken) {
+    return !accessToken.tokenType || accessToken.tokenType === "Bearer";
+}
+/**
+ * @internal
+ * @param accessToken - Access token
+ * @returns Whether a token is Pop token or not
+ */
+function isPopToken(accessToken) {
+    return accessToken.tokenType === "pop";
+}
+/**
+ * Tests an object to determine whether it implements TokenCredential.
+ *
+ * @param credential - The assumed TokenCredential to be tested.
+ */
+function isTokenCredential(credential) {
+    // Check for an object with a 'getToken' function and possibly with
+    // a 'signRequest' function.  We do this check to make sure that
+    // a ServiceClientCredentials implementor (like TokenClientCredentials
+    // in ms-rest-nodeauth) doesn't get mistaken for a TokenCredential if
+    // it doesn't actually implement TokenCredential also.
+    const castCredential = credential;
+    return (castCredential &&
+        typeof castCredential.getToken === "function" &&
+        (castCredential.signRequest === undefined || castCredential.getToken.length > 0));
+}
+//# sourceMappingURL=tokenCredential.js.map
+
+/***/ }),
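+
+// A minimal sketch of the isTokenCredential duck-typing check above, assuming it is
+// loaded through its bundled id (4547); objects with a zero-argument getToken plus a
+// signRequest are treated as ServiceClientCredentials rather than TokenCredential.
+//
+//   const { isTokenCredential } = __nccwpck_require__(4547)
+//   isTokenCredential({ getToken: async (scopes) => ({ token: 't', expiresOnTimestamp: 0 }) }) // true
+//   isTokenCredential({ getToken: () => 't', signRequest: () => {} })                          // false
+//   isTokenCredential({})                                                                      // false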
+
+/***/ 4521:
+/***/ ((__unused_webpack_module, exports, __nccwpck_require__) => {
+
+
+// Copyright (c) Microsoft Corporation.
+// Licensed under the MIT license.
+Object.defineProperty(exports, "__esModule", ({ value: true }));
+exports.authorizeRequestOnClaimChallenge = exports.parseCAEChallenge = void 0;
+const log_js_1 = __nccwpck_require__(5151);
+const base64_js_1 = __nccwpck_require__(3582);
+/**
+ * Converts: `Bearer a="b", c="d", Bearer d="e", f="g"`.
+ * Into: `[ { a: 'b', c: 'd' }, { d: 'e', f: 'g' } ]`.
+ *
+ * @internal
+ */
+function parseCAEChallenge(challenges) {
+    const bearerChallenges = `, ${challenges.trim()}`.split(", Bearer ").filter((x) => x);
+    return bearerChallenges.map((challenge) => {
+        const challengeParts = `${challenge.trim()}, `.split('", ').filter((x) => x);
+        const keyValuePairs = challengeParts.map((keyValue) => (([key, value]) => ({ [key]: value }))(keyValue.trim().split('="')));
+        // Key-value pairs to plain object:
+        return keyValuePairs.reduce((a, b) => (Object.assign(Object.assign({}, a), b)), {});
+    });
+}
+exports.parseCAEChallenge = parseCAEChallenge;
+/**
+ * This function can be used as a callback for the `bearerTokenAuthenticationPolicy` of `@azure/core-rest-pipeline`, to support CAE challenges:
+ * [Continuous Access Evaluation](https://docs.microsoft.com/azure/active-directory/conditional-access/concept-continuous-access-evaluation).
+ *
+ * Call the `bearerTokenAuthenticationPolicy` with the following options:
+ *
+ * ```ts
+ * import { bearerTokenAuthenticationPolicy } from "@azure/core-rest-pipeline";
+ * import { authorizeRequestOnClaimChallenge } from "@azure/core-client";
+ *
+ * const bearerTokenAuthenticationPolicy = bearerTokenAuthenticationPolicy({
+ *   authorizeRequestOnChallenge: authorizeRequestOnClaimChallenge
+ * });
+ * ```
+ *
+ * Once provided, the `bearerTokenAuthenticationPolicy` policy will internally handle Continuous Access Evaluation (CAE) challenges.
+ * When it can't complete a challenge it will return the 401 (unauthorized) response from ARM.
+ *
+ * Example challenge with claims:
+ *
+ * ```
+ * Bearer authorization_uri="https://login.windows-ppe.net/", error="invalid_token",
+ * error_description="User session has been revoked",
+ * claims="eyJhY2Nlc3NfdG9rZW4iOnsibmJmIjp7ImVzc2VudGlhbCI6dHJ1ZSwgInZhbHVlIjoiMTYwMzc0MjgwMCJ9fX0="
+ * ```
+ */
+async function authorizeRequestOnClaimChallenge(onChallengeOptions) {
+    const { scopes, response } = onChallengeOptions;
+    const logger = onChallengeOptions.logger || log_js_1.logger;
+    const challenge = response.headers.get("WWW-Authenticate");
+    if (!challenge) {
+        logger.info(`The WWW-Authenticate header was missing. Failed to perform the Continuous Access Evaluation authentication flow.`);
+        return false;
+    }
+    const challenges = parseCAEChallenge(challenge) || [];
+    const parsedChallenge = challenges.find((x) => x.claims);
+    if (!parsedChallenge) {
+        logger.info(`The WWW-Authenticate header was missing the necessary "claims" to perform the Continuous Access Evaluation authentication flow.`);
+        return false;
+    }
+    const accessToken = await onChallengeOptions.getAccessToken(parsedChallenge.scope ? [parsedChallenge.scope] : scopes, {
+        claims: (0, base64_js_1.decodeStringToString)(parsedChallenge.claims),
+    });
+    if (!accessToken) {
+        return false;
+    }
+    onChallengeOptions.request.headers.set("Authorization", `Bearer ${accessToken.token}`);
+    return true;
+}
+exports.authorizeRequestOnClaimChallenge = authorizeRequestOnClaimChallenge;
+//# sourceMappingURL=authorizeRequestOnClaimChallenge.js.map
+
+/***/ }),
+
+/***/ 6851:
+/***/ ((__unused_webpack_module, exports) => {
+
+
+// Copyright (c) Microsoft Corporation.
+// Licensed under the MIT license.
+Object.defineProperty(exports, "__esModule", ({ value: true }));
+exports.authorizeRequestOnTenantChallenge = void 0;
+/**
+ * A set of constants used internally when processing requests.
+ */
+const Constants = {
+    DefaultScope: "/.default",
+    /**
+     * Defines constants for use with HTTP headers.
+     */
+    HeaderConstants: {
+        /**
+         * The Authorization header.
+         */
+        AUTHORIZATION: "authorization",
+    },
+};
+function isUuid(text) {
+    return /^[0-9a-fA-F]{8}\b-[0-9a-fA-F]{4}\b-[0-9a-fA-F]{4}\b-[0-9a-fA-F]{4}\b-[0-9a-fA-F]{12}$/.test(text);
+}
+/**
+ * Defines a callback to handle auth challenge for Storage APIs.
+ * This implements the bearer challenge process described here: https://docs.microsoft.com/rest/api/storageservices/authorize-with-azure-active-directory#bearer-challenge
+ * Handling has specific features for storage that depart from the general AAD challenge docs.
+ **/
+const authorizeRequestOnTenantChallenge = async (challengeOptions) => {
+    const requestOptions = requestToOptions(challengeOptions.request);
+    const challenge = getChallenge(challengeOptions.response);
+    if (challenge) {
+        const challengeInfo = parseChallenge(challenge);
+        const challengeScopes = buildScopes(challengeOptions, challengeInfo);
+        const tenantId = extractTenantId(challengeInfo);
+        if (!tenantId) {
+            return false;
+        }
+        const accessToken = await challengeOptions.getAccessToken(challengeScopes, Object.assign(Object.assign({}, requestOptions), { tenantId }));
+        if (!accessToken) {
+            return false;
+        }
+        challengeOptions.request.headers.set(Constants.HeaderConstants.AUTHORIZATION, `Bearer ${accessToken.token}`);
+        return true;
+    }
+    return false;
+};
+exports.authorizeRequestOnTenantChallenge = authorizeRequestOnTenantChallenge;
+/**
+ * Extracts the tenant id from the challenge information
+ * The tenant id is contained in the authorization_uri as the first
+ * path part.
+ */
+function extractTenantId(challengeInfo) {
+    const parsedAuthUri = new URL(challengeInfo.authorization_uri);
+    const pathSegments = parsedAuthUri.pathname.split("/");
+    const tenantId = pathSegments[1];
+    if (tenantId && isUuid(tenantId)) {
+        return tenantId;
+    }
+    return undefined;
+}
+/**
+ * Builds the authentication scopes based on the information that comes in the
+ * challenge information. Scopes url is present in the resource_id, if it is empty
+ * we keep using the original scopes.
+ */
+function buildScopes(challengeOptions, challengeInfo) {
+    if (!challengeInfo.resource_id) {
+        return challengeOptions.scopes;
+    }
+    const challengeScopes = new URL(challengeInfo.resource_id);
+    challengeScopes.pathname = Constants.DefaultScope;
+    let scope = challengeScopes.toString();
+    if (scope === "https://disk.azure.com/.default") {
+        // the extra slash is required by the service
+        scope = "https://disk.azure.com//.default";
+    }
+    return [scope];
+}
+/**
+ * We will retrieve the challenge only if the response status code was 401,
+ * and if the response contained the header "WWW-Authenticate" with a non-empty value.
+ */
+function getChallenge(response) {
+    const challenge = response.headers.get("WWW-Authenticate");
+    if (response.status === 401 && challenge) {
+        return challenge;
+    }
+    return;
+}
+/**
+ * Converts: `Bearer a="b" c="d"`.
+ * Into: `[ { a: 'b', c: 'd' }]`.
+ *
+ * @internal
+ */
+function parseChallenge(challenge) {
+    const bearerChallenge = challenge.slice("Bearer ".length);
+    const challengeParts = `${bearerChallenge.trim()} `.split(" ").filter((x) => x);
+    const keyValuePairs = challengeParts.map((keyValue) => (([key, value]) => ({ [key]: value }))(keyValue.trim().split("=")));
+    // Key-value pairs to plain object:
+    return keyValuePairs.reduce((a, b) => (Object.assign(Object.assign({}, a), b)), {});
+}
+/**
+ * Extracts the options from a Pipeline Request for later re-use
+ */
+function requestToOptions(request) {
+    return {
+        abortSignal: request.abortSignal,
+        requestOptions: {
+            timeout: request.timeout,
+        },
+        tracingOptions: request.tracingOptions,
+    };
+}
+//# sourceMappingURL=authorizeRequestOnTenantChallenge.js.map
+
+/***/ }),
+
+/***/ 3582:
+/***/ ((__unused_webpack_module, exports) => {
+
+
+// Copyright (c) Microsoft Corporation.
+// Licensed under the MIT license.
+Object.defineProperty(exports, "__esModule", ({ value: true }));
+exports.decodeStringToString = exports.decodeString = exports.encodeByteArray = exports.encodeString = void 0;
+/**
+ * Encodes a string in base64 format.
+ * @param value - the string to encode
+ * @internal
+ */
+function encodeString(value) {
+    return Buffer.from(value).toString("base64");
+}
+exports.encodeString = encodeString;
+/**
+ * Encodes a byte array in base64 format.
+ * @param value - the Uint8Array to encode
+ * @internal
+ */
+function encodeByteArray(value) {
+    const bufferValue = value instanceof Buffer ? value : Buffer.from(value.buffer);
+    return bufferValue.toString("base64");
+}
+exports.encodeByteArray = encodeByteArray;
+/**
+ * Decodes a base64 string into a byte array.
+ * @param value - the base64 string to decode
+ * @internal
+ */
+function decodeString(value) {
+    return Buffer.from(value, "base64");
+}
+exports.decodeString = decodeString;
+/**
+ * Decodes a base64 string into a string.
+ * @param value - the base64 string to decode
+ * @internal
+ */
+function decodeStringToString(value) {
+    return Buffer.from(value, "base64").toString();
+}
+exports.decodeStringToString = decodeStringToString;
+//# sourceMappingURL=base64.js.map
+
+/***/ }),
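+
+// A minimal round-trip sketch for the base64 helpers above, assuming they are loaded
+// through their bundled id (3582); the input string is a placeholder.
+//
+//   const { encodeString, decodeStringToString } = __nccwpck_require__(3582)
+//   const encoded = encodeString('claims')   // 'Y2xhaW1z'
+//   decodeStringToString(encoded)            // 'claims'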
+
+/***/ 8290:
+/***/ ((__unused_webpack_module, exports, __nccwpck_require__) => {
+
+
+// Copyright (c) Microsoft Corporation.
+// Licensed under the MIT license.
+Object.defineProperty(exports, "__esModule", ({ value: true }));
+exports.deserializationPolicy = exports.deserializationPolicyName = void 0;
+const interfaces_js_1 = __nccwpck_require__(4617);
+const core_rest_pipeline_1 = __nccwpck_require__(5565);
+const serializer_js_1 = __nccwpck_require__(8913);
+const operationHelpers_js_1 = __nccwpck_require__(5047);
+const defaultJsonContentTypes = ["application/json", "text/json"];
+const defaultXmlContentTypes = ["application/xml", "application/atom+xml"];
+/**
+ * The programmatic identifier of the deserializationPolicy.
+ */
+exports.deserializationPolicyName = "deserializationPolicy";
+/**
+ * This policy handles parsing out responses according to OperationSpecs on the request.
+ */
+function deserializationPolicy(options = {}) {
+    var _a, _b, _c, _d, _e, _f, _g;
+    const jsonContentTypes = (_b = (_a = options.expectedContentTypes) === null || _a === void 0 ? void 0 : _a.json) !== null && _b !== void 0 ? _b : defaultJsonContentTypes;
+    const xmlContentTypes = (_d = (_c = options.expectedContentTypes) === null || _c === void 0 ? void 0 : _c.xml) !== null && _d !== void 0 ? _d : defaultXmlContentTypes;
+    const parseXML = options.parseXML;
+    const serializerOptions = options.serializerOptions;
+    const updatedOptions = {
+        xml: {
+            rootName: (_e = serializerOptions === null || serializerOptions === void 0 ? void 0 : serializerOptions.xml.rootName) !== null && _e !== void 0 ? _e : "",
+            includeRoot: (_f = serializerOptions === null || serializerOptions === void 0 ? void 0 : serializerOptions.xml.includeRoot) !== null && _f !== void 0 ? _f : false,
+            xmlCharKey: (_g = serializerOptions === null || serializerOptions === void 0 ? void 0 : serializerOptions.xml.xmlCharKey) !== null && _g !== void 0 ? _g : interfaces_js_1.XML_CHARKEY,
+        },
+    };
+    return {
+        name: exports.deserializationPolicyName,
+        async sendRequest(request, next) {
+            const response = await next(request);
+            return deserializeResponseBody(jsonContentTypes, xmlContentTypes, response, updatedOptions, parseXML);
+        },
+    };
+}
+exports.deserializationPolicy = deserializationPolicy;
+function getOperationResponseMap(parsedResponse) {
+    let result;
+    const request = parsedResponse.request;
+    const operationInfo = (0, operationHelpers_js_1.getOperationRequestInfo)(request);
+    const operationSpec = operationInfo === null || operationInfo === void 0 ? void 0 : operationInfo.operationSpec;
+    if (operationSpec) {
+        if (!(operationInfo === null || operationInfo === void 0 ? void 0 : operationInfo.operationResponseGetter)) {
+            result = operationSpec.responses[parsedResponse.status];
+        }
+        else {
+            result = operationInfo === null || operationInfo === void 0 ? void 0 : operationInfo.operationResponseGetter(operationSpec, parsedResponse);
+        }
+    }
+    return result;
+}
+function shouldDeserializeResponse(parsedResponse) {
+    const request = parsedResponse.request;
+    const operationInfo = (0, operationHelpers_js_1.getOperationRequestInfo)(request);
+    const shouldDeserialize = operationInfo === null || operationInfo === void 0 ? void 0 : operationInfo.shouldDeserialize;
+    let result;
+    if (shouldDeserialize === undefined) {
+        result = true;
+    }
+    else if (typeof shouldDeserialize === "boolean") {
+        result = shouldDeserialize;
+    }
+    else {
+        result = shouldDeserialize(parsedResponse);
+    }
+    return result;
+}
+async function deserializeResponseBody(jsonContentTypes, xmlContentTypes, response, options, parseXML) {
+    const parsedResponse = await parse(jsonContentTypes, xmlContentTypes, response, options, parseXML);
+    if (!shouldDeserializeResponse(parsedResponse)) {
+        return parsedResponse;
+    }
+    const operationInfo = (0, operationHelpers_js_1.getOperationRequestInfo)(parsedResponse.request);
+    const operationSpec = operationInfo === null || operationInfo === void 0 ? void 0 : operationInfo.operationSpec;
+    if (!operationSpec || !operationSpec.responses) {
+        return parsedResponse;
+    }
+    const responseSpec = getOperationResponseMap(parsedResponse);
+    const { error, shouldReturnResponse } = handleErrorResponse(parsedResponse, operationSpec, responseSpec, options);
+    if (error) {
+        throw error;
+    }
+    else if (shouldReturnResponse) {
+        return parsedResponse;
+    }
+    // An operation response spec does exist for current status code, so
+    // use it to deserialize the response.
+    if (responseSpec) {
+        if (responseSpec.bodyMapper) {
+            let valueToDeserialize = parsedResponse.parsedBody;
+            if (operationSpec.isXML && responseSpec.bodyMapper.type.name === serializer_js_1.MapperTypeNames.Sequence) {
+                valueToDeserialize =
+                    typeof valueToDeserialize === "object"
+                        ? valueToDeserialize[responseSpec.bodyMapper.xmlElementName]
+                        : [];
+            }
+            try {
+                parsedResponse.parsedBody = operationSpec.serializer.deserialize(responseSpec.bodyMapper, valueToDeserialize, "operationRes.parsedBody", options);
+            }
+            catch (deserializeError) {
+                const restError = new core_rest_pipeline_1.RestError(`Error ${deserializeError} occurred in deserializing the responseBody - ${parsedResponse.bodyAsText}`, {
+                    statusCode: parsedResponse.status,
+                    request: parsedResponse.request,
+                    response: parsedResponse,
+                });
+                throw restError;
+            }
+        }
+        else if (operationSpec.httpMethod === "HEAD") {
+            // head methods never have a body, but we return a boolean to indicate presence/absence of the resource
+            parsedResponse.parsedBody = response.status >= 200 && response.status < 300;
+        }
+        if (responseSpec.headersMapper) {
+            parsedResponse.parsedHeaders = operationSpec.serializer.deserialize(responseSpec.headersMapper, parsedResponse.headers.toJSON(), "operationRes.parsedHeaders", { xml: {}, ignoreUnknownProperties: true });
+        }
+    }
+    return parsedResponse;
+}
+function isOperationSpecEmpty(operationSpec) {
+    const expectedStatusCodes = Object.keys(operationSpec.responses);
+    return (expectedStatusCodes.length === 0 ||
+        (expectedStatusCodes.length === 1 && expectedStatusCodes[0] === "default"));
+}
+function handleErrorResponse(parsedResponse, operationSpec, responseSpec, options) {
+    var _a;
+    const isSuccessByStatus = 200 <= parsedResponse.status && parsedResponse.status < 300;
+    const isExpectedStatusCode = isOperationSpecEmpty(operationSpec)
+        ? isSuccessByStatus
+        : !!responseSpec;
+    if (isExpectedStatusCode) {
+        if (responseSpec) {
+            if (!responseSpec.isError) {
+                return { error: null, shouldReturnResponse: false };
+            }
+        }
+        else {
+            return { error: null, shouldReturnResponse: false };
+        }
+    }
+    const errorResponseSpec = responseSpec !== null && responseSpec !== void 0 ? responseSpec : operationSpec.responses.default;
+    const initialErrorMessage = ((_a = parsedResponse.request.streamResponseStatusCodes) === null || _a === void 0 ? void 0 : _a.has(parsedResponse.status))
+        ? `Unexpected status code: ${parsedResponse.status}`
+        : parsedResponse.bodyAsText;
+    const error = new core_rest_pipeline_1.RestError(initialErrorMessage, {
+        statusCode: parsedResponse.status,
+        request: parsedResponse.request,
+        response: parsedResponse,
+    });
+    // If the request failed but there is no error spec (and no default spec) with which
+    // to deserialize the error body, fail fast by throwing the error built above.
+    if (!errorResponseSpec) {
+        throw error;
+    }
+    const defaultBodyMapper = errorResponseSpec.bodyMapper;
+    const defaultHeadersMapper = errorResponseSpec.headersMapper;
+    try {
+        // If error response has a body, try to deserialize it using default body mapper.
+        // Then try to extract error code & message from it
+        if (parsedResponse.parsedBody) {
+            const parsedBody = parsedResponse.parsedBody;
+            let deserializedError;
+            if (defaultBodyMapper) {
+                let valueToDeserialize = parsedBody;
+                if (operationSpec.isXML && defaultBodyMapper.type.name === serializer_js_1.MapperTypeNames.Sequence) {
+                    valueToDeserialize = [];
+                    const elementName = defaultBodyMapper.xmlElementName;
+                    if (typeof parsedBody === "object" && elementName) {
+                        valueToDeserialize = parsedBody[elementName];
+                    }
+                }
+                deserializedError = operationSpec.serializer.deserialize(defaultBodyMapper, valueToDeserialize, "error.response.parsedBody", options);
+            }
+            const internalError = parsedBody.error || deserializedError || parsedBody;
+            error.code = internalError.code;
+            if (internalError.message) {
+                error.message = internalError.message;
+            }
+            if (defaultBodyMapper) {
+                error.response.parsedBody = deserializedError;
+            }
+        }
+        // If error response has headers, try to deserialize it using default header mapper
+        if (parsedResponse.headers && defaultHeadersMapper) {
+            error.response.parsedHeaders =
+                operationSpec.serializer.deserialize(defaultHeadersMapper, parsedResponse.headers.toJSON(), "operationRes.parsedHeaders");
+        }
+    }
+    catch (defaultError) {
+        error.message = `Error "${defaultError.message}" occurred in deserializing the responseBody - "${parsedResponse.bodyAsText}" for the default response.`;
+    }
+    return { error, shouldReturnResponse: false };
+}
+async function parse(jsonContentTypes, xmlContentTypes, operationResponse, opts, parseXML) {
+    var _a;
+    if (!((_a = operationResponse.request.streamResponseStatusCodes) === null || _a === void 0 ? void 0 : _a.has(operationResponse.status)) &&
+        operationResponse.bodyAsText) {
+        const text = operationResponse.bodyAsText;
+        const contentType = operationResponse.headers.get("Content-Type") || "";
+        const contentComponents = !contentType
+            ? []
+            : contentType.split(";").map((component) => component.toLowerCase());
+        try {
+            if (contentComponents.length === 0 ||
+                contentComponents.some((component) => jsonContentTypes.indexOf(component) !== -1)) {
+                operationResponse.parsedBody = JSON.parse(text);
+                return operationResponse;
+            }
+            else if (contentComponents.some((component) => xmlContentTypes.indexOf(component) !== -1)) {
+                if (!parseXML) {
+                    throw new Error("Parsing XML not supported.");
+                }
+                const body = await parseXML(text, opts.xml);
+                operationResponse.parsedBody = body;
+                return operationResponse;
+            }
+        }
+        catch (err) {
+            const msg = `Error "${err}" occurred while parsing the response body - ${operationResponse.bodyAsText}.`;
+            const errCode = err.code || core_rest_pipeline_1.RestError.PARSE_ERROR;
+            const e = new core_rest_pipeline_1.RestError(msg, {
+                code: errCode,
+                statusCode: operationResponse.status,
+                request: operationResponse.request,
+                response: operationResponse,
+            });
+            throw e;
+        }
+    }
+    return operationResponse;
+}
+//# sourceMappingURL=deserializationPolicy.js.map
+
+/***/ }),
+
+/***/ 5210:
+/***/ ((__unused_webpack_module, exports, __nccwpck_require__) => {
+
+
+// Copyright (c) Microsoft Corporation.
+// Licensed under the MIT license.
+Object.defineProperty(exports, "__esModule", ({ value: true }));
+exports.getCachedDefaultHttpClient = void 0;
+const core_rest_pipeline_1 = __nccwpck_require__(5565);
+let cachedHttpClient;
+function getCachedDefaultHttpClient() {
+    if (!cachedHttpClient) {
+        cachedHttpClient = (0, core_rest_pipeline_1.createDefaultHttpClient)();
+    }
+    return cachedHttpClient;
+}
+exports.getCachedDefaultHttpClient = getCachedDefaultHttpClient;
+//# sourceMappingURL=httpClientCache.js.map
+
+/***/ }),
+
+/***/ 8953:
+/***/ ((__unused_webpack_module, exports, __nccwpck_require__) => {
+
+
+// Copyright (c) Microsoft Corporation.
+// Licensed under the MIT license.
+Object.defineProperty(exports, "__esModule", ({ value: true }));
+exports.authorizeRequestOnTenantChallenge = exports.authorizeRequestOnClaimChallenge = exports.serializationPolicyName = exports.serializationPolicy = exports.deserializationPolicyName = exports.deserializationPolicy = exports.XML_CHARKEY = exports.XML_ATTRKEY = exports.createClientPipeline = exports.ServiceClient = exports.MapperTypeNames = exports.createSerializer = void 0;
+var serializer_js_1 = __nccwpck_require__(8913);
+Object.defineProperty(exports, "createSerializer", ({ enumerable: true, get: function () { return serializer_js_1.createSerializer; } }));
+Object.defineProperty(exports, "MapperTypeNames", ({ enumerable: true, get: function () { return serializer_js_1.MapperTypeNames; } }));
+var serviceClient_js_1 = __nccwpck_require__(317);
+Object.defineProperty(exports, "ServiceClient", ({ enumerable: true, get: function () { return serviceClient_js_1.ServiceClient; } }));
+var pipeline_js_1 = __nccwpck_require__(4971);
+Object.defineProperty(exports, "createClientPipeline", ({ enumerable: true, get: function () { return pipeline_js_1.createClientPipeline; } }));
+var interfaces_js_1 = __nccwpck_require__(4617);
+Object.defineProperty(exports, "XML_ATTRKEY", ({ enumerable: true, get: function () { return interfaces_js_1.XML_ATTRKEY; } }));
+Object.defineProperty(exports, "XML_CHARKEY", ({ enumerable: true, get: function () { return interfaces_js_1.XML_CHARKEY; } }));
+var deserializationPolicy_js_1 = __nccwpck_require__(8290);
+Object.defineProperty(exports, "deserializationPolicy", ({ enumerable: true, get: function () { return deserializationPolicy_js_1.deserializationPolicy; } }));
+Object.defineProperty(exports, "deserializationPolicyName", ({ enumerable: true, get: function () { return deserializationPolicy_js_1.deserializationPolicyName; } }));
+var serializationPolicy_js_1 = __nccwpck_require__(7895);
+Object.defineProperty(exports, "serializationPolicy", ({ enumerable: true, get: function () { return serializationPolicy_js_1.serializationPolicy; } }));
+Object.defineProperty(exports, "serializationPolicyName", ({ enumerable: true, get: function () { return serializationPolicy_js_1.serializationPolicyName; } }));
+var authorizeRequestOnClaimChallenge_js_1 = __nccwpck_require__(4521);
+Object.defineProperty(exports, "authorizeRequestOnClaimChallenge", ({ enumerable: true, get: function () { return authorizeRequestOnClaimChallenge_js_1.authorizeRequestOnClaimChallenge; } }));
+var authorizeRequestOnTenantChallenge_js_1 = __nccwpck_require__(6851);
+Object.defineProperty(exports, "authorizeRequestOnTenantChallenge", ({ enumerable: true, get: function () { return authorizeRequestOnTenantChallenge_js_1.authorizeRequestOnTenantChallenge; } }));
+//# sourceMappingURL=index.js.map
+
+/***/ }),
+
+/***/ 8785:
+/***/ ((__unused_webpack_module, exports, __nccwpck_require__) => {
+
+
+// Copyright (c) Microsoft Corporation.
+// Licensed under the MIT license.
+Object.defineProperty(exports, "__esModule", ({ value: true }));
+exports.getPathStringFromParameter = exports.getStreamingResponseStatusCodes = void 0;
+const serializer_js_1 = __nccwpck_require__(8913);
+/**
+ * Gets the set of status codes for streaming responses.
+ * @internal
+ */
+function getStreamingResponseStatusCodes(operationSpec) {
+    const result = new Set();
+    for (const statusCode in operationSpec.responses) {
+        const operationResponse = operationSpec.responses[statusCode];
+        if (operationResponse.bodyMapper &&
+            operationResponse.bodyMapper.type.name === serializer_js_1.MapperTypeNames.Stream) {
+            result.add(Number(statusCode));
+        }
+    }
+    return result;
+}
+exports.getStreamingResponseStatusCodes = getStreamingResponseStatusCodes;
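+// Illustration with a hypothetical spec: for
+//   { responses: { 200: { bodyMapper: { type: { name: "Stream" } } }, default: {} } }
+// the function returns Set { 200 }, since only the 200 response declares a Stream body.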
+/**
+ * Get the path to this parameter's value as a dotted string (a.b.c).
+ * @param parameter - The parameter to get the path string for.
+ * @returns The path to this parameter's value as a dotted string.
+ * @internal
+ */
+function getPathStringFromParameter(parameter) {
+    const { parameterPath, mapper } = parameter;
+    let result;
+    if (typeof parameterPath === "string") {
+        result = parameterPath;
+    }
+    else if (Array.isArray(parameterPath)) {
+        result = parameterPath.join(".");
+    }
+    else {
+        result = mapper.serializedName;
+    }
+    return result;
+}
+exports.getPathStringFromParameter = getPathStringFromParameter;
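+// Illustration (hypothetical parameter): the dotted path is derived from parameterPath when
+// present, falling back to the mapper's serializedName otherwise:
+//   getPathStringFromParameter({
+//     parameterPath: ["options", "requestOptions", "timeout"],
+//     mapper: { serializedName: "timeout", type: { name: "Number" } },
+//   }); // => "options.requestOptions.timeout"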
+//# sourceMappingURL=interfaceHelpers.js.map
+
+/***/ }),
+
+/***/ 4617:
+/***/ ((__unused_webpack_module, exports) => {
+
+
+// Copyright (c) Microsoft Corporation.
+// Licensed under the MIT license.
+Object.defineProperty(exports, "__esModule", ({ value: true }));
+exports.XML_CHARKEY = exports.XML_ATTRKEY = void 0;
+/**
+ * Default key used to access the XML attributes.
+ */
+exports.XML_ATTRKEY = "$";
+/**
+ * Default key used to access the XML value content.
+ */
+exports.XML_CHARKEY = "_";
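+// Illustrative only: with these defaults, an element such as <Tag lang="en">hello</Tag>
+// parsed by an xml2js-style parser is expected to come back roughly as
+//   { $: { lang: "en" }, _: "hello" }
+// i.e. attributes live under XML_ATTRKEY ("$") and text content under XML_CHARKEY ("_").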
+//# sourceMappingURL=interfaces.js.map
+
+/***/ }),
+
+/***/ 5151:
+/***/ ((__unused_webpack_module, exports, __nccwpck_require__) => {
+
+
+// Copyright (c) Microsoft Corporation.
+// Licensed under the MIT license.
+Object.defineProperty(exports, "__esModule", ({ value: true }));
+exports.logger = void 0;
+const logger_1 = __nccwpck_require__(5851);
+exports.logger = (0, logger_1.createClientLogger)("core-client");
+//# sourceMappingURL=log.js.map
+
+/***/ }),
+
+/***/ 5047:
+/***/ ((__unused_webpack_module, exports, __nccwpck_require__) => {
+
+
+// Copyright (c) Microsoft Corporation.
+// Licensed under the MIT license.
+Object.defineProperty(exports, "__esModule", ({ value: true }));
+exports.getOperationRequestInfo = exports.getOperationArgumentValueFromParameter = void 0;
+const state_js_1 = __nccwpck_require__(6204);
+/**
+ * @internal
+ * Retrieves the value to use for a given operation argument
+ * @param operationArguments - The arguments passed from the generated client
+ * @param parameter - The parameter description
+ * @param fallbackObject - If something isn't found in the arguments bag, look here.
+ *  Generally used to look at the service client properties.
+ */
+function getOperationArgumentValueFromParameter(operationArguments, parameter, fallbackObject) {
+    let parameterPath = parameter.parameterPath;
+    const parameterMapper = parameter.mapper;
+    let value;
+    if (typeof parameterPath === "string") {
+        parameterPath = [parameterPath];
+    }
+    if (Array.isArray(parameterPath)) {
+        if (parameterPath.length > 0) {
+            if (parameterMapper.isConstant) {
+                value = parameterMapper.defaultValue;
+            }
+            else {
+                let propertySearchResult = getPropertyFromParameterPath(operationArguments, parameterPath);
+                if (!propertySearchResult.propertyFound && fallbackObject) {
+                    propertySearchResult = getPropertyFromParameterPath(fallbackObject, parameterPath);
+                }
+                let useDefaultValue = false;
+                if (!propertySearchResult.propertyFound) {
+                    useDefaultValue =
+                        parameterMapper.required ||
+                            (parameterPath[0] === "options" && parameterPath.length === 2);
+                }
+                value = useDefaultValue ? parameterMapper.defaultValue : propertySearchResult.propertyValue;
+            }
+        }
+    }
+    else {
+        if (parameterMapper.required) {
+            value = {};
+        }
+        for (const propertyName in parameterPath) {
+            const propertyMapper = parameterMapper.type.modelProperties[propertyName];
+            const propertyPath = parameterPath[propertyName];
+            const propertyValue = getOperationArgumentValueFromParameter(operationArguments, {
+                parameterPath: propertyPath,
+                mapper: propertyMapper,
+            }, fallbackObject);
+            if (propertyValue !== undefined) {
+                if (!value) {
+                    value = {};
+                }
+                value[propertyName] = propertyValue;
+            }
+        }
+    }
+    return value;
+}
+exports.getOperationArgumentValueFromParameter = getOperationArgumentValueFromParameter;
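+// Minimal sketch (all values hypothetical): for a parameter with parameterPath
+// ["options", "timeout"], the arguments bag is searched first and the fallback object
+// (typically the service client) second:
+//   getOperationArgumentValueFromParameter(
+//     { options: { timeout: 30 } },
+//     { parameterPath: ["options", "timeout"],
+//       mapper: { serializedName: "timeout", type: { name: "Number" } } },
+//     serviceClientInstance, // hypothetical fallback object
+//   ); // => 30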
+function getPropertyFromParameterPath(parent, parameterPath) {
+    const result = { propertyFound: false };
+    let i = 0;
+    for (; i < parameterPath.length; ++i) {
+        const parameterPathPart = parameterPath[i];
+        // Make sure to check inherited properties too, so don't use hasOwnProperty().
+        if (parent && parameterPathPart in parent) {
+            parent = parent[parameterPathPart];
+        }
+        else {
+            break;
+        }
+    }
+    if (i === parameterPath.length) {
+        result.propertyValue = parent;
+        result.propertyFound = true;
+    }
+    return result;
+}
+const originalRequestSymbol = Symbol.for("@azure/core-client original request");
+function hasOriginalRequest(request) {
+    return originalRequestSymbol in request;
+}
+function getOperationRequestInfo(request) {
+    if (hasOriginalRequest(request)) {
+        return getOperationRequestInfo(request[originalRequestSymbol]);
+    }
+    let info = state_js_1.state.operationRequestMap.get(request);
+    if (!info) {
+        info = {};
+        state_js_1.state.operationRequestMap.set(request, info);
+    }
+    return info;
+}
+exports.getOperationRequestInfo = getOperationRequestInfo;
+//# sourceMappingURL=operationHelpers.js.map
+
+/***/ }),
+
+/***/ 4971:
+/***/ ((__unused_webpack_module, exports, __nccwpck_require__) => {
+
+
+// Copyright (c) Microsoft Corporation.
+// Licensed under the MIT license.
+Object.defineProperty(exports, "__esModule", ({ value: true }));
+exports.createClientPipeline = void 0;
+const deserializationPolicy_js_1 = __nccwpck_require__(8290);
+const core_rest_pipeline_1 = __nccwpck_require__(5565);
+const serializationPolicy_js_1 = __nccwpck_require__(7895);
+/**
+ * Creates a new Pipeline for use with a Service Client.
+ * Adds in deserializationPolicy by default.
+ * Also adds in bearerTokenAuthenticationPolicy if passed a TokenCredential.
+ * @param options - Options to customize the created pipeline.
+ */
+function createClientPipeline(options = {}) {
+    const pipeline = (0, core_rest_pipeline_1.createPipelineFromOptions)(options !== null && options !== void 0 ? options : {});
+    if (options.credentialOptions) {
+        pipeline.addPolicy((0, core_rest_pipeline_1.bearerTokenAuthenticationPolicy)({
+            credential: options.credentialOptions.credential,
+            scopes: options.credentialOptions.credentialScopes,
+        }));
+    }
+    pipeline.addPolicy((0, serializationPolicy_js_1.serializationPolicy)(options.serializationOptions), { phase: "Serialize" });
+    pipeline.addPolicy((0, deserializationPolicy_js_1.deserializationPolicy)(options.deserializationOptions), {
+        phase: "Deserialize",
+    });
+    return pipeline;
+}
+exports.createClientPipeline = createClientPipeline;
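+// Usage sketch (the credential object is hypothetical; the options shown are the ones
+// handled by the function above):
+//   const pipeline = createClientPipeline({
+//     credentialOptions: {
+//       credential: myTokenCredential,
+//       credentialScopes: "https://example.invalid/.default",
+//     },
+//   });
+// The resulting pipeline contains the serializationPolicy (Serialize phase), the
+// deserializationPolicy (Deserialize phase) and, because credentialOptions was supplied,
+// a bearerTokenAuthenticationPolicy.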
+//# sourceMappingURL=pipeline.js.map
+
+/***/ }),
+
+/***/ 7895:
+/***/ ((__unused_webpack_module, exports, __nccwpck_require__) => {
+
+
+// Copyright (c) Microsoft Corporation.
+// Licensed under the MIT license.
+Object.defineProperty(exports, "__esModule", ({ value: true }));
+exports.serializeRequestBody = exports.serializeHeaders = exports.serializationPolicy = exports.serializationPolicyName = void 0;
+const interfaces_js_1 = __nccwpck_require__(4617);
+const operationHelpers_js_1 = __nccwpck_require__(5047);
+const serializer_js_1 = __nccwpck_require__(8913);
+const interfaceHelpers_js_1 = __nccwpck_require__(8785);
+/**
+ * The programmatic identifier of the serializationPolicy.
+ */
+exports.serializationPolicyName = "serializationPolicy";
+/**
+ * This policy handles assembling the request body and headers using
+ * an OperationSpec and OperationArguments on the request.
+ */
+function serializationPolicy(options = {}) {
+    const stringifyXML = options.stringifyXML;
+    return {
+        name: exports.serializationPolicyName,
+        async sendRequest(request, next) {
+            const operationInfo = (0, operationHelpers_js_1.getOperationRequestInfo)(request);
+            const operationSpec = operationInfo === null || operationInfo === void 0 ? void 0 : operationInfo.operationSpec;
+            const operationArguments = operationInfo === null || operationInfo === void 0 ? void 0 : operationInfo.operationArguments;
+            if (operationSpec && operationArguments) {
+                serializeHeaders(request, operationArguments, operationSpec);
+                serializeRequestBody(request, operationArguments, operationSpec, stringifyXML);
+            }
+            return next(request);
+        },
+    };
+}
+exports.serializationPolicy = serializationPolicy;
+/**
+ * @internal
+ */
+function serializeHeaders(request, operationArguments, operationSpec) {
+    var _a, _b;
+    if (operationSpec.headerParameters) {
+        for (const headerParameter of operationSpec.headerParameters) {
+            let headerValue = (0, operationHelpers_js_1.getOperationArgumentValueFromParameter)(operationArguments, headerParameter);
+            if ((headerValue !== null && headerValue !== undefined) || headerParameter.mapper.required) {
+                headerValue = operationSpec.serializer.serialize(headerParameter.mapper, headerValue, (0, interfaceHelpers_js_1.getPathStringFromParameter)(headerParameter));
+                const headerCollectionPrefix = headerParameter.mapper
+                    .headerCollectionPrefix;
+                if (headerCollectionPrefix) {
+                    for (const key of Object.keys(headerValue)) {
+                        request.headers.set(headerCollectionPrefix + key, headerValue[key]);
+                    }
+                }
+                else {
+                    request.headers.set(headerParameter.mapper.serializedName || (0, interfaceHelpers_js_1.getPathStringFromParameter)(headerParameter), headerValue);
+                }
+            }
+        }
+    }
+    const customHeaders = (_b = (_a = operationArguments.options) === null || _a === void 0 ? void 0 : _a.requestOptions) === null || _b === void 0 ? void 0 : _b.customHeaders;
+    if (customHeaders) {
+        for (const customHeaderName of Object.keys(customHeaders)) {
+            request.headers.set(customHeaderName, customHeaders[customHeaderName]);
+        }
+    }
+}
+exports.serializeHeaders = serializeHeaders;
+/**
+ * @internal
+ */
+function serializeRequestBody(request, operationArguments, operationSpec, stringifyXML = function () {
+    throw new Error("XML serialization unsupported!");
+}) {
+    var _a, _b, _c, _d, _e;
+    const serializerOptions = (_a = operationArguments.options) === null || _a === void 0 ? void 0 : _a.serializerOptions;
+    const updatedOptions = {
+        xml: {
+            rootName: (_b = serializerOptions === null || serializerOptions === void 0 ? void 0 : serializerOptions.xml.rootName) !== null && _b !== void 0 ? _b : "",
+            includeRoot: (_c = serializerOptions === null || serializerOptions === void 0 ? void 0 : serializerOptions.xml.includeRoot) !== null && _c !== void 0 ? _c : false,
+            xmlCharKey: (_d = serializerOptions === null || serializerOptions === void 0 ? void 0 : serializerOptions.xml.xmlCharKey) !== null && _d !== void 0 ? _d : interfaces_js_1.XML_CHARKEY,
+        },
+    };
+    const xmlCharKey = updatedOptions.xml.xmlCharKey;
+    if (operationSpec.requestBody && operationSpec.requestBody.mapper) {
+        request.body = (0, operationHelpers_js_1.getOperationArgumentValueFromParameter)(operationArguments, operationSpec.requestBody);
+        const bodyMapper = operationSpec.requestBody.mapper;
+        const { required, serializedName, xmlName, xmlElementName, xmlNamespace, xmlNamespacePrefix, nullable, } = bodyMapper;
+        const typeName = bodyMapper.type.name;
+        try {
+            if ((request.body !== undefined && request.body !== null) ||
+                (nullable && request.body === null) ||
+                required) {
+                const requestBodyParameterPathString = (0, interfaceHelpers_js_1.getPathStringFromParameter)(operationSpec.requestBody);
+                request.body = operationSpec.serializer.serialize(bodyMapper, request.body, requestBodyParameterPathString, updatedOptions);
+                const isStream = typeName === serializer_js_1.MapperTypeNames.Stream;
+                if (operationSpec.isXML) {
+                    const xmlnsKey = xmlNamespacePrefix ? `xmlns:${xmlNamespacePrefix}` : "xmlns";
+                    const value = getXmlValueWithNamespace(xmlNamespace, xmlnsKey, typeName, request.body, updatedOptions);
+                    if (typeName === serializer_js_1.MapperTypeNames.Sequence) {
+                        request.body = stringifyXML(prepareXMLRootList(value, xmlElementName || xmlName || serializedName, xmlnsKey, xmlNamespace), { rootName: xmlName || serializedName, xmlCharKey });
+                    }
+                    else if (!isStream) {
+                        request.body = stringifyXML(value, {
+                            rootName: xmlName || serializedName,
+                            xmlCharKey,
+                        });
+                    }
+                }
+                else if (typeName === serializer_js_1.MapperTypeNames.String &&
+                    (((_e = operationSpec.contentType) === null || _e === void 0 ? void 0 : _e.match("text/plain")) || operationSpec.mediaType === "text")) {
+                    // the String serializer has validated that the request body is a string,
+                    // so just send the string as-is.
+                    return;
+                }
+                else if (!isStream) {
+                    request.body = JSON.stringify(request.body);
+                }
+            }
+        }
+        catch (error) {
+            throw new Error(`Error "${error.message}" occurred in serializing the payload - ${JSON.stringify(serializedName, undefined, "  ")}.`);
+        }
+    }
+    else if (operationSpec.formDataParameters && operationSpec.formDataParameters.length > 0) {
+        request.formData = {};
+        for (const formDataParameter of operationSpec.formDataParameters) {
+            const formDataParameterValue = (0, operationHelpers_js_1.getOperationArgumentValueFromParameter)(operationArguments, formDataParameter);
+            if (formDataParameterValue !== undefined && formDataParameterValue !== null) {
+                const formDataParameterPropertyName = formDataParameter.mapper.serializedName || (0, interfaceHelpers_js_1.getPathStringFromParameter)(formDataParameter);
+                request.formData[formDataParameterPropertyName] = operationSpec.serializer.serialize(formDataParameter.mapper, formDataParameterValue, (0, interfaceHelpers_js_1.getPathStringFromParameter)(formDataParameter), updatedOptions);
+            }
+        }
+    }
+}
+exports.serializeRequestBody = serializeRequestBody;
+/**
+ * Adds an XML namespace to the serialized XML object if needed; otherwise returns the value unchanged.
+ */
+function getXmlValueWithNamespace(xmlNamespace, xmlnsKey, typeName, serializedValue, options) {
+    // Composite and Sequence schemas already got their root namespace set during serialization
+    // We just need to add xmlns to the other schema types
+    if (xmlNamespace && !["Composite", "Sequence", "Dictionary"].includes(typeName)) {
+        const result = {};
+        result[options.xml.xmlCharKey] = serializedValue;
+        result[interfaces_js_1.XML_ATTRKEY] = { [xmlnsKey]: xmlNamespace };
+        return result;
+    }
+    return serializedValue;
+}
+function prepareXMLRootList(obj, elementName, xmlNamespaceKey, xmlNamespace) {
+    if (!Array.isArray(obj)) {
+        obj = [obj];
+    }
+    if (!xmlNamespaceKey || !xmlNamespace) {
+        return { [elementName]: obj };
+    }
+    const result = { [elementName]: obj };
+    result[interfaces_js_1.XML_ATTRKEY] = { [xmlNamespaceKey]: xmlNamespace };
+    return result;
+}
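+// Illustration (hypothetical values):
+//   prepareXMLRootList(["a", "b"], "Item", "xmlns:ns1", "urn:example")
+//   // => { Item: ["a", "b"], $: { "xmlns:ns1": "urn:example" } }
+// i.e. the list is wrapped under its element name and the namespace is recorded under
+// XML_ATTRKEY.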
+//# sourceMappingURL=serializationPolicy.js.map
+
+/***/ }),
+
+/***/ 8913:
+/***/ ((__unused_webpack_module, exports, __nccwpck_require__) => {
+
+
+// Copyright (c) Microsoft Corporation.
+// Licensed under the MIT license.
+Object.defineProperty(exports, "__esModule", ({ value: true }));
+exports.MapperTypeNames = exports.createSerializer = void 0;
+const tslib_1 = __nccwpck_require__(1577);
+const base64 = tslib_1.__importStar(__nccwpck_require__(3582));
+const interfaces_js_1 = __nccwpck_require__(4617);
+const utils_js_1 = __nccwpck_require__(7316);
+class SerializerImpl {
+    constructor(modelMappers = {}, isXML = false) {
+        this.modelMappers = modelMappers;
+        this.isXML = isXML;
+    }
+    /**
+     * @deprecated Client-side constraint validation is being removed.
+     */
+    validateConstraints(mapper, value, objectName) {
+        const failValidation = (constraintName, constraintValue) => {
+            throw new Error(`"${objectName}" with value "${value}" should satisfy the constraint "${constraintName}": ${constraintValue}.`);
+        };
+        if (mapper.constraints && value !== undefined && value !== null) {
+            const { ExclusiveMaximum, ExclusiveMinimum, InclusiveMaximum, InclusiveMinimum, MaxItems, MaxLength, MinItems, MinLength, MultipleOf, Pattern, UniqueItems, } = mapper.constraints;
+            if (ExclusiveMaximum !== undefined && value >= ExclusiveMaximum) {
+                failValidation("ExclusiveMaximum", ExclusiveMaximum);
+            }
+            if (ExclusiveMinimum !== undefined && value <= ExclusiveMinimum) {
+                failValidation("ExclusiveMinimum", ExclusiveMinimum);
+            }
+            if (InclusiveMaximum !== undefined && value > InclusiveMaximum) {
+                failValidation("InclusiveMaximum", InclusiveMaximum);
+            }
+            if (InclusiveMinimum !== undefined && value < InclusiveMinimum) {
+                failValidation("InclusiveMinimum", InclusiveMinimum);
+            }
+            if (MaxItems !== undefined && value.length > MaxItems) {
+                failValidation("MaxItems", MaxItems);
+            }
+            if (MaxLength !== undefined && value.length > MaxLength) {
+                failValidation("MaxLength", MaxLength);
+            }
+            if (MinItems !== undefined && value.length < MinItems) {
+                failValidation("MinItems", MinItems);
+            }
+            if (MinLength !== undefined && value.length < MinLength) {
+                failValidation("MinLength", MinLength);
+            }
+            if (MultipleOf !== undefined && value % MultipleOf !== 0) {
+                failValidation("MultipleOf", MultipleOf);
+            }
+            if (Pattern) {
+                const pattern = typeof Pattern === "string" ? new RegExp(Pattern) : Pattern;
+                if (typeof value !== "string" || value.match(pattern) === null) {
+                    failValidation("Pattern", Pattern);
+                }
+            }
+            if (UniqueItems &&
+                value.some((item, i, ar) => ar.indexOf(item) !== i)) {
+                failValidation("UniqueItems", UniqueItems);
+            }
+        }
+    }
+    /**
+     * Serialize the given object based on its metadata defined in the mapper
+     *
+     * @param mapper - The mapper which defines the metadata of the serializable object
+     *
+     * @param object - A valid Javascript object to be serialized
+     *
+     * @param objectName - Name of the serialized object
+     *
+     * @param options - additional options to serialization
+     *
+     * @returns A valid serialized Javascript object
+     */
+    serialize(mapper, object, objectName, options = { xml: {} }) {
+        var _a, _b, _c;
+        const updatedOptions = {
+            xml: {
+                rootName: (_a = options.xml.rootName) !== null && _a !== void 0 ? _a : "",
+                includeRoot: (_b = options.xml.includeRoot) !== null && _b !== void 0 ? _b : false,
+                xmlCharKey: (_c = options.xml.xmlCharKey) !== null && _c !== void 0 ? _c : interfaces_js_1.XML_CHARKEY,
+            },
+        };
+        let payload = {};
+        const mapperType = mapper.type.name;
+        if (!objectName) {
+            objectName = mapper.serializedName;
+        }
+        if (mapperType.match(/^Sequence$/i) !== null) {
+            payload = [];
+        }
+        if (mapper.isConstant) {
+            object = mapper.defaultValue;
+        }
+        // This table of allowed values should help explain
+        // the mapper.required and mapper.nullable properties.
+        // X means "neither undefined or null are allowed".
+        //           || required
+        //           || true      | false
+        //  nullable || ==========================
+        //      true || null      | undefined/null
+        //     false || X         | undefined
+        // undefined || X         | undefined/null
+        const { required, nullable } = mapper;
+        if (required && nullable && object === undefined) {
+            throw new Error(`${objectName} cannot be undefined.`);
+        }
+        if (required && !nullable && (object === undefined || object === null)) {
+            throw new Error(`${objectName} cannot be null or undefined.`);
+        }
+        if (!required && nullable === false && object === null) {
+            throw new Error(`${objectName} cannot be null.`);
+        }
+        if (object === undefined || object === null) {
+            payload = object;
+        }
+        else {
+            if (mapperType.match(/^any$/i) !== null) {
+                payload = object;
+            }
+            else if (mapperType.match(/^(Number|String|Boolean|Object|Stream|Uuid)$/i) !== null) {
+                payload = serializeBasicTypes(mapperType, objectName, object);
+            }
+            else if (mapperType.match(/^Enum$/i) !== null) {
+                const enumMapper = mapper;
+                payload = serializeEnumType(objectName, enumMapper.type.allowedValues, object);
+            }
+            else if (mapperType.match(/^(Date|DateTime|TimeSpan|DateTimeRfc1123|UnixTime)$/i) !== null) {
+                payload = serializeDateTypes(mapperType, object, objectName);
+            }
+            else if (mapperType.match(/^ByteArray$/i) !== null) {
+                payload = serializeByteArrayType(objectName, object);
+            }
+            else if (mapperType.match(/^Base64Url$/i) !== null) {
+                payload = serializeBase64UrlType(objectName, object);
+            }
+            else if (mapperType.match(/^Sequence$/i) !== null) {
+                payload = serializeSequenceType(this, mapper, object, objectName, Boolean(this.isXML), updatedOptions);
+            }
+            else if (mapperType.match(/^Dictionary$/i) !== null) {
+                payload = serializeDictionaryType(this, mapper, object, objectName, Boolean(this.isXML), updatedOptions);
+            }
+            else if (mapperType.match(/^Composite$/i) !== null) {
+                payload = serializeCompositeType(this, mapper, object, objectName, Boolean(this.isXML), updatedOptions);
+            }
+        }
+        return payload;
+    }
+    /**
+     * Deserialize the given object based on its metadata defined in the mapper
+     *
+     * @param mapper - The mapper which defines the metadata of the serializable object
+     *
+     * @param responseBody - A valid Javascript entity to be deserialized
+     *
+     * @param objectName - Name of the deserialized object
+     *
+     * @param options - Controls behavior of XML parser and builder.
+     *
+     * @returns A valid deserialized Javascript object
+     */
+    deserialize(mapper, responseBody, objectName, options = { xml: {} }) {
+        var _a, _b, _c, _d;
+        const updatedOptions = {
+            xml: {
+                rootName: (_a = options.xml.rootName) !== null && _a !== void 0 ? _a : "",
+                includeRoot: (_b = options.xml.includeRoot) !== null && _b !== void 0 ? _b : false,
+                xmlCharKey: (_c = options.xml.xmlCharKey) !== null && _c !== void 0 ? _c : interfaces_js_1.XML_CHARKEY,
+            },
+            ignoreUnknownProperties: (_d = options.ignoreUnknownProperties) !== null && _d !== void 0 ? _d : false,
+        };
+        if (responseBody === undefined || responseBody === null) {
+            if (this.isXML && mapper.type.name === "Sequence" && !mapper.xmlIsWrapped) {
+                // Edge case for empty, non-wrapped XML lists. xml2js can't distinguish
+                // an empty list from a missing one,
+                // so let's do the more user-friendly thing and return an empty list.
+                responseBody = [];
+            }
+            // specifically check for undefined, as the default value can be falsy (`0`, `""`, `false`, `null`)
+            if (mapper.defaultValue !== undefined) {
+                responseBody = mapper.defaultValue;
+            }
+            return responseBody;
+        }
+        let payload;
+        const mapperType = mapper.type.name;
+        if (!objectName) {
+            objectName = mapper.serializedName;
+        }
+        if (mapperType.match(/^Composite$/i) !== null) {
+            payload = deserializeCompositeType(this, mapper, responseBody, objectName, updatedOptions);
+        }
+        else {
+            if (this.isXML) {
+                const xmlCharKey = updatedOptions.xml.xmlCharKey;
+                /**
+                 * If the mapper specifies this as a non-composite type value but the responseBody contains
+                 * both header ("$" i.e., XML_ATTRKEY) and body ("#" i.e., XML_CHARKEY) properties,
+                 * then just reduce the responseBody value to the body ("#" i.e., XML_CHARKEY) property.
+                 */
+                if (responseBody[interfaces_js_1.XML_ATTRKEY] !== undefined && responseBody[xmlCharKey] !== undefined) {
+                    responseBody = responseBody[xmlCharKey];
+                }
+            }
+            if (mapperType.match(/^Number$/i) !== null) {
+                payload = parseFloat(responseBody);
+                if (isNaN(payload)) {
+                    payload = responseBody;
+                }
+            }
+            else if (mapperType.match(/^Boolean$/i) !== null) {
+                if (responseBody === "true") {
+                    payload = true;
+                }
+                else if (responseBody === "false") {
+                    payload = false;
+                }
+                else {
+                    payload = responseBody;
+                }
+            }
+            else if (mapperType.match(/^(String|Enum|Object|Stream|Uuid|TimeSpan|any)$/i) !== null) {
+                payload = responseBody;
+            }
+            else if (mapperType.match(/^(Date|DateTime|DateTimeRfc1123)$/i) !== null) {
+                payload = new Date(responseBody);
+            }
+            else if (mapperType.match(/^UnixTime$/i) !== null) {
+                payload = unixTimeToDate(responseBody);
+            }
+            else if (mapperType.match(/^ByteArray$/i) !== null) {
+                payload = base64.decodeString(responseBody);
+            }
+            else if (mapperType.match(/^Base64Url$/i) !== null) {
+                payload = base64UrlToByteArray(responseBody);
+            }
+            else if (mapperType.match(/^Sequence$/i) !== null) {
+                payload = deserializeSequenceType(this, mapper, responseBody, objectName, updatedOptions);
+            }
+            else if (mapperType.match(/^Dictionary$/i) !== null) {
+                payload = deserializeDictionaryType(this, mapper, responseBody, objectName, updatedOptions);
+            }
+        }
+        if (mapper.isConstant) {
+            payload = mapper.defaultValue;
+        }
+        return payload;
+    }
+}
+/**
+ * Method that creates and returns a Serializer.
+ * @param modelMappers - Known models to map
+ * @param isXML - If XML should be supported
+ */
+function createSerializer(modelMappers = {}, isXML = false) {
+    return new SerializerImpl(modelMappers, isXML);
+}
+exports.createSerializer = createSerializer;
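+// Minimal usage sketch (the mapper shape is illustrative, not taken from this bundle):
+//   const serializer = createSerializer({}, /* isXML */ false);
+//   const countMapper = { serializedName: "count", required: true, type: { name: "Number" } };
+//   serializer.serialize(countMapper, 42, "count");     // => 42
+//   serializer.deserialize(countMapper, "42", "count"); // => 42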
+function trimEnd(str, ch) {
+    let len = str.length;
+    while (len - 1 >= 0 && str[len - 1] === ch) {
+        --len;
+    }
+    return str.substr(0, len);
+}
+function bufferToBase64Url(buffer) {
+    if (!buffer) {
+        return undefined;
+    }
+    if (!(buffer instanceof Uint8Array)) {
+        throw new Error(`Please provide an input of type Uint8Array for converting to Base64Url.`);
+    }
+    // Uint8Array to Base64.
+    const str = base64.encodeByteArray(buffer);
+    // Base64 to Base64Url.
+    return trimEnd(str, "=").replace(/\+/g, "-").replace(/\//g, "_");
+}
+function base64UrlToByteArray(str) {
+    if (!str) {
+        return undefined;
+    }
+    if (str && typeof str.valueOf() !== "string") {
+        throw new Error("Please provide an input of type string for converting to Uint8Array");
+    }
+    // Base64Url to Base64.
+    str = str.replace(/-/g, "+").replace(/_/g, "/");
+    // Base64 to Uint8Array.
+    return base64.decodeString(str);
+}
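+// Round-trip sketch (illustrative): bufferToBase64Url(Uint8Array.of(251, 255)) yields "-_8"
+// ("+/8=" with padding trimmed and "+" / "/" made URL-safe), and base64UrlToByteArray("-_8")
+// is expected to restore Uint8Array [251, 255].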
+function splitSerializeName(prop) {
+    const classes = [];
+    let partialclass = "";
+    if (prop) {
+        const subwords = prop.split(".");
+        for (const item of subwords) {
+            if (item.charAt(item.length - 1) === "\\") {
+                partialclass += item.substr(0, item.length - 1) + ".";
+            }
+            else {
+                partialclass += item;
+                classes.push(partialclass);
+                partialclass = "";
+            }
+        }
+    }
+    return classes;
+}
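+// Illustration: splitSerializeName("properties.name") => ["properties", "name"]; an escaped
+// dot stays inside one segment, e.g. splitSerializeName("a\\.b.c") => ["a.b", "c"].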
+function dateToUnixTime(d) {
+    if (!d) {
+        return undefined;
+    }
+    if (typeof d.valueOf() === "string") {
+        d = new Date(d);
+    }
+    return Math.floor(d.getTime() / 1000);
+}
+function unixTimeToDate(n) {
+    if (!n) {
+        return undefined;
+    }
+    return new Date(n * 1000);
+}
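+// Quick check (illustrative): dateToUnixTime(new Date("1970-01-01T00:01:00Z")) === 60 and
+// unixTimeToDate(60).toISOString() === "1970-01-01T00:01:00.000Z".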
+function serializeBasicTypes(typeName, objectName, value) {
+    if (value !== null && value !== undefined) {
+        if (typeName.match(/^Number$/i) !== null) {
+            if (typeof value !== "number") {
+                throw new Error(`${objectName} with value ${value} must be of type number.`);
+            }
+        }
+        else if (typeName.match(/^String$/i) !== null) {
+            if (typeof value.valueOf() !== "string") {
+                throw new Error(`${objectName} with value "${value}" must be of type string.`);
+            }
+        }
+        else if (typeName.match(/^Uuid$/i) !== null) {
+            if (!(typeof value.valueOf() === "string" && (0, utils_js_1.isValidUuid)(value))) {
+                throw new Error(`${objectName} with value "${value}" must be of type string and a valid uuid.`);
+            }
+        }
+        else if (typeName.match(/^Boolean$/i) !== null) {
+            if (typeof value !== "boolean") {
+                throw new Error(`${objectName} with value ${value} must be of type boolean.`);
+            }
+        }
+        else if (typeName.match(/^Stream$/i) !== null) {
+            const objectType = typeof value;
+            if (objectType !== "string" &&
+                typeof value.pipe !== "function" && // NodeJS.ReadableStream
+                typeof value.tee !== "function" && // browser ReadableStream
+                !(value instanceof ArrayBuffer) &&
+                !ArrayBuffer.isView(value) &&
+                // File objects count as a type of Blob, so we want to use instanceof explicitly
+                !((typeof Blob === "function" || typeof Blob === "object") && value instanceof Blob) &&
+                objectType !== "function") {
+                throw new Error(`${objectName} must be a string, Blob, ArrayBuffer, ArrayBufferView, ReadableStream, or () => ReadableStream.`);
+            }
+        }
+    }
+    return value;
+}
+function serializeEnumType(objectName, allowedValues, value) {
+    if (!allowedValues) {
+        throw new Error(`Please provide a set of allowedValues to validate ${objectName} as an Enum Type.`);
+    }
+    const isPresent = allowedValues.some((item) => {
+        if (typeof item.valueOf() === "string") {
+            return item.toLowerCase() === value.toLowerCase();
+        }
+        return item === value;
+    });
+    if (!isPresent) {
+        throw new Error(`${value} is not a valid value for ${objectName}. The valid values are: ${JSON.stringify(allowedValues)}.`);
+    }
+    return value;
+}
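+// Illustration: serializeEnumType("color", ["Red", "Blue"], "red") passes the
+// case-insensitive membership check and returns "red" unchanged; a value outside the
+// allowed list throws.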
+function serializeByteArrayType(objectName, value) {
+    if (value !== undefined && value !== null) {
+        if (!(value instanceof Uint8Array)) {
+            throw new Error(`${objectName} must be of type Uint8Array.`);
+        }
+        value = base64.encodeByteArray(value);
+    }
+    return value;
+}
+function serializeBase64UrlType(objectName, value) {
+    if (value !== undefined && value !== null) {
+        if (!(value instanceof Uint8Array)) {
+            throw new Error(`${objectName} must be of type Uint8Array.`);
+        }
+        value = bufferToBase64Url(value);
+    }
+    return value;
+}
+function serializeDateTypes(typeName, value, objectName) {
+    if (value !== undefined && value !== null) {
+        if (typeName.match(/^Date$/i) !== null) {
+            if (!(value instanceof Date ||
+                (typeof value.valueOf() === "string" && !isNaN(Date.parse(value))))) {
+                throw new Error(`${objectName} must be an instance of Date or a string in ISO 8601 format.`);
+            }
+            value =
+                value instanceof Date
+                    ? value.toISOString().substring(0, 10)
+                    : new Date(value).toISOString().substring(0, 10);
+        }
+        else if (typeName.match(/^DateTime$/i) !== null) {
+            if (!(value instanceof Date ||
+                (typeof value.valueOf() === "string" && !isNaN(Date.parse(value))))) {
+                throw new Error(`${objectName} must be an instance of Date or a string in ISO 8601 format.`);
+            }
+            value = value instanceof Date ? value.toISOString() : new Date(value).toISOString();
+        }
+        else if (typeName.match(/^DateTimeRfc1123$/i) !== null) {
+            if (!(value instanceof Date ||
+                (typeof value.valueOf() === "string" && !isNaN(Date.parse(value))))) {
+                throw new Error(`${objectName} must be an instance of Date or a string in RFC 1123 format.`);
+            }
+            value = value instanceof Date ? value.toUTCString() : new Date(value).toUTCString();
+        }
+        else if (typeName.match(/^UnixTime$/i) !== null) {
+            if (!(value instanceof Date ||
+                (typeof value.valueOf() === "string" && !isNaN(Date.parse(value))))) {
+                throw new Error(`${objectName} must be an instance of Date or a string in RFC 1123/ISO 8601 format ` +
+                    `for it to be serialized in UnixTime/Epoch format.`);
+            }
+            value = dateToUnixTime(value);
+        }
+        else if (typeName.match(/^TimeSpan$/i) !== null) {
+            if (!(0, utils_js_1.isDuration)(value)) {
+                throw new Error(`${objectName} must be a string in ISO 8601 format. Instead was "${value}".`);
+            }
+        }
+    }
+    return value;
+}
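+// Examples (illustrative): with typeName "Date", new Date("2024-01-02T03:04:05Z") serializes
+// to "2024-01-02"; with "DateTimeRfc1123" it becomes "Tue, 02 Jan 2024 03:04:05 GMT"; with
+// "UnixTime" the same instant becomes 1704164645.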
+function serializeSequenceType(serializer, mapper, object, objectName, isXml, options) {
+    var _a;
+    if (!Array.isArray(object)) {
+        throw new Error(`${objectName} must be of type Array.`);
+    }
+    let elementType = mapper.type.element;
+    if (!elementType || typeof elementType !== "object") {
+        throw new Error(`element" metadata for an Array must be defined in the ` +
+            `mapper and it must of type "object" in ${objectName}.`);
+    }
+    // Quirk: Composite mappers referenced by `element` might
+    // not have *all* properties declared (like uberParent),
+    // so let's try to look up the full definition by name.
+    if (elementType.type.name === "Composite" && elementType.type.className) {
+        elementType = (_a = serializer.modelMappers[elementType.type.className]) !== null && _a !== void 0 ? _a : elementType;
+    }
+    const tempArray = [];
+    for (let i = 0; i < object.length; i++) {
+        const serializedValue = serializer.serialize(elementType, object[i], objectName, options);
+        if (isXml && elementType.xmlNamespace) {
+            const xmlnsKey = elementType.xmlNamespacePrefix
+                ? `xmlns:${elementType.xmlNamespacePrefix}`
+                : "xmlns";
+            if (elementType.type.name === "Composite") {
+                tempArray[i] = Object.assign({}, serializedValue);
+                tempArray[i][interfaces_js_1.XML_ATTRKEY] = { [xmlnsKey]: elementType.xmlNamespace };
+            }
+            else {
+                tempArray[i] = {};
+                tempArray[i][options.xml.xmlCharKey] = serializedValue;
+                tempArray[i][interfaces_js_1.XML_ATTRKEY] = { [xmlnsKey]: elementType.xmlNamespace };
+            }
+        }
+        else {
+            tempArray[i] = serializedValue;
+        }
+    }
+    return tempArray;
+}
+function serializeDictionaryType(serializer, mapper, object, objectName, isXml, options) {
+    if (typeof object !== "object") {
+        throw new Error(`${objectName} must be of type object.`);
+    }
+    const valueType = mapper.type.value;
+    if (!valueType || typeof valueType !== "object") {
+        throw new Error(`"value" metadata for a Dictionary must be defined in the ` +
+            `mapper and it must be of type "object" in ${objectName}.`);
+    }
+    const tempDictionary = {};
+    for (const key of Object.keys(object)) {
+        const serializedValue = serializer.serialize(valueType, object[key], objectName, options);
+        // If the element needs an XML namespace, we add it under the $ (XML_ATTRKEY) property
+        tempDictionary[key] = getXmlObjectValue(valueType, serializedValue, isXml, options);
+    }
+    // Add the namespace to the root element if needed
+    if (isXml && mapper.xmlNamespace) {
+        const xmlnsKey = mapper.xmlNamespacePrefix ? `xmlns:${mapper.xmlNamespacePrefix}` : "xmlns";
+        const result = tempDictionary;
+        result[interfaces_js_1.XML_ATTRKEY] = { [xmlnsKey]: mapper.xmlNamespace };
+        return result;
+    }
+    return tempDictionary;
+}
+/**
+ * Resolves the additionalProperties property from a referenced mapper
+ * @param serializer - the serializer containing the entire set of mappers
+ * @param mapper - the composite mapper to resolve
+ * @param objectName - name of the object being serialized
+ */
+function resolveAdditionalProperties(serializer, mapper, objectName) {
+    const additionalProperties = mapper.type.additionalProperties;
+    if (!additionalProperties && mapper.type.className) {
+        const modelMapper = resolveReferencedMapper(serializer, mapper, objectName);
+        return modelMapper === null || modelMapper === void 0 ? void 0 : modelMapper.type.additionalProperties;
+    }
+    return additionalProperties;
+}
+/**
+ * Finds the mapper referenced by className
+ * @param serializer - the serializer containing the entire set of mappers
+ * @param mapper - the composite mapper to resolve
+ * @param objectName - name of the object being serialized
+ */
+function resolveReferencedMapper(serializer, mapper, objectName) {
+    const className = mapper.type.className;
+    if (!className) {
+        throw new Error(`Class name for model "${objectName}" is not provided in the mapper "${JSON.stringify(mapper, undefined, 2)}".`);
+    }
+    return serializer.modelMappers[className];
+}
+/**
+ * Resolves a composite mapper's modelProperties.
+ * @param serializer - the serializer containing the entire set of mappers
+ * @param mapper - the composite mapper to resolve
+ */
+function resolveModelProperties(serializer, mapper, objectName) {
+    let modelProps = mapper.type.modelProperties;
+    if (!modelProps) {
+        const modelMapper = resolveReferencedMapper(serializer, mapper, objectName);
+        if (!modelMapper) {
+            throw new Error(`mapper() cannot be null or undefined for model "${mapper.type.className}".`);
+        }
+        modelProps = modelMapper === null || modelMapper === void 0 ? void 0 : modelMapper.type.modelProperties;
+        if (!modelProps) {
+            throw new Error(`modelProperties cannot be null or undefined in the ` +
+                `mapper "${JSON.stringify(modelMapper)}" of type "${mapper.type.className}" for object "${objectName}".`);
+        }
+    }
+    return modelProps;
+}
+function serializeCompositeType(serializer, mapper, object, objectName, isXml, options) {
+    if (getPolymorphicDiscriminatorRecursively(serializer, mapper)) {
+        mapper = getPolymorphicMapper(serializer, mapper, object, "clientName");
+    }
+    if (object !== undefined && object !== null) {
+        const payload = {};
+        const modelProps = resolveModelProperties(serializer, mapper, objectName);
+        for (const key of Object.keys(modelProps)) {
+            const propertyMapper = modelProps[key];
+            if (propertyMapper.readOnly) {
+                continue;
+            }
+            let propName;
+            let parentObject = payload;
+            if (serializer.isXML) {
+                if (propertyMapper.xmlIsWrapped) {
+                    propName = propertyMapper.xmlName;
+                }
+                else {
+                    propName = propertyMapper.xmlElementName || propertyMapper.xmlName;
+                }
+            }
+            else {
+                const paths = splitSerializeName(propertyMapper.serializedName);
+                propName = paths.pop();
+                for (const pathName of paths) {
+                    const childObject = parentObject[pathName];
+                    if ((childObject === undefined || childObject === null) &&
+                        ((object[key] !== undefined && object[key] !== null) ||
+                            propertyMapper.defaultValue !== undefined)) {
+                        parentObject[pathName] = {};
+                    }
+                    parentObject = parentObject[pathName];
+                }
+            }
+            if (parentObject !== undefined && parentObject !== null) {
+                if (isXml && mapper.xmlNamespace) {
+                    const xmlnsKey = mapper.xmlNamespacePrefix
+                        ? `xmlns:${mapper.xmlNamespacePrefix}`
+                        : "xmlns";
+                    parentObject[interfaces_js_1.XML_ATTRKEY] = Object.assign(Object.assign({}, parentObject[interfaces_js_1.XML_ATTRKEY]), { [xmlnsKey]: mapper.xmlNamespace });
+                }
+                const propertyObjectName = propertyMapper.serializedName !== ""
+                    ? objectName + "." + propertyMapper.serializedName
+                    : objectName;
+                let toSerialize = object[key];
+                const polymorphicDiscriminator = getPolymorphicDiscriminatorRecursively(serializer, mapper);
+                if (polymorphicDiscriminator &&
+                    polymorphicDiscriminator.clientName === key &&
+                    (toSerialize === undefined || toSerialize === null)) {
+                    toSerialize = mapper.serializedName;
+                }
+                const serializedValue = serializer.serialize(propertyMapper, toSerialize, propertyObjectName, options);
+                if (serializedValue !== undefined && propName !== undefined && propName !== null) {
+                    const value = getXmlObjectValue(propertyMapper, serializedValue, isXml, options);
+                    if (isXml && propertyMapper.xmlIsAttribute) {
+                        // XML_ATTRKEY, i.e. "$", is the key under which xml2js keeps attributes.
+                        // This keeps things simple while preventing name collisions
+                        // with names in user documents.
+                        parentObject[interfaces_js_1.XML_ATTRKEY] = parentObject[interfaces_js_1.XML_ATTRKEY] || {};
+                        parentObject[interfaces_js_1.XML_ATTRKEY][propName] = serializedValue;
+                    }
+                    else if (isXml && propertyMapper.xmlIsWrapped) {
+                        parentObject[propName] = { [propertyMapper.xmlElementName]: value };
+                    }
+                    else {
+                        parentObject[propName] = value;
+                    }
+                }
+            }
+        }
+        const additionalPropertiesMapper = resolveAdditionalProperties(serializer, mapper, objectName);
+        if (additionalPropertiesMapper) {
+            const propNames = Object.keys(modelProps);
+            for (const clientPropName in object) {
+                const isAdditionalProperty = propNames.every((pn) => pn !== clientPropName);
+                if (isAdditionalProperty) {
+                    payload[clientPropName] = serializer.serialize(additionalPropertiesMapper, object[clientPropName], objectName + '["' + clientPropName + '"]', options);
+                }
+            }
+        }
+        return payload;
+    }
+    return object;
+}
+function getXmlObjectValue(propertyMapper, serializedValue, isXml, options) {
+    if (!isXml || !propertyMapper.xmlNamespace) {
+        return serializedValue;
+    }
+    const xmlnsKey = propertyMapper.xmlNamespacePrefix
+        ? `xmlns:${propertyMapper.xmlNamespacePrefix}`
+        : "xmlns";
+    const xmlNamespace = { [xmlnsKey]: propertyMapper.xmlNamespace };
+    if (["Composite"].includes(propertyMapper.type.name)) {
+        if (serializedValue[interfaces_js_1.XML_ATTRKEY]) {
+            return serializedValue;
+        }
+        else {
+            const result = Object.assign({}, serializedValue);
+            result[interfaces_js_1.XML_ATTRKEY] = xmlNamespace;
+            return result;
+        }
+    }
+    const result = {};
+    result[options.xml.xmlCharKey] = serializedValue;
+    result[interfaces_js_1.XML_ATTRKEY] = xmlNamespace;
+    return result;
+}
+function isSpecialXmlProperty(propertyName, options) {
+    return [interfaces_js_1.XML_ATTRKEY, options.xml.xmlCharKey].includes(propertyName);
+}
+function deserializeCompositeType(serializer, mapper, responseBody, objectName, options) {
+    var _a, _b;
+    const xmlCharKey = (_a = options.xml.xmlCharKey) !== null && _a !== void 0 ? _a : interfaces_js_1.XML_CHARKEY;
+    if (getPolymorphicDiscriminatorRecursively(serializer, mapper)) {
+        mapper = getPolymorphicMapper(serializer, mapper, responseBody, "serializedName");
+    }
+    const modelProps = resolveModelProperties(serializer, mapper, objectName);
+    let instance = {};
+    const handledPropertyNames = [];
+    for (const key of Object.keys(modelProps)) {
+        const propertyMapper = modelProps[key];
+        const paths = splitSerializeName(modelProps[key].serializedName);
+        handledPropertyNames.push(paths[0]);
+        const { serializedName, xmlName, xmlElementName } = propertyMapper;
+        let propertyObjectName = objectName;
+        if (serializedName !== "" && serializedName !== undefined) {
+            propertyObjectName = objectName + "." + serializedName;
+        }
+        const headerCollectionPrefix = propertyMapper.headerCollectionPrefix;
+        if (headerCollectionPrefix) {
+            const dictionary = {};
+            for (const headerKey of Object.keys(responseBody)) {
+                if (headerKey.startsWith(headerCollectionPrefix)) {
+                    dictionary[headerKey.substring(headerCollectionPrefix.length)] = serializer.deserialize(propertyMapper.type.value, responseBody[headerKey], propertyObjectName, options);
+                }
+                handledPropertyNames.push(headerKey);
+            }
+            instance[key] = dictionary;
+        }
+        else if (serializer.isXML) {
+            if (propertyMapper.xmlIsAttribute && responseBody[interfaces_js_1.XML_ATTRKEY]) {
+                instance[key] = serializer.deserialize(propertyMapper, responseBody[interfaces_js_1.XML_ATTRKEY][xmlName], propertyObjectName, options);
+            }
+            else if (propertyMapper.xmlIsMsText) {
+                if (responseBody[xmlCharKey] !== undefined) {
+                    instance[key] = responseBody[xmlCharKey];
+                }
+                else if (typeof responseBody === "string") {
+                    // The special case where the XML parser parses "<Name>content</Name>" into JSON of
+                    //   `{ name: "content"}` instead of `{ name: { "_": "content" }}`
+                    instance[key] = responseBody;
+                }
+            }
+            else {
+                const propertyName = xmlElementName || xmlName || serializedName;
+                if (propertyMapper.xmlIsWrapped) {
+                    /* a list of <xmlElementName> wrapped by <xmlName>
+                      For the xml example below
+                        <Cors>
+                          <CorsRule>...</CorsRule>
+                          <CorsRule>...</CorsRule>
+                        </Cors>
+                      the responseBody has
+                        {
+                          Cors: {
+                            CorsRule: [{...}, {...}]
+                          }
+                        }
+                      xmlName is "Cors" and xmlElementName is "CorsRule".
+                    */
+                    const wrapped = responseBody[xmlName];
+                    const elementList = (_b = wrapped === null || wrapped === void 0 ? void 0 : wrapped[xmlElementName]) !== null && _b !== void 0 ? _b : [];
+                    instance[key] = serializer.deserialize(propertyMapper, elementList, propertyObjectName, options);
+                    handledPropertyNames.push(xmlName);
+                }
+                else {
+                    const property = responseBody[propertyName];
+                    instance[key] = serializer.deserialize(propertyMapper, property, propertyObjectName, options);
+                    handledPropertyNames.push(propertyName);
+                }
+            }
+        }
+        else {
+            // deserialize the property if it is present in the provided responseBody instance
+            let propertyInstance;
+            let res = responseBody;
+            // traversing the object step by step.
+            let steps = 0;
+            for (const item of paths) {
+                if (!res)
+                    break;
+                steps++;
+                res = res[item];
+            }
+            // Only accept null when it occurs at the last position of the path; a null reached earlier is treated as undefined.
+            if (res === null && steps < paths.length) {
+                res = undefined;
+            }
+            propertyInstance = res;
+            const polymorphicDiscriminator = mapper.type.polymorphicDiscriminator;
+            // Checking the model property name (key) (ex: "fishtype") against the
+            // clientName of the polymorphicDiscriminator metadata (ex: "fishtype"),
+            // instead of the serializedName of the polymorphicDiscriminator (ex: "fish.type"),
+            // is a better approach. The generator is not consistent about escaping '\.' in the
+            // serializedName of the property marked as the polymorphic discriminator (ex: "fish\.type")
+            // versus the serializedName of the metadata polymorphicDiscriminator (ex: "fish.type").
+            // However, the clientName transformation of the polymorphicDiscriminator (ex: "fishtype")
+            // and the transformation of the model property name (ex: "fishtype") are done consistently.
+            // Hence, it is a safer bet to rely on the clientName of the polymorphicDiscriminator.
+            if (polymorphicDiscriminator &&
+                key === polymorphicDiscriminator.clientName &&
+                (propertyInstance === undefined || propertyInstance === null)) {
+                propertyInstance = mapper.serializedName;
+            }
+            let serializedValue;
+            // paging
+            if (Array.isArray(responseBody[key]) && modelProps[key].serializedName === "") {
+                propertyInstance = responseBody[key];
+                const arrayInstance = serializer.deserialize(propertyMapper, propertyInstance, propertyObjectName, options);
+                // Copy over any properties that have already been added into the instance, where they do
+                // not exist on the newly de-serialized array
+                for (const [k, v] of Object.entries(instance)) {
+                    if (!Object.prototype.hasOwnProperty.call(arrayInstance, k)) {
+                        arrayInstance[k] = v;
+                    }
+                }
+                instance = arrayInstance;
+            }
+            else if (propertyInstance !== undefined || propertyMapper.defaultValue !== undefined) {
+                serializedValue = serializer.deserialize(propertyMapper, propertyInstance, propertyObjectName, options);
+                instance[key] = serializedValue;
+            }
+        }
+    }
+    const additionalPropertiesMapper = mapper.type.additionalProperties;
+    if (additionalPropertiesMapper) {
+        const isAdditionalProperty = (responsePropName) => {
+            for (const clientPropName in modelProps) {
+                const paths = splitSerializeName(modelProps[clientPropName].serializedName);
+                if (paths[0] === responsePropName) {
+                    return false;
+                }
+            }
+            return true;
+        };
+        for (const responsePropName in responseBody) {
+            if (isAdditionalProperty(responsePropName)) {
+                instance[responsePropName] = serializer.deserialize(additionalPropertiesMapper, responseBody[responsePropName], objectName + '["' + responsePropName + '"]', options);
+            }
+        }
+    }
+    else if (responseBody && !options.ignoreUnknownProperties) {
+        for (const key of Object.keys(responseBody)) {
+            if (instance[key] === undefined &&
+                !handledPropertyNames.includes(key) &&
+                !isSpecialXmlProperty(key, options)) {
+                instance[key] = responseBody[key];
+            }
+        }
+    }
+    return instance;
+}
+function deserializeDictionaryType(serializer, mapper, responseBody, objectName, options) {
+    /* jshint validthis: true */
+    const value = mapper.type.value;
+    if (!value || typeof value !== "object") {
+        throw new Error(`"value" metadata for a Dictionary must be defined in the ` +
+            `mapper and it must be of type "object" in ${objectName}`);
+    }
+    if (responseBody) {
+        const tempDictionary = {};
+        for (const key of Object.keys(responseBody)) {
+            tempDictionary[key] = serializer.deserialize(value, responseBody[key], objectName, options);
+        }
+        return tempDictionary;
+    }
+    return responseBody;
+}
+function deserializeSequenceType(serializer, mapper, responseBody, objectName, options) {
+    var _a;
+    let element = mapper.type.element;
+    if (!element || typeof element !== "object") {
+        throw new Error(`"element" metadata for an Array must be defined in the ` +
+            `mapper and it must be of type "object" in ${objectName}`);
+    }
+    if (responseBody) {
+        if (!Array.isArray(responseBody)) {
+            // xml2js will interpret a single element array as just the element, so force it to be an array
+            responseBody = [responseBody];
+        }
+        // Quirk: Composite mappers referenced by `element` might
+        // not have *all* properties declared (like uberParent),
+        // so let's try to look up the full definition by name.
+        if (element.type.name === "Composite" && element.type.className) {
+            element = (_a = serializer.modelMappers[element.type.className]) !== null && _a !== void 0 ? _a : element;
+        }
+        const tempArray = [];
+        for (let i = 0; i < responseBody.length; i++) {
+            tempArray[i] = serializer.deserialize(element, responseBody[i], `${objectName}[${i}]`, options);
+        }
+        return tempArray;
+    }
+    return responseBody;
+}
+function getIndexDiscriminator(discriminators, discriminatorValue, typeName) {
+    const typeNamesToCheck = [typeName];
+    while (typeNamesToCheck.length) {
+        const currentName = typeNamesToCheck.shift();
+        const indexDiscriminator = discriminatorValue === currentName
+            ? discriminatorValue
+            : currentName + "." + discriminatorValue;
+        if (Object.prototype.hasOwnProperty.call(discriminators, indexDiscriminator)) {
+            return discriminators[indexDiscriminator];
+        }
+        else {
+            for (const [name, mapper] of Object.entries(discriminators)) {
+                if (name.startsWith(currentName + ".") &&
+                    mapper.type.uberParent === currentName &&
+                    mapper.type.className) {
+                    typeNamesToCheck.push(mapper.type.className);
+                }
+            }
+        }
+    }
+    return undefined;
+}
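+// A minimal sketch of how the discriminator index above is keyed, using a
+// hypothetical "Fish"/"salmon" hierarchy that is not part of this bundle;
+// entries are stored either under the bare discriminator value or under
+// "<className>.<discriminatorValue>", e.g. { "Fish.salmon": salmonMapper }.
+function exampleLookupSalmonMapper(discriminators) {
+    return getIndexDiscriminator(discriminators, "salmon", "Fish");
+}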
+function getPolymorphicMapper(serializer, mapper, object, polymorphicPropertyName) {
+    var _a;
+    const polymorphicDiscriminator = getPolymorphicDiscriminatorRecursively(serializer, mapper);
+    if (polymorphicDiscriminator) {
+        let discriminatorName = polymorphicDiscriminator[polymorphicPropertyName];
+        if (discriminatorName) {
+            // The serializedName might have \\, which we just want to ignore
+            if (polymorphicPropertyName === "serializedName") {
+                discriminatorName = discriminatorName.replace(/\\/gi, "");
+            }
+            const discriminatorValue = object[discriminatorName];
+            const typeName = (_a = mapper.type.uberParent) !== null && _a !== void 0 ? _a : mapper.type.className;
+            if (typeof discriminatorValue === "string" && typeName) {
+                const polymorphicMapper = getIndexDiscriminator(serializer.modelMappers.discriminators, discriminatorValue, typeName);
+                if (polymorphicMapper) {
+                    mapper = polymorphicMapper;
+                }
+            }
+        }
+    }
+    return mapper;
+}
+function getPolymorphicDiscriminatorRecursively(serializer, mapper) {
+    return (mapper.type.polymorphicDiscriminator ||
+        getPolymorphicDiscriminatorSafely(serializer, mapper.type.uberParent) ||
+        getPolymorphicDiscriminatorSafely(serializer, mapper.type.className));
+}
+function getPolymorphicDiscriminatorSafely(serializer, typeName) {
+    return (typeName &&
+        serializer.modelMappers[typeName] &&
+        serializer.modelMappers[typeName].type.polymorphicDiscriminator);
+}
+/**
+ * Known types of Mappers
+ */
+exports.MapperTypeNames = {
+    Base64Url: "Base64Url",
+    Boolean: "Boolean",
+    ByteArray: "ByteArray",
+    Composite: "Composite",
+    Date: "Date",
+    DateTime: "DateTime",
+    DateTimeRfc1123: "DateTimeRfc1123",
+    Dictionary: "Dictionary",
+    Enum: "Enum",
+    Number: "Number",
+    Object: "Object",
+    Sequence: "Sequence",
+    String: "String",
+    Stream: "Stream",
+    TimeSpan: "TimeSpan",
+    UnixTime: "UnixTime",
+};
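+// A minimal, illustrative mapper using the type names above; the "Widget"
+// model and its "id"/"tags" properties are invented for this sketch and do
+// not correspond to any real model in this bundle.
+const exampleWidgetMapper = {
+    serializedName: "Widget",
+    type: {
+        name: exports.MapperTypeNames.Composite,
+        className: "Widget",
+        modelProperties: {
+            id: { serializedName: "id", type: { name: exports.MapperTypeNames.String } },
+            tags: {
+                serializedName: "tags",
+                type: {
+                    name: exports.MapperTypeNames.Sequence,
+                    element: { type: { name: exports.MapperTypeNames.String } },
+                },
+            },
+        },
+    },
+};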
+//# sourceMappingURL=serializer.js.map
+
+/***/ }),
+
+/***/ 317:
+/***/ ((__unused_webpack_module, exports, __nccwpck_require__) => {
+
+
+// Copyright (c) Microsoft Corporation.
+// Licensed under the MIT license.
+Object.defineProperty(exports, "__esModule", ({ value: true }));
+exports.ServiceClient = void 0;
+const core_rest_pipeline_1 = __nccwpck_require__(5565);
+const pipeline_js_1 = __nccwpck_require__(4971);
+const utils_js_1 = __nccwpck_require__(7316);
+const httpClientCache_js_1 = __nccwpck_require__(5210);
+const operationHelpers_js_1 = __nccwpck_require__(5047);
+const urlHelpers_js_1 = __nccwpck_require__(5051);
+const interfaceHelpers_js_1 = __nccwpck_require__(8785);
+const log_js_1 = __nccwpck_require__(5151);
+/**
+ * Initializes a new instance of the ServiceClient.
+ */
+class ServiceClient {
+    /**
+     * The ServiceClient constructor
+     * @param options - The service client options that govern the behavior of the client.
+     */
+    constructor(options = {}) {
+        var _a, _b;
+        this._requestContentType = options.requestContentType;
+        this._endpoint = (_a = options.endpoint) !== null && _a !== void 0 ? _a : options.baseUri;
+        if (options.baseUri) {
+            log_js_1.logger.warning("The baseUri option for SDK Clients has been deprecated, please use endpoint instead.");
+        }
+        this._allowInsecureConnection = options.allowInsecureConnection;
+        this._httpClient = options.httpClient || (0, httpClientCache_js_1.getCachedDefaultHttpClient)();
+        this.pipeline = options.pipeline || createDefaultPipeline(options);
+        if ((_b = options.additionalPolicies) === null || _b === void 0 ? void 0 : _b.length) {
+            for (const { policy, position } of options.additionalPolicies) {
+                // Sign happens after Retry and is commonly needed to occur
+                // before policies that intercept post-retry.
+                const afterPhase = position === "perRetry" ? "Sign" : undefined;
+                this.pipeline.addPolicy(policy, {
+                    afterPhase,
+                });
+            }
+        }
+    }
+    /**
+     * Send the provided httpRequest.
+     */
+    async sendRequest(request) {
+        return this.pipeline.sendRequest(this._httpClient, request);
+    }
+    /**
+     * Send an HTTP request that is populated using the provided OperationSpec.
+     * @typeParam T - The typed result of the request, based on the OperationSpec.
+     * @param operationArguments - The arguments that the HTTP request's templated values will be populated from.
+     * @param operationSpec - The OperationSpec to use to populate the httpRequest.
+     */
+    async sendOperationRequest(operationArguments, operationSpec) {
+        const endpoint = operationSpec.baseUrl || this._endpoint;
+        if (!endpoint) {
+            throw new Error("If operationSpec.baseUrl is not specified, then the ServiceClient must have an endpoint string property that contains the base URL to use.");
+        }
+        // Templatized URLs sometimes reference properties on the ServiceClient child class,
+        // so we have to pass `this` below in order to search these properties if they're
+        // not part of OperationArguments
+        const url = (0, urlHelpers_js_1.getRequestUrl)(endpoint, operationSpec, operationArguments, this);
+        const request = (0, core_rest_pipeline_1.createPipelineRequest)({
+            url,
+        });
+        request.method = operationSpec.httpMethod;
+        const operationInfo = (0, operationHelpers_js_1.getOperationRequestInfo)(request);
+        operationInfo.operationSpec = operationSpec;
+        operationInfo.operationArguments = operationArguments;
+        const contentType = operationSpec.contentType || this._requestContentType;
+        if (contentType && operationSpec.requestBody) {
+            request.headers.set("Content-Type", contentType);
+        }
+        const options = operationArguments.options;
+        if (options) {
+            const requestOptions = options.requestOptions;
+            if (requestOptions) {
+                if (requestOptions.timeout) {
+                    request.timeout = requestOptions.timeout;
+                }
+                if (requestOptions.onUploadProgress) {
+                    request.onUploadProgress = requestOptions.onUploadProgress;
+                }
+                if (requestOptions.onDownloadProgress) {
+                    request.onDownloadProgress = requestOptions.onDownloadProgress;
+                }
+                if (requestOptions.shouldDeserialize !== undefined) {
+                    operationInfo.shouldDeserialize = requestOptions.shouldDeserialize;
+                }
+                if (requestOptions.allowInsecureConnection) {
+                    request.allowInsecureConnection = true;
+                }
+            }
+            if (options.abortSignal) {
+                request.abortSignal = options.abortSignal;
+            }
+            if (options.tracingOptions) {
+                request.tracingOptions = options.tracingOptions;
+            }
+        }
+        if (this._allowInsecureConnection) {
+            request.allowInsecureConnection = true;
+        }
+        if (request.streamResponseStatusCodes === undefined) {
+            request.streamResponseStatusCodes = (0, interfaceHelpers_js_1.getStreamingResponseStatusCodes)(operationSpec);
+        }
+        try {
+            const rawResponse = await this.sendRequest(request);
+            const flatResponse = (0, utils_js_1.flattenResponse)(rawResponse, operationSpec.responses[rawResponse.status]);
+            if (options === null || options === void 0 ? void 0 : options.onResponse) {
+                options.onResponse(rawResponse, flatResponse);
+            }
+            return flatResponse;
+        }
+        catch (error) {
+            if (typeof error === "object" && (error === null || error === void 0 ? void 0 : error.response)) {
+                const rawResponse = error.response;
+                const flatResponse = (0, utils_js_1.flattenResponse)(rawResponse, operationSpec.responses[error.statusCode] || operationSpec.responses["default"]);
+                error.details = flatResponse;
+                if (options === null || options === void 0 ? void 0 : options.onResponse) {
+                    options.onResponse(rawResponse, flatResponse, error);
+                }
+            }
+            throw error;
+        }
+    }
+}
+exports.ServiceClient = ServiceClient;
+function createDefaultPipeline(options) {
+    const credentialScopes = getCredentialScopes(options);
+    const credentialOptions = options.credential && credentialScopes
+        ? { credentialScopes, credential: options.credential }
+        : undefined;
+    return (0, pipeline_js_1.createClientPipeline)(Object.assign(Object.assign({}, options), { credentialOptions }));
+}
+function getCredentialScopes(options) {
+    if (options.credentialScopes) {
+        return options.credentialScopes;
+    }
+    if (options.endpoint) {
+        return `${options.endpoint}/.default`;
+    }
+    if (options.baseUri) {
+        return `${options.baseUri}/.default`;
+    }
+    if (options.credential && !options.credentialScopes) {
+        throw new Error(`When using credentials, the ServiceClientOptions must contain either an endpoint or credentialScopes. Unable to create a bearerTokenAuthenticationPolicy`);
+    }
+    return undefined;
+}
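+// A small sketch of the scope derivation above with an invented endpoint:
+// when no explicit credentialScopes are given, "<endpoint>/.default" is used.
+function exampleScopesForEndpoint() {
+    return getCredentialScopes({ endpoint: "https://contoso.example.com" });
+    // => "https://contoso.example.com/.default"
+}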
+//# sourceMappingURL=serviceClient.js.map
+
+/***/ }),
+
+/***/ 6204:
+/***/ ((__unused_webpack_module, exports) => {
+
+
+// Copyright (c) Microsoft Corporation.
+// Licensed under the MIT license.
+Object.defineProperty(exports, "__esModule", ({ value: true }));
+exports.state = void 0;
+/**
+ * Holds the singleton operationRequestMap, to be shared across CJS and ESM imports.
+ */
+exports.state = {
+    operationRequestMap: new WeakMap(),
+};
+//# sourceMappingURL=state-cjs.cjs.map
+
+/***/ }),
+
+/***/ 5051:
+/***/ ((__unused_webpack_module, exports, __nccwpck_require__) => {
+
+
+// Copyright (c) Microsoft Corporation.
+// Licensed under the MIT license.
+Object.defineProperty(exports, "__esModule", ({ value: true }));
+exports.appendQueryParams = exports.getRequestUrl = void 0;
+const operationHelpers_js_1 = __nccwpck_require__(5047);
+const interfaceHelpers_js_1 = __nccwpck_require__(8785);
+const CollectionFormatToDelimiterMap = {
+    CSV: ",",
+    SSV: " ",
+    Multi: "Multi",
+    TSV: "\t",
+    Pipes: "|",
+};
+function getRequestUrl(baseUri, operationSpec, operationArguments, fallbackObject) {
+    const urlReplacements = calculateUrlReplacements(operationSpec, operationArguments, fallbackObject);
+    let isAbsolutePath = false;
+    let requestUrl = replaceAll(baseUri, urlReplacements);
+    if (operationSpec.path) {
+        let path = replaceAll(operationSpec.path, urlReplacements);
+        // QUIRK: sometimes we get a path component like /{nextLink}
+        // which may be a fully formed URL with a leading /. In that case, we should
+        // remove the leading /
+        if (operationSpec.path === "/{nextLink}" && path.startsWith("/")) {
+            path = path.substring(1);
+        }
+        // QUIRK: sometimes we get a path component like {nextLink}
+        // which may be a fully formed URL. In that case, we should
+        // ignore the baseUri.
+        if (isAbsoluteUrl(path)) {
+            requestUrl = path;
+            isAbsolutePath = true;
+        }
+        else {
+            requestUrl = appendPath(requestUrl, path);
+        }
+    }
+    const { queryParams, sequenceParams } = calculateQueryParameters(operationSpec, operationArguments, fallbackObject);
+    /**
+     * Notice that this call sets the `noOverwrite` parameter to true if the `requestUrl`
+     * is an absolute path. This ensures that existing query parameter values in `requestUrl`
+     * do not get overwritten. On the other hand, when `requestUrl` is not an absolute path, it
+     * is still being built, so there is nothing to overwrite.
+     */
+    requestUrl = appendQueryParams(requestUrl, queryParams, sequenceParams, isAbsolutePath);
+    return requestUrl;
+}
+exports.getRequestUrl = getRequestUrl;
+function replaceAll(input, replacements) {
+    let result = input;
+    for (const [searchValue, replaceValue] of replacements) {
+        result = result.split(searchValue).join(replaceValue);
+    }
+    return result;
+}
+function calculateUrlReplacements(operationSpec, operationArguments, fallbackObject) {
+    var _a;
+    const result = new Map();
+    if ((_a = operationSpec.urlParameters) === null || _a === void 0 ? void 0 : _a.length) {
+        for (const urlParameter of operationSpec.urlParameters) {
+            let urlParameterValue = (0, operationHelpers_js_1.getOperationArgumentValueFromParameter)(operationArguments, urlParameter, fallbackObject);
+            const parameterPathString = (0, interfaceHelpers_js_1.getPathStringFromParameter)(urlParameter);
+            urlParameterValue = operationSpec.serializer.serialize(urlParameter.mapper, urlParameterValue, parameterPathString);
+            if (!urlParameter.skipEncoding) {
+                urlParameterValue = encodeURIComponent(urlParameterValue);
+            }
+            result.set(`{${urlParameter.mapper.serializedName || parameterPathString}}`, urlParameterValue);
+        }
+    }
+    return result;
+}
+function isAbsoluteUrl(url) {
+    return url.includes("://");
+}
+function appendPath(url, pathToAppend) {
+    if (!pathToAppend) {
+        return url;
+    }
+    const parsedUrl = new URL(url);
+    let newPath = parsedUrl.pathname;
+    if (!newPath.endsWith("/")) {
+        newPath = `${newPath}/`;
+    }
+    if (pathToAppend.startsWith("/")) {
+        pathToAppend = pathToAppend.substring(1);
+    }
+    const searchStart = pathToAppend.indexOf("?");
+    if (searchStart !== -1) {
+        const path = pathToAppend.substring(0, searchStart);
+        const search = pathToAppend.substring(searchStart + 1);
+        newPath = newPath + path;
+        if (search) {
+            parsedUrl.search = parsedUrl.search ? `${parsedUrl.search}&${search}` : search;
+        }
+    }
+    else {
+        newPath = newPath + pathToAppend;
+    }
+    parsedUrl.pathname = newPath;
+    return parsedUrl.toString();
+}
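+// A quick sketch of appendPath with invented inputs: the segment is joined to
+// the URL path with a single "/" and any query string in the segment is merged
+// into the URL's existing search parameters.
+function exampleAppendPath() {
+    return appendPath("https://host.example/api", "/widgets?view=full");
+    // => "https://host.example/api/widgets?view=full"
+}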
+function calculateQueryParameters(operationSpec, operationArguments, fallbackObject) {
+    var _a;
+    const result = new Map();
+    const sequenceParams = new Set();
+    if ((_a = operationSpec.queryParameters) === null || _a === void 0 ? void 0 : _a.length) {
+        for (const queryParameter of operationSpec.queryParameters) {
+            if (queryParameter.mapper.type.name === "Sequence" && queryParameter.mapper.serializedName) {
+                sequenceParams.add(queryParameter.mapper.serializedName);
+            }
+            let queryParameterValue = (0, operationHelpers_js_1.getOperationArgumentValueFromParameter)(operationArguments, queryParameter, fallbackObject);
+            if ((queryParameterValue !== undefined && queryParameterValue !== null) ||
+                queryParameter.mapper.required) {
+                queryParameterValue = operationSpec.serializer.serialize(queryParameter.mapper, queryParameterValue, (0, interfaceHelpers_js_1.getPathStringFromParameter)(queryParameter));
+                const delimiter = queryParameter.collectionFormat
+                    ? CollectionFormatToDelimiterMap[queryParameter.collectionFormat]
+                    : "";
+                if (Array.isArray(queryParameterValue)) {
+                    // replace null and undefined
+                    queryParameterValue = queryParameterValue.map((item) => {
+                        if (item === null || item === undefined) {
+                            return "";
+                        }
+                        return item;
+                    });
+                }
+                if (queryParameter.collectionFormat === "Multi" && queryParameterValue.length === 0) {
+                    continue;
+                }
+                else if (Array.isArray(queryParameterValue) &&
+                    (queryParameter.collectionFormat === "SSV" || queryParameter.collectionFormat === "TSV")) {
+                    queryParameterValue = queryParameterValue.join(delimiter);
+                }
+                if (!queryParameter.skipEncoding) {
+                    if (Array.isArray(queryParameterValue)) {
+                        queryParameterValue = queryParameterValue.map((item) => {
+                            return encodeURIComponent(item);
+                        });
+                    }
+                    else {
+                        queryParameterValue = encodeURIComponent(queryParameterValue);
+                    }
+                }
+                // Join pipes and CSV *after* encoding, or the server will be upset.
+                if (Array.isArray(queryParameterValue) &&
+                    (queryParameter.collectionFormat === "CSV" || queryParameter.collectionFormat === "Pipes")) {
+                    queryParameterValue = queryParameterValue.join(delimiter);
+                }
+                result.set(queryParameter.mapper.serializedName || (0, interfaceHelpers_js_1.getPathStringFromParameter)(queryParameter), queryParameterValue);
+            }
+        }
+    }
+    return {
+        queryParams: result,
+        sequenceParams,
+    };
+}
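+// A minimal sketch of how the delimiter map above is applied to an invented
+// list of tag values; as noted in calculateQueryParameters, CSV and Pipes
+// values are joined only *after* each item has been encoded.
+function exampleJoinCsvTags() {
+    const tags = ["a b", "c,d"].map((item) => encodeURIComponent(item));
+    return tags.join(CollectionFormatToDelimiterMap.CSV);
+    // => "a%20b,c%2Cd"
+}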
+function simpleParseQueryParams(queryString) {
+    const result = new Map();
+    if (!queryString || queryString[0] !== "?") {
+        return result;
+    }
+    // remove the leading ?
+    queryString = queryString.slice(1);
+    const pairs = queryString.split("&");
+    for (const pair of pairs) {
+        const [name, value] = pair.split("=", 2);
+        const existingValue = result.get(name);
+        if (existingValue) {
+            if (Array.isArray(existingValue)) {
+                existingValue.push(value);
+            }
+            else {
+                result.set(name, [existingValue, value]);
+            }
+        }
+        else {
+            result.set(name, value);
+        }
+    }
+    return result;
+}
+/** @internal */
+function appendQueryParams(url, queryParams, sequenceParams, noOverwrite = false) {
+    if (queryParams.size === 0) {
+        return url;
+    }
+    const parsedUrl = new URL(url);
+    // QUIRK: parsedUrl.searchParams will have their name/value pairs decoded, which
+    // can change their meaning to the server, such as in the case of a SAS signature.
+    // To avoid accidentally un-encoding a query param, we parse the key/values ourselves
+    const combinedParams = simpleParseQueryParams(parsedUrl.search);
+    for (const [name, value] of queryParams) {
+        const existingValue = combinedParams.get(name);
+        if (Array.isArray(existingValue)) {
+            if (Array.isArray(value)) {
+                existingValue.push(...value);
+                const valueSet = new Set(existingValue);
+                combinedParams.set(name, Array.from(valueSet));
+            }
+            else {
+                existingValue.push(value);
+            }
+        }
+        else if (existingValue) {
+            if (Array.isArray(value)) {
+                value.unshift(existingValue);
+            }
+            else if (sequenceParams.has(name)) {
+                combinedParams.set(name, [existingValue, value]);
+            }
+            if (!noOverwrite) {
+                combinedParams.set(name, value);
+            }
+        }
+        else {
+            combinedParams.set(name, value);
+        }
+    }
+    const searchPieces = [];
+    for (const [name, value] of combinedParams) {
+        if (typeof value === "string") {
+            searchPieces.push(`${name}=${value}`);
+        }
+        else if (Array.isArray(value)) {
+            // QUIRK: If we get an array of values, include multiple key/value pairs
+            for (const subValue of value) {
+                searchPieces.push(`${name}=${subValue}`);
+            }
+        }
+        else {
+            searchPieces.push(`${name}=${value}`);
+        }
+    }
+    // QUIRK: we have to set search manually as searchParams will encode comma when it shouldn't.
+    parsedUrl.search = searchPieces.length ? `?${searchPieces.join("&")}` : "";
+    return parsedUrl.toString();
+}
+exports.appendQueryParams = appendQueryParams;
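+// A short sketch of appendQueryParams with invented values: incoming
+// parameters are merged with whatever is already on the URL, and the existing
+// "sig" value is preserved because the names do not collide.
+function exampleAppendQueryParams() {
+    const params = new Map([["api-version", "2024-01-01"]]);
+    return appendQueryParams("https://host.example/path?sig=abc", params, new Set());
+    // => "https://host.example/path?sig=abc&api-version=2024-01-01"
+}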
+//# sourceMappingURL=urlHelpers.js.map
+
+/***/ }),
+
+/***/ 7316:
+/***/ ((__unused_webpack_module, exports) => {
+
+
+// Copyright (c) Microsoft Corporation.
+// Licensed under the MIT license.
+Object.defineProperty(exports, "__esModule", ({ value: true }));
+exports.flattenResponse = exports.isValidUuid = exports.isDuration = exports.isPrimitiveBody = void 0;
+/**
+ * A type guard for a primitive response body.
+ * @param value - Value to test
+ *
+ * @internal
+ */
+function isPrimitiveBody(value, mapperTypeName) {
+    return (mapperTypeName !== "Composite" &&
+        mapperTypeName !== "Dictionary" &&
+        (typeof value === "string" ||
+            typeof value === "number" ||
+            typeof value === "boolean" ||
+            (mapperTypeName === null || mapperTypeName === void 0 ? void 0 : mapperTypeName.match(/^(Date|DateTime|DateTimeRfc1123|UnixTime|ByteArray|Base64Url)$/i)) !==
+                null ||
+            value === undefined ||
+            value === null));
+}
+exports.isPrimitiveBody = isPrimitiveBody;
+const validateISODuration = /^(-|\+)?P(?:([-+]?[0-9,.]*)Y)?(?:([-+]?[0-9,.]*)M)?(?:([-+]?[0-9,.]*)W)?(?:([-+]?[0-9,.]*)D)?(?:T(?:([-+]?[0-9,.]*)H)?(?:([-+]?[0-9,.]*)M)?(?:([-+]?[0-9,.]*)S)?)?$/;
+/**
+ * Returns true if the given string is in ISO 8601 format.
+ * @param value - The value to be validated for ISO 8601 duration format.
+ * @internal
+ */
+function isDuration(value) {
+    return validateISODuration.test(value);
+}
+exports.isDuration = isDuration;
+const validUuidRegex = /^[0-9a-fA-F]{8}-[0-9a-fA-F]{4}-[0-9a-fA-F]{4}-[0-9a-fA-F]{4}-[0-9a-fA-F]{12}$/i;
+/**
+ * Returns true if the provided uuid is valid.
+ *
+ * @param uuid - The uuid that needs to be validated.
+ *
+ * @internal
+ */
+function isValidUuid(uuid) {
+    return validUuidRegex.test(uuid);
+}
+exports.isValidUuid = isValidUuid;
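+// A tiny sketch exercising the two validators above with invented inputs.
+function exampleValidators() {
+    return {
+        duration: isDuration("PT1H30M"), // true: a valid ISO 8601 duration
+        uuid: isValidUuid("00000000-0000-0000-0000-000000000000"), // true
+    };
+}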
+/**
+ * Maps the response as follows:
+ * - wraps the response body if needed (typically if its type is primitive).
+ * - returns null if the combination of the headers and the body is empty.
+ * - otherwise, returns the combination of the headers and the body.
+ *
+ * @param responseObject - a representation of the parsed response
+ * @returns the response that will be returned to the user which can be null and/or wrapped
+ *
+ * @internal
+ */
+function handleNullableResponseAndWrappableBody(responseObject) {
+    const combinedHeadersAndBody = Object.assign(Object.assign({}, responseObject.headers), responseObject.body);
+    if (responseObject.hasNullableType &&
+        Object.getOwnPropertyNames(combinedHeadersAndBody).length === 0) {
+        return responseObject.shouldWrapBody ? { body: null } : null;
+    }
+    else {
+        return responseObject.shouldWrapBody
+            ? Object.assign(Object.assign({}, responseObject.headers), { body: responseObject.body }) : combinedHeadersAndBody;
+    }
+}
+/**
+ * Take a `FullOperationResponse` and turn it into a flat
+ * response object to hand back to the consumer.
+ * @param fullResponse - The processed response from the operation request
+ * @param responseSpec - The response map from the OperationSpec
+ *
+ * @internal
+ */
+function flattenResponse(fullResponse, responseSpec) {
+    var _a, _b;
+    const parsedHeaders = fullResponse.parsedHeaders;
+    // HEAD methods never have a body, but we set a boolean on the body property
+    // to indicate the presence/absence of the resource
+    if (fullResponse.request.method === "HEAD") {
+        return Object.assign(Object.assign({}, parsedHeaders), { body: fullResponse.parsedBody });
+    }
+    const bodyMapper = responseSpec && responseSpec.bodyMapper;
+    const isNullable = Boolean(bodyMapper === null || bodyMapper === void 0 ? void 0 : bodyMapper.nullable);
+    const expectedBodyTypeName = bodyMapper === null || bodyMapper === void 0 ? void 0 : bodyMapper.type.name;
+    /** If the body is asked for, we look at the expected body type to handle it */
+    if (expectedBodyTypeName === "Stream") {
+        return Object.assign(Object.assign({}, parsedHeaders), { blobBody: fullResponse.blobBody, readableStreamBody: fullResponse.readableStreamBody });
+    }
+    const modelProperties = (expectedBodyTypeName === "Composite" &&
+        bodyMapper.type.modelProperties) ||
+        {};
+    const isPageableResponse = Object.keys(modelProperties).some((k) => modelProperties[k].serializedName === "");
+    if (expectedBodyTypeName === "Sequence" || isPageableResponse) {
+        const arrayResponse = (_a = fullResponse.parsedBody) !== null && _a !== void 0 ? _a : [];
+        for (const key of Object.keys(modelProperties)) {
+            if (modelProperties[key].serializedName) {
+                arrayResponse[key] = (_b = fullResponse.parsedBody) === null || _b === void 0 ? void 0 : _b[key];
+            }
+        }
+        if (parsedHeaders) {
+            for (const key of Object.keys(parsedHeaders)) {
+                arrayResponse[key] = parsedHeaders[key];
+            }
+        }
+        return isNullable &&
+            !fullResponse.parsedBody &&
+            !parsedHeaders &&
+            Object.getOwnPropertyNames(modelProperties).length === 0
+            ? null
+            : arrayResponse;
+    }
+    return handleNullableResponseAndWrappableBody({
+        body: fullResponse.parsedBody,
+        headers: parsedHeaders,
+        hasNullableType: isNullable,
+        shouldWrapBody: isPrimitiveBody(fullResponse.parsedBody, expectedBodyTypeName),
+    });
+}
+exports.flattenResponse = flattenResponse;
+//# sourceMappingURL=utils.js.map
+
+/***/ }),
+
+/***/ 5732:
+/***/ ((__unused_webpack_module, exports, __nccwpck_require__) => {
+
+
+// Copyright (c) Microsoft Corporation.
+// Licensed under the MIT license.
+Object.defineProperty(exports, "__esModule", ({ value: true }));
+exports.ExtendedServiceClient = void 0;
+const disableKeepAlivePolicy_js_1 = __nccwpck_require__(3951);
+const core_rest_pipeline_1 = __nccwpck_require__(5565);
+const core_client_1 = __nccwpck_require__(8953);
+const response_js_1 = __nccwpck_require__(9);
+/**
+ * Client to provide compatibility between core V1 & V2.
+ */
+class ExtendedServiceClient extends core_client_1.ServiceClient {
+    constructor(options) {
+        var _a, _b;
+        super(options);
+        if (((_a = options.keepAliveOptions) === null || _a === void 0 ? void 0 : _a.enable) === false &&
+            !(0, disableKeepAlivePolicy_js_1.pipelineContainsDisableKeepAlivePolicy)(this.pipeline)) {
+            this.pipeline.addPolicy((0, disableKeepAlivePolicy_js_1.createDisableKeepAlivePolicy)());
+        }
+        if (((_b = options.redirectOptions) === null || _b === void 0 ? void 0 : _b.handleRedirects) === false) {
+            this.pipeline.removePolicy({
+                name: core_rest_pipeline_1.redirectPolicyName,
+            });
+        }
+    }
+    /**
+     * Compatible send operation request function.
+     *
+     * @param operationArguments - Operation arguments
+     * @param operationSpec - Operation Spec
+     * @returns
+     */
+    async sendOperationRequest(operationArguments, operationSpec) {
+        var _a;
+        const userProvidedCallBack = (_a = operationArguments === null || operationArguments === void 0 ? void 0 : operationArguments.options) === null || _a === void 0 ? void 0 : _a.onResponse;
+        let lastResponse;
+        function onResponse(rawResponse, flatResponse, error) {
+            lastResponse = rawResponse;
+            if (userProvidedCallBack) {
+                userProvidedCallBack(rawResponse, flatResponse, error);
+            }
+        }
+        operationArguments.options = Object.assign(Object.assign({}, operationArguments.options), { onResponse });
+        const result = await super.sendOperationRequest(operationArguments, operationSpec);
+        if (lastResponse) {
+            Object.defineProperty(result, "_response", {
+                value: (0, response_js_1.toCompatResponse)(lastResponse),
+            });
+        }
+        return result;
+    }
+}
+exports.ExtendedServiceClient = ExtendedServiceClient;
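+// A brief usage sketch with an invented endpoint: keepAliveOptions.enable=false
+// adds the disable-keep-alive policy, and redirectOptions.handleRedirects=false
+// removes the default redirect policy from the pipeline, as the constructor
+// above shows.
+function exampleCreateCompatClient() {
+    return new ExtendedServiceClient({
+        endpoint: "https://contoso.example.com",
+        keepAliveOptions: { enable: false },
+        redirectOptions: { handleRedirects: false },
+    });
+}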
+//# sourceMappingURL=extendedClient.js.map
+
+/***/ }),
+
+/***/ 4094:
+/***/ ((__unused_webpack_module, exports, __nccwpck_require__) => {
+
+
+// Copyright (c) Microsoft Corporation.
+// Licensed under the MIT license.
+Object.defineProperty(exports, "__esModule", ({ value: true }));
+exports.convertHttpClient = void 0;
+const response_js_1 = __nccwpck_require__(9);
+const util_js_1 = __nccwpck_require__(218);
+/**
+ * Converts a RequestPolicy based HttpClient to a PipelineRequest based HttpClient.
+ * @param requestPolicyClient - An HttpClient compatible with core-http
+ * @returns An HttpClient compatible with core-rest-pipeline
+ */
+function convertHttpClient(requestPolicyClient) {
+    return {
+        sendRequest: async (request) => {
+            const response = await requestPolicyClient.sendRequest((0, util_js_1.toWebResourceLike)(request, { createProxy: true }));
+            return (0, response_js_1.toPipelineResponse)(response);
+        },
+    };
+}
+exports.convertHttpClient = convertHttpClient;
+//# sourceMappingURL=httpClientAdapter.js.map
+
+/***/ }),
+
+/***/ 7184:
+/***/ ((__unused_webpack_module, exports, __nccwpck_require__) => {
+
+
+// Copyright (c) Microsoft Corporation.
+// Licensed under the MIT license.
+Object.defineProperty(exports, "__esModule", ({ value: true }));
+exports.toHttpHeadersLike = exports.convertHttpClient = exports.disableKeepAlivePolicyName = exports.HttpPipelineLogLevel = exports.createRequestPolicyFactoryPolicy = exports.requestPolicyFactoryPolicyName = exports.ExtendedServiceClient = void 0;
+/**
+ * A Shim Library that provides compatibility between Core V1 & V2 Packages.
+ *
+ * @packageDocumentation
+ */
+var extendedClient_js_1 = __nccwpck_require__(5732);
+Object.defineProperty(exports, "ExtendedServiceClient", ({ enumerable: true, get: function () { return extendedClient_js_1.ExtendedServiceClient; } }));
+var requestPolicyFactoryPolicy_js_1 = __nccwpck_require__(3850);
+Object.defineProperty(exports, "requestPolicyFactoryPolicyName", ({ enumerable: true, get: function () { return requestPolicyFactoryPolicy_js_1.requestPolicyFactoryPolicyName; } }));
+Object.defineProperty(exports, "createRequestPolicyFactoryPolicy", ({ enumerable: true, get: function () { return requestPolicyFactoryPolicy_js_1.createRequestPolicyFactoryPolicy; } }));
+Object.defineProperty(exports, "HttpPipelineLogLevel", ({ enumerable: true, get: function () { return requestPolicyFactoryPolicy_js_1.HttpPipelineLogLevel; } }));
+var disableKeepAlivePolicy_js_1 = __nccwpck_require__(3951);
+Object.defineProperty(exports, "disableKeepAlivePolicyName", ({ enumerable: true, get: function () { return disableKeepAlivePolicy_js_1.disableKeepAlivePolicyName; } }));
+var httpClientAdapter_js_1 = __nccwpck_require__(4094);
+Object.defineProperty(exports, "convertHttpClient", ({ enumerable: true, get: function () { return httpClientAdapter_js_1.convertHttpClient; } }));
+var util_js_1 = __nccwpck_require__(218);
+Object.defineProperty(exports, "toHttpHeadersLike", ({ enumerable: true, get: function () { return util_js_1.toHttpHeadersLike; } }));
+//# sourceMappingURL=index.js.map
+
+/***/ }),
+
+/***/ 3951:
+/***/ ((__unused_webpack_module, exports) => {
+
+
+// Copyright (c) Microsoft Corporation.
+// Licensed under the MIT license.
+Object.defineProperty(exports, "__esModule", ({ value: true }));
+exports.pipelineContainsDisableKeepAlivePolicy = exports.createDisableKeepAlivePolicy = exports.disableKeepAlivePolicyName = void 0;
+exports.disableKeepAlivePolicyName = "DisableKeepAlivePolicy";
+function createDisableKeepAlivePolicy() {
+    return {
+        name: exports.disableKeepAlivePolicyName,
+        async sendRequest(request, next) {
+            request.disableKeepAlive = true;
+            return next(request);
+        },
+    };
+}
+exports.createDisableKeepAlivePolicy = createDisableKeepAlivePolicy;
+/**
+ * @internal
+ */
+function pipelineContainsDisableKeepAlivePolicy(pipeline) {
+    return pipeline.getOrderedPolicies().some((policy) => policy.name === exports.disableKeepAlivePolicyName);
+}
+exports.pipelineContainsDisableKeepAlivePolicy = pipelineContainsDisableKeepAlivePolicy;
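+// A minimal sketch, assuming `pipeline` is a core-rest-pipeline Pipeline: the
+// policy above simply sets disableKeepAlive on every outgoing request, and is
+// only added once.
+function exampleEnsureDisableKeepAlive(pipeline) {
+    if (!pipelineContainsDisableKeepAlivePolicy(pipeline)) {
+        pipeline.addPolicy(createDisableKeepAlivePolicy());
+    }
+}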
+//# sourceMappingURL=disableKeepAlivePolicy.js.map
+
+/***/ }),
+
+/***/ 3850:
+/***/ ((__unused_webpack_module, exports, __nccwpck_require__) => {
+
+
+// Copyright (c) Microsoft Corporation.
+// Licensed under the MIT license.
+Object.defineProperty(exports, "__esModule", ({ value: true }));
+exports.createRequestPolicyFactoryPolicy = exports.requestPolicyFactoryPolicyName = exports.HttpPipelineLogLevel = void 0;
+const util_js_1 = __nccwpck_require__(218);
+const response_js_1 = __nccwpck_require__(9);
+/**
+ * An enum for compatibility with RequestPolicy
+ */
+var HttpPipelineLogLevel;
+(function (HttpPipelineLogLevel) {
+    HttpPipelineLogLevel[HttpPipelineLogLevel["ERROR"] = 1] = "ERROR";
+    HttpPipelineLogLevel[HttpPipelineLogLevel["INFO"] = 3] = "INFO";
+    HttpPipelineLogLevel[HttpPipelineLogLevel["OFF"] = 0] = "OFF";
+    HttpPipelineLogLevel[HttpPipelineLogLevel["WARNING"] = 2] = "WARNING";
+})(HttpPipelineLogLevel || (exports.HttpPipelineLogLevel = HttpPipelineLogLevel = {}));
+const mockRequestPolicyOptions = {
+    log(_logLevel, _message) {
+        /* do nothing */
+    },
+    shouldLog(_logLevel) {
+        return false;
+    },
+};
+/**
+ * The name of the RequestPolicyFactoryPolicy
+ */
+exports.requestPolicyFactoryPolicyName = "RequestPolicyFactoryPolicy";
+/**
+ * A policy that wraps policies written for core-http.
+ * @param factories - An array of `RequestPolicyFactory` objects from a core-http pipeline
+ */
+function createRequestPolicyFactoryPolicy(factories) {
+    const orderedFactories = factories.slice().reverse();
+    return {
+        name: exports.requestPolicyFactoryPolicyName,
+        async sendRequest(request, next) {
+            let httpPipeline = {
+                async sendRequest(httpRequest) {
+                    const response = await next((0, util_js_1.toPipelineRequest)(httpRequest));
+                    return (0, response_js_1.toCompatResponse)(response, { createProxy: true });
+                },
+            };
+            for (const factory of orderedFactories) {
+                httpPipeline = factory.create(httpPipeline, mockRequestPolicyOptions);
+            }
+            const webResourceLike = (0, util_js_1.toWebResourceLike)(request, { createProxy: true });
+            const response = await httpPipeline.sendRequest(webResourceLike);
+            return (0, response_js_1.toPipelineResponse)(response);
+        },
+    };
+}
+exports.createRequestPolicyFactoryPolicy = createRequestPolicyFactoryPolicy;
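+// A short sketch, assuming `pipeline` is a core-rest-pipeline Pipeline and
+// `legacyFactories` is an array of core-http RequestPolicyFactory objects: the
+// factories are wrapped into a single policy and added to the pipeline.
+function exampleAddLegacyFactories(pipeline, legacyFactories) {
+    pipeline.addPolicy(createRequestPolicyFactoryPolicy(legacyFactories));
+}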
+//# sourceMappingURL=requestPolicyFactoryPolicy.js.map
+
+/***/ }),
+
+/***/ 9:
+/***/ ((__unused_webpack_module, exports, __nccwpck_require__) => {
+
+
+// Copyright (c) Microsoft Corporation.
+// Licensed under the MIT license.
+Object.defineProperty(exports, "__esModule", ({ value: true }));
+exports.toPipelineResponse = exports.toCompatResponse = void 0;
+const core_rest_pipeline_1 = __nccwpck_require__(5565);
+const util_js_1 = __nccwpck_require__(218);
+const originalResponse = Symbol("Original FullOperationResponse");
+/**
+ * A helper to convert response objects from the new pipeline back to the old one.
+ * @param response - A response object from core-client.
+ * @returns A response compatible with `HttpOperationResponse` from core-http.
+ */
+function toCompatResponse(response, options) {
+    let request = (0, util_js_1.toWebResourceLike)(response.request);
+    let headers = (0, util_js_1.toHttpHeadersLike)(response.headers);
+    if (options === null || options === void 0 ? void 0 : options.createProxy) {
+        return new Proxy(response, {
+            get(target, prop, receiver) {
+                if (prop === "headers") {
+                    return headers;
+                }
+                else if (prop === "request") {
+                    return request;
+                }
+                else if (prop === originalResponse) {
+                    return response;
+                }
+                return Reflect.get(target, prop, receiver);
+            },
+            set(target, prop, value, receiver) {
+                if (prop === "headers") {
+                    headers = value;
+                }
+                else if (prop === "request") {
+                    request = value;
+                }
+                return Reflect.set(target, prop, value, receiver);
+            },
+        });
+    }
+    else {
+        return Object.assign(Object.assign({}, response), { request,
+            headers });
+    }
+}
+exports.toCompatResponse = toCompatResponse;
+/**
+ * A helper to convert back to a PipelineResponse
+ * @param compatResponse - A response compatible with `HttpOperationResponse` from core-http.
+ */
+function toPipelineResponse(compatResponse) {
+    const extendedCompatResponse = compatResponse;
+    const response = extendedCompatResponse[originalResponse];
+    const headers = (0, core_rest_pipeline_1.createHttpHeaders)(compatResponse.headers.toJson({ preserveCase: true }));
+    if (response) {
+        response.headers = headers;
+        return response;
+    }
+    else {
+        return Object.assign(Object.assign({}, compatResponse), { headers, request: (0, util_js_1.toPipelineRequest)(compatResponse.request) });
+    }
+}
+exports.toPipelineResponse = toPipelineResponse;
+//# sourceMappingURL=response.js.map
+
+/***/ }),
+
+/***/ 218:
+/***/ ((__unused_webpack_module, exports, __nccwpck_require__) => {
+
+
+// Copyright (c) Microsoft Corporation.
+// Licensed under the MIT license.
+Object.defineProperty(exports, "__esModule", ({ value: true }));
+exports.HttpHeaders = exports.toHttpHeadersLike = exports.toWebResourceLike = exports.toPipelineRequest = void 0;
+const core_rest_pipeline_1 = __nccwpck_require__(5565);
+// We use a custom symbol to cache a reference to the original request without
+// exposing it on the public interface.
+const originalRequestSymbol = Symbol("Original PipelineRequest");
+// Symbol.for() will return the same symbol if it's already been created
+// This particular one is used in core-client to handle the case of when a request is
+// cloned but we need to retrieve the OperationSpec and OperationArguments from the
+// original request.
+const originalClientRequestSymbol = Symbol.for("@azure/core-client original request");
+function toPipelineRequest(webResource, options = {}) {
+    const compatWebResource = webResource;
+    const request = compatWebResource[originalRequestSymbol];
+    const headers = (0, core_rest_pipeline_1.createHttpHeaders)(webResource.headers.toJson({ preserveCase: true }));
+    if (request) {
+        request.headers = headers;
+        return request;
+    }
+    else {
+        const newRequest = (0, core_rest_pipeline_1.createPipelineRequest)({
+            url: webResource.url,
+            method: webResource.method,
+            headers,
+            withCredentials: webResource.withCredentials,
+            timeout: webResource.timeout,
+            requestId: webResource.requestId,
+            abortSignal: webResource.abortSignal,
+            body: webResource.body,
+            formData: webResource.formData,
+            disableKeepAlive: !!webResource.keepAlive,
+            onDownloadProgress: webResource.onDownloadProgress,
+            onUploadProgress: webResource.onUploadProgress,
+            proxySettings: webResource.proxySettings,
+            streamResponseStatusCodes: webResource.streamResponseStatusCodes,
+        });
+        if (options.originalRequest) {
+            newRequest[originalClientRequestSymbol] =
+                options.originalRequest;
+        }
+        return newRequest;
+    }
+}
+exports.toPipelineRequest = toPipelineRequest;
+function toWebResourceLike(request, options) {
+    var _a;
+    const originalRequest = (_a = options === null || options === void 0 ? void 0 : options.originalRequest) !== null && _a !== void 0 ? _a : request;
+    const webResource = {
+        url: request.url,
+        method: request.method,
+        headers: toHttpHeadersLike(request.headers),
+        withCredentials: request.withCredentials,
+        timeout: request.timeout,
+        requestId: request.headers.get("x-ms-client-request-id") || request.requestId,
+        abortSignal: request.abortSignal,
+        body: request.body,
+        formData: request.formData,
+        keepAlive: !!request.disableKeepAlive,
+        onDownloadProgress: request.onDownloadProgress,
+        onUploadProgress: request.onUploadProgress,
+        proxySettings: request.proxySettings,
+        streamResponseStatusCodes: request.streamResponseStatusCodes,
+        clone() {
+            throw new Error("Cannot clone a non-proxied WebResourceLike");
+        },
+        prepare() {
+            throw new Error("WebResourceLike.prepare() is not supported by @azure/core-http-compat");
+        },
+        validateRequestProperties() {
+            /** do nothing */
+        },
+    };
+    if (options === null || options === void 0 ? void 0 : options.createProxy) {
+        return new Proxy(webResource, {
+            get(target, prop, receiver) {
+                if (prop === originalRequestSymbol) {
+                    return request;
+                }
+                else if (prop === "clone") {
+                    return () => {
+                        return toWebResourceLike(toPipelineRequest(webResource, { originalRequest }), {
+                            createProxy: true,
+                            originalRequest,
+                        });
+                    };
+                }
+                return Reflect.get(target, prop, receiver);
+            },
+            set(target, prop, value, receiver) {
+                if (prop === "keepAlive") {
+                    request.disableKeepAlive = !value;
+                }
+                const passThroughProps = [
+                    "url",
+                    "method",
+                    "withCredentials",
+                    "timeout",
+                    "requestId",
+                    "abortSignal",
+                    "body",
+                    "formData",
+                    "onDownloadProgress",
+                    "onUploadProgress",
+                    "proxySettings",
+                    "streamResponseStatusCodes",
+                ];
+                if (typeof prop === "string" && passThroughProps.includes(prop)) {
+                    request[prop] = value;
+                }
+                return Reflect.set(target, prop, value, receiver);
+            },
+        });
+    }
+    else {
+        return webResource;
+    }
+}
+exports.toWebResourceLike = toWebResourceLike;
+/**
+ * Converts HttpHeaders from core-rest-pipeline to look like
+ * HttpHeaders from core-http.
+ * @param headers - HttpHeaders from core-rest-pipeline
+ * @returns HttpHeaders as they looked in core-http
+ */
+function toHttpHeadersLike(headers) {
+    return new HttpHeaders(headers.toJSON({ preserveCase: true }));
+}
+exports.toHttpHeadersLike = toHttpHeadersLike;
+/**
+ * A collection of HttpHeaders that can be sent with an HTTP request.
+ */
+function getHeaderKey(headerName) {
+    return headerName.toLowerCase();
+}
+/**
+ * A collection of HTTP header key/value pairs.
+ */
+class HttpHeaders {
+    constructor(rawHeaders) {
+        this._headersMap = {};
+        if (rawHeaders) {
+            for (const headerName in rawHeaders) {
+                this.set(headerName, rawHeaders[headerName]);
+            }
+        }
+    }
+    /**
+     * Set a header in this collection with the provided name and value. The name is
+     * case-insensitive.
+     * @param headerName - The name of the header to set. This value is case-insensitive.
+     * @param headerValue - The value of the header to set.
+     */
+    set(headerName, headerValue) {
+        this._headersMap[getHeaderKey(headerName)] = {
+            name: headerName,
+            value: headerValue.toString(),
+        };
+    }
+    /**
+     * Get the header value for the provided header name, or undefined if no header exists in this
+     * collection with the provided name.
+     * @param headerName - The name of the header.
+     */
+    get(headerName) {
+        const header = this._headersMap[getHeaderKey(headerName)];
+        return !header ? undefined : header.value;
+    }
+    /**
+     * Get whether or not this header collection contains a header entry for the provided header name.
+     */
+    contains(headerName) {
+        return !!this._headersMap[getHeaderKey(headerName)];
+    }
+    /**
+     * Remove the header with the provided headerName. Return whether or not the header existed and
+     * was removed.
+     * @param headerName - The name of the header to remove.
+     */
+    remove(headerName) {
+        const result = this.contains(headerName);
+        delete this._headersMap[getHeaderKey(headerName)];
+        return result;
+    }
+    /**
+     * Get the headers that are contained in this collection as an object.
+     */
+    rawHeaders() {
+        return this.toJson({ preserveCase: true });
+    }
+    /**
+     * Get the headers that are contained in this collection as an array.
+     */
+    headersArray() {
+        const headers = [];
+        for (const headerKey in this._headersMap) {
+            headers.push(this._headersMap[headerKey]);
+        }
+        return headers;
+    }
+    /**
+     * Get the header names that are contained in this collection.
+     */
+    headerNames() {
+        const headerNames = [];
+        const headers = this.headersArray();
+        for (let i = 0; i < headers.length; ++i) {
+            headerNames.push(headers[i].name);
+        }
+        return headerNames;
+    }
+    /**
+     * Get the header values that are contained in this collection.
+     */
+    headerValues() {
+        const headerValues = [];
+        const headers = this.headersArray();
+        for (let i = 0; i < headers.length; ++i) {
+            headerValues.push(headers[i].value);
+        }
+        return headerValues;
+    }
+    /**
+     * Get the JSON object representation of this HTTP header collection.
+     */
+    toJson(options = {}) {
+        const result = {};
+        if (options.preserveCase) {
+            for (const headerKey in this._headersMap) {
+                const header = this._headersMap[headerKey];
+                result[header.name] = header.value;
+            }
+        }
+        else {
+            for (const headerKey in this._headersMap) {
+                const header = this._headersMap[headerKey];
+                result[getHeaderKey(header.name)] = header.value;
+            }
+        }
+        return result;
+    }
+    /**
+     * Get the string representation of this HTTP header collection.
+     */
+    toString() {
+        return JSON.stringify(this.toJson({ preserveCase: true }));
+    }
+    /**
+     * Create a deep clone/copy of this HttpHeaders collection.
+     */
+    clone() {
+        const resultPreservingCasing = {};
+        for (const headerKey in this._headersMap) {
+            const header = this._headersMap[headerKey];
+            resultPreservingCasing[header.name] = header.value;
+        }
+        return new HttpHeaders(resultPreservingCasing);
+    }
+}
+exports.HttpHeaders = HttpHeaders;
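+/**
+ * A minimal usage sketch for the compatibility helpers above, assuming the
+ * `createHttpHeaders` factory from `@azure/core-rest-pipeline`; the header
+ * names and values are placeholders.
+ *
+ * ```ts
+ * import { createHttpHeaders } from "@azure/core-rest-pipeline";
+ *
+ * const pipelineHeaders = createHttpHeaders({ "Content-Type": "application/json" });
+ * const legacyHeaders = toHttpHeadersLike(pipelineHeaders);
+ *
+ * // Lookups are case-insensitive; rawHeaders() preserves the original casing.
+ * legacyHeaders.get("content-type"); // "application/json"
+ * legacyHeaders.set("x-ms-client-request-id", "12345");
+ * legacyHeaders.rawHeaders(); // { "Content-Type": "application/json", "x-ms-client-request-id": "12345" }
+ * ```
+ */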
+//# sourceMappingURL=util.js.map
+
+/***/ }),
+
+/***/ 7146:
+/***/ ((__unused_webpack_module, exports, __nccwpck_require__) => {
+
+
+// Copyright (c) Microsoft Corporation.
+// Licensed under the MIT license.
+Object.defineProperty(exports, "__esModule", ({ value: true }));
+exports.pollHttpOperation = exports.isOperationError = exports.getResourceLocation = exports.getOperationStatus = exports.getOperationLocation = exports.initHttpOperation = exports.getStatusFromInitialResponse = exports.getErrorFromResponse = exports.parseRetryAfter = exports.inferLroMode = void 0;
+const operation_js_1 = __nccwpck_require__(804);
+const logger_js_1 = __nccwpck_require__(1932);
+function getOperationLocationPollingUrl(inputs) {
+    const { azureAsyncOperation, operationLocation } = inputs;
+    return operationLocation !== null && operationLocation !== void 0 ? operationLocation : azureAsyncOperation;
+}
+function getLocationHeader(rawResponse) {
+    return rawResponse.headers["location"];
+}
+function getOperationLocationHeader(rawResponse) {
+    return rawResponse.headers["operation-location"];
+}
+function getAzureAsyncOperationHeader(rawResponse) {
+    return rawResponse.headers["azure-asyncoperation"];
+}
+function findResourceLocation(inputs) {
+    var _a;
+    const { location, requestMethod, requestPath, resourceLocationConfig } = inputs;
+    switch (requestMethod) {
+        case "PUT": {
+            return requestPath;
+        }
+        case "DELETE": {
+            return undefined;
+        }
+        case "PATCH": {
+            return (_a = getDefault()) !== null && _a !== void 0 ? _a : requestPath;
+        }
+        default: {
+            return getDefault();
+        }
+    }
+    function getDefault() {
+        switch (resourceLocationConfig) {
+            case "azure-async-operation": {
+                return undefined;
+            }
+            case "original-uri": {
+                return requestPath;
+            }
+            case "location":
+            default: {
+                return location;
+            }
+        }
+    }
+}
+function inferLroMode(inputs) {
+    const { rawResponse, requestMethod, requestPath, resourceLocationConfig } = inputs;
+    const operationLocation = getOperationLocationHeader(rawResponse);
+    const azureAsyncOperation = getAzureAsyncOperationHeader(rawResponse);
+    const pollingUrl = getOperationLocationPollingUrl({ operationLocation, azureAsyncOperation });
+    const location = getLocationHeader(rawResponse);
+    const normalizedRequestMethod = requestMethod === null || requestMethod === void 0 ? void 0 : requestMethod.toLocaleUpperCase();
+    if (pollingUrl !== undefined) {
+        return {
+            mode: "OperationLocation",
+            operationLocation: pollingUrl,
+            resourceLocation: findResourceLocation({
+                requestMethod: normalizedRequestMethod,
+                location,
+                requestPath,
+                resourceLocationConfig,
+            }),
+        };
+    }
+    else if (location !== undefined) {
+        return {
+            mode: "ResourceLocation",
+            operationLocation: location,
+        };
+    }
+    else if (normalizedRequestMethod === "PUT" && requestPath) {
+        return {
+            mode: "Body",
+            operationLocation: requestPath,
+        };
+    }
+    else {
+        return undefined;
+    }
+}
+exports.inferLroMode = inferLroMode;
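+/**
+ * A worked sketch of the inference above; the URLs and paths are placeholders.
+ *
+ * ```ts
+ * // An "Operation-Location" (or "Azure-AsyncOperation") header selects OperationLocation mode.
+ * inferLroMode({
+ *   rawResponse: {
+ *     statusCode: 202,
+ *     headers: { "operation-location": "https://contoso.example/operations/1" },
+ *     body: {},
+ *   },
+ *   requestMethod: "POST",
+ *   requestPath: "/widgets",
+ * });
+ * // => { mode: "OperationLocation", operationLocation: "https://contoso.example/operations/1", resourceLocation: undefined }
+ *
+ * // A bare "Location" header selects ResourceLocation mode, and a PUT with neither
+ * // header falls back to Body mode, polling the original request path.
+ * ```
+ */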
+function transformStatus(inputs) {
+    const { status, statusCode } = inputs;
+    if (typeof status !== "string" && status !== undefined) {
+        throw new Error(`Polling was unsuccessful. Expected status to have a string value or no value but it has instead: ${status}. This doesn't necessarily indicate the operation has failed. Check your Azure subscription or resource status for more information.`);
+    }
+    switch (status === null || status === void 0 ? void 0 : status.toLocaleLowerCase()) {
+        case undefined:
+            return toOperationStatus(statusCode);
+        case "succeeded":
+            return "succeeded";
+        case "failed":
+            return "failed";
+        case "running":
+        case "accepted":
+        case "started":
+        case "canceling":
+        case "cancelling":
+            return "running";
+        case "canceled":
+        case "cancelled":
+            return "canceled";
+        default: {
+            logger_js_1.logger.verbose(`LRO: unrecognized operation status: ${status}`);
+            return status;
+        }
+    }
+}
+function getStatus(rawResponse) {
+    var _a;
+    const { status } = (_a = rawResponse.body) !== null && _a !== void 0 ? _a : {};
+    return transformStatus({ status, statusCode: rawResponse.statusCode });
+}
+function getProvisioningState(rawResponse) {
+    var _a, _b;
+    const { properties, provisioningState } = (_a = rawResponse.body) !== null && _a !== void 0 ? _a : {};
+    const status = (_b = properties === null || properties === void 0 ? void 0 : properties.provisioningState) !== null && _b !== void 0 ? _b : provisioningState;
+    return transformStatus({ status, statusCode: rawResponse.statusCode });
+}
+function toOperationStatus(statusCode) {
+    if (statusCode === 202) {
+        return "running";
+    }
+    else if (statusCode < 300) {
+        return "succeeded";
+    }
+    else {
+        return "failed";
+    }
+}
+function parseRetryAfter({ rawResponse }) {
+    const retryAfter = rawResponse.headers["retry-after"];
+    if (retryAfter !== undefined) {
+        // Retry-After header value is either in HTTP date format, or in seconds
+        const retryAfterInSeconds = parseInt(retryAfter);
+        return isNaN(retryAfterInSeconds)
+            ? calculatePollingIntervalFromDate(new Date(retryAfter))
+            : retryAfterInSeconds * 1000;
+    }
+    return undefined;
+}
+exports.parseRetryAfter = parseRetryAfter;
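+/**
+ * A worked sketch of the Retry-After handling above; the header values are placeholders.
+ *
+ * ```ts
+ * // Seconds form: "5" yields a 5000 ms polling delay.
+ * parseRetryAfter({ rawResponse: { statusCode: 202, headers: { "retry-after": "5" }, body: {} } });
+ *
+ * // HTTP-date form: the delay is the time remaining until that date, or undefined
+ * // if the date is already in the past (or the header is absent).
+ * parseRetryAfter({
+ *   rawResponse: { statusCode: 202, headers: { "retry-after": "Wed, 01 Jan 2031 00:00:00 GMT" }, body: {} },
+ * });
+ * ```
+ */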
+function getErrorFromResponse(response) {
+    const error = accessBodyProperty(response, "error");
+    if (!error) {
+        logger_js_1.logger.warning(`The long-running operation failed but there is no error property in the response's body`);
+        return;
+    }
+    if (!error.code || !error.message) {
+        logger_js_1.logger.warning(`The long-running operation failed but the error property in the response's body doesn't contain code or message`);
+        return;
+    }
+    return error;
+}
+exports.getErrorFromResponse = getErrorFromResponse;
+function calculatePollingIntervalFromDate(retryAfterDate) {
+    const timeNow = Math.floor(new Date().getTime());
+    const retryAfterTime = retryAfterDate.getTime();
+    if (timeNow < retryAfterTime) {
+        return retryAfterTime - timeNow;
+    }
+    return undefined;
+}
+function getStatusFromInitialResponse(inputs) {
+    const { response, state, operationLocation } = inputs;
+    function helper() {
+        var _a;
+        const mode = (_a = state.config.metadata) === null || _a === void 0 ? void 0 : _a["mode"];
+        switch (mode) {
+            case undefined:
+                return toOperationStatus(response.rawResponse.statusCode);
+            case "Body":
+                return getOperationStatus(response, state);
+            default:
+                return "running";
+        }
+    }
+    const status = helper();
+    return status === "running" && operationLocation === undefined ? "succeeded" : status;
+}
+exports.getStatusFromInitialResponse = getStatusFromInitialResponse;
+/**
+ * Initiates the long-running operation.
+ */
+async function initHttpOperation(inputs) {
+    const { stateProxy, resourceLocationConfig, processResult, lro, setErrorAsResult } = inputs;
+    return (0, operation_js_1.initOperation)({
+        init: async () => {
+            const response = await lro.sendInitialRequest();
+            const config = inferLroMode({
+                rawResponse: response.rawResponse,
+                requestPath: lro.requestPath,
+                requestMethod: lro.requestMethod,
+                resourceLocationConfig,
+            });
+            return Object.assign({ response, operationLocation: config === null || config === void 0 ? void 0 : config.operationLocation, resourceLocation: config === null || config === void 0 ? void 0 : config.resourceLocation }, ((config === null || config === void 0 ? void 0 : config.mode) ? { metadata: { mode: config.mode } } : {}));
+        },
+        stateProxy,
+        processResult: processResult
+            ? ({ flatResponse }, state) => processResult(flatResponse, state)
+            : ({ flatResponse }) => flatResponse,
+        getOperationStatus: getStatusFromInitialResponse,
+        setErrorAsResult,
+    });
+}
+exports.initHttpOperation = initHttpOperation;
+function getOperationLocation({ rawResponse }, state) {
+    var _a;
+    const mode = (_a = state.config.metadata) === null || _a === void 0 ? void 0 : _a["mode"];
+    switch (mode) {
+        case "OperationLocation": {
+            return getOperationLocationPollingUrl({
+                operationLocation: getOperationLocationHeader(rawResponse),
+                azureAsyncOperation: getAzureAsyncOperationHeader(rawResponse),
+            });
+        }
+        case "ResourceLocation": {
+            return getLocationHeader(rawResponse);
+        }
+        case "Body":
+        default: {
+            return undefined;
+        }
+    }
+}
+exports.getOperationLocation = getOperationLocation;
+function getOperationStatus({ rawResponse }, state) {
+    var _a;
+    const mode = (_a = state.config.metadata) === null || _a === void 0 ? void 0 : _a["mode"];
+    switch (mode) {
+        case "OperationLocation": {
+            return getStatus(rawResponse);
+        }
+        case "ResourceLocation": {
+            return toOperationStatus(rawResponse.statusCode);
+        }
+        case "Body": {
+            return getProvisioningState(rawResponse);
+        }
+        default:
+            throw new Error(`Internal error: Unexpected operation mode: ${mode}`);
+    }
+}
+exports.getOperationStatus = getOperationStatus;
+function accessBodyProperty({ flatResponse, rawResponse }, prop) {
+    var _a, _b;
+    return (_a = flatResponse === null || flatResponse === void 0 ? void 0 : flatResponse[prop]) !== null && _a !== void 0 ? _a : (_b = rawResponse.body) === null || _b === void 0 ? void 0 : _b[prop];
+}
+function getResourceLocation(res, state) {
+    const loc = accessBodyProperty(res, "resourceLocation");
+    if (loc && typeof loc === "string") {
+        state.config.resourceLocation = loc;
+    }
+    return state.config.resourceLocation;
+}
+exports.getResourceLocation = getResourceLocation;
+function isOperationError(e) {
+    return e.name === "RestError";
+}
+exports.isOperationError = isOperationError;
+/** Polls the long-running operation. */
+async function pollHttpOperation(inputs) {
+    const { lro, stateProxy, options, processResult, updateState, setDelay, state, setErrorAsResult, } = inputs;
+    return (0, operation_js_1.pollOperation)({
+        state,
+        stateProxy,
+        setDelay,
+        processResult: processResult
+            ? ({ flatResponse }, inputState) => processResult(flatResponse, inputState)
+            : ({ flatResponse }) => flatResponse,
+        getError: getErrorFromResponse,
+        updateState,
+        getPollingInterval: parseRetryAfter,
+        getOperationLocation,
+        getOperationStatus,
+        isOperationError,
+        getResourceLocation,
+        options,
+        /**
+         * The expansion here is intentional because `lro` could be an object that
+         * references an inner this, so we need to preserve a reference to it.
+         */
+        poll: async (location, inputOptions) => lro.sendPollRequest(location, inputOptions),
+        setErrorAsResult,
+    });
+}
+exports.pollHttpOperation = pollHttpOperation;
+//# sourceMappingURL=operation.js.map
+
+/***/ }),
+
+/***/ 561:
+/***/ ((__unused_webpack_module, exports, __nccwpck_require__) => {
+
+
+// Copyright (c) Microsoft Corporation.
+// Licensed under the MIT license.
+Object.defineProperty(exports, "__esModule", ({ value: true }));
+exports.createHttpPoller = void 0;
+const operation_js_1 = __nccwpck_require__(7146);
+const poller_js_1 = __nccwpck_require__(1975);
+/**
+ * Creates a poller that can be used to poll a long-running operation.
+ * @param lro - Description of the long-running operation
+ * @param options - options to configure the poller
+ * @returns an initialized poller
+ */
+async function createHttpPoller(lro, options) {
+    const { resourceLocationConfig, intervalInMs, processResult, restoreFrom, updateState, withOperationLocation, resolveOnUnsuccessful = false, } = options || {};
+    return (0, poller_js_1.buildCreatePoller)({
+        getStatusFromInitialResponse: operation_js_1.getStatusFromInitialResponse,
+        getStatusFromPollResponse: operation_js_1.getOperationStatus,
+        isOperationError: operation_js_1.isOperationError,
+        getOperationLocation: operation_js_1.getOperationLocation,
+        getResourceLocation: operation_js_1.getResourceLocation,
+        getPollingInterval: operation_js_1.parseRetryAfter,
+        getError: operation_js_1.getErrorFromResponse,
+        resolveOnUnsuccessful,
+    })({
+        init: async () => {
+            const response = await lro.sendInitialRequest();
+            const config = (0, operation_js_1.inferLroMode)({
+                rawResponse: response.rawResponse,
+                requestPath: lro.requestPath,
+                requestMethod: lro.requestMethod,
+                resourceLocationConfig,
+            });
+            return Object.assign({ response, operationLocation: config === null || config === void 0 ? void 0 : config.operationLocation, resourceLocation: config === null || config === void 0 ? void 0 : config.resourceLocation }, ((config === null || config === void 0 ? void 0 : config.mode) ? { metadata: { mode: config.mode } } : {}));
+        },
+        poll: lro.sendPollRequest,
+    }, {
+        intervalInMs,
+        withOperationLocation,
+        restoreFrom,
+        updateState,
+        processResult: processResult
+            ? ({ flatResponse }, state) => processResult(flatResponse, state)
+            : ({ flatResponse }) => flatResponse,
+    });
+}
+exports.createHttpPoller = createHttpPoller;
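+/**
+ * A minimal sketch of driving `createHttpPoller`, assuming a `sendRequest` helper
+ * (a placeholder, not part of this module) that returns the `{ flatResponse, rawResponse }`
+ * pair the poller expects.
+ *
+ * ```ts
+ * const poller = await createHttpPoller(
+ *   {
+ *     requestMethod: "PUT",
+ *     requestPath: "/widgets/1",
+ *     sendInitialRequest: async () => sendRequest("PUT", "/widgets/1"),
+ *     sendPollRequest: async (path) => sendRequest("GET", path),
+ *   },
+ *   { intervalInMs: 2000 }
+ * );
+ * const result = await poller.pollUntilDone();
+ * ```
+ */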
+//# sourceMappingURL=poller.js.map
+
+/***/ }),
+
+/***/ 2670:
+/***/ ((__unused_webpack_module, exports, __nccwpck_require__) => {
+
+
+// Copyright (c) Microsoft Corporation.
+// Licensed under the MIT license.
+Object.defineProperty(exports, "__esModule", ({ value: true }));
+exports.createHttpPoller = void 0;
+const tslib_1 = __nccwpck_require__(1577);
+var poller_js_1 = __nccwpck_require__(561);
+Object.defineProperty(exports, "createHttpPoller", ({ enumerable: true, get: function () { return poller_js_1.createHttpPoller; } }));
+/**
+ * This can be uncommented to expose the protocol-agnostic poller
+ */
+// export {
+//   BuildCreatePollerOptions,
+//   Operation,
+//   CreatePollerOptions,
+//   OperationConfig,
+//   RestorableOperationState,
+// } from "./poller/models";
+// export { buildCreatePoller } from "./poller/poller";
+/** legacy */
+tslib_1.__exportStar(__nccwpck_require__(6026), exports);
+tslib_1.__exportStar(__nccwpck_require__(5804), exports);
+tslib_1.__exportStar(__nccwpck_require__(2290), exports);
+//# sourceMappingURL=index.js.map
+
+/***/ }),
+
+/***/ 6026:
+/***/ ((__unused_webpack_module, exports, __nccwpck_require__) => {
+
+
+// Copyright (c) Microsoft Corporation.
+// Licensed under the MIT license.
+Object.defineProperty(exports, "__esModule", ({ value: true }));
+exports.LroEngine = void 0;
+var lroEngine_js_1 = __nccwpck_require__(9331);
+Object.defineProperty(exports, "LroEngine", ({ enumerable: true, get: function () { return lroEngine_js_1.LroEngine; } }));
+//# sourceMappingURL=index.js.map
+
+/***/ }),
+
+/***/ 9331:
+/***/ ((__unused_webpack_module, exports, __nccwpck_require__) => {
+
+
+// Copyright (c) Microsoft Corporation.
+// Licensed under the MIT license.
+Object.defineProperty(exports, "__esModule", ({ value: true }));
+exports.LroEngine = void 0;
+const operation_js_1 = __nccwpck_require__(4951);
+const constants_js_1 = __nccwpck_require__(2216);
+const poller_js_1 = __nccwpck_require__(5804);
+const operation_js_2 = __nccwpck_require__(804);
+/**
+ * The LRO Engine, a class that performs polling.
+ */
+class LroEngine extends poller_js_1.Poller {
+    constructor(lro, options) {
+        const { intervalInMs = constants_js_1.POLL_INTERVAL_IN_MS, resumeFrom, resolveOnUnsuccessful = false, isDone, lroResourceLocationConfig, processResult, updateState, } = options || {};
+        const state = resumeFrom
+            ? (0, operation_js_2.deserializeState)(resumeFrom)
+            : {};
+        const operation = new operation_js_1.GenericPollOperation(state, lro, !resolveOnUnsuccessful, lroResourceLocationConfig, processResult, updateState, isDone);
+        super(operation);
+        this.resolveOnUnsuccessful = resolveOnUnsuccessful;
+        this.config = { intervalInMs: intervalInMs };
+        operation.setPollerConfig(this.config);
+    }
+    /**
+     * The method used by the poller to wait before attempting to update its operation.
+     */
+    delay() {
+        return new Promise((resolve) => setTimeout(() => resolve(), this.config.intervalInMs));
+    }
+}
+exports.LroEngine = LroEngine;
+//# sourceMappingURL=lroEngine.js.map
+
+/***/ }),
+
+/***/ 4951:
+/***/ ((__unused_webpack_module, exports, __nccwpck_require__) => {
+
+
+// Copyright (c) Microsoft Corporation.
+// Licensed under the MIT license.
+Object.defineProperty(exports, "__esModule", ({ value: true }));
+exports.GenericPollOperation = void 0;
+const operation_js_1 = __nccwpck_require__(7146);
+const logger_js_1 = __nccwpck_require__(1932);
+const createStateProxy = () => ({
+    initState: (config) => ({ config, isStarted: true }),
+    setCanceled: (state) => (state.isCancelled = true),
+    setError: (state, error) => (state.error = error),
+    setResult: (state, result) => (state.result = result),
+    setRunning: (state) => (state.isStarted = true),
+    setSucceeded: (state) => (state.isCompleted = true),
+    setFailed: () => {
+        /** empty body */
+    },
+    getError: (state) => state.error,
+    getResult: (state) => state.result,
+    isCanceled: (state) => !!state.isCancelled,
+    isFailed: (state) => !!state.error,
+    isRunning: (state) => !!state.isStarted,
+    isSucceeded: (state) => Boolean(state.isCompleted && !state.isCancelled && !state.error),
+});
+class GenericPollOperation {
+    constructor(state, lro, setErrorAsResult, lroResourceLocationConfig, processResult, updateState, isDone) {
+        this.state = state;
+        this.lro = lro;
+        this.setErrorAsResult = setErrorAsResult;
+        this.lroResourceLocationConfig = lroResourceLocationConfig;
+        this.processResult = processResult;
+        this.updateState = updateState;
+        this.isDone = isDone;
+    }
+    setPollerConfig(pollerConfig) {
+        this.pollerConfig = pollerConfig;
+    }
+    async update(options) {
+        var _a;
+        const stateProxy = createStateProxy();
+        if (!this.state.isStarted) {
+            this.state = Object.assign(Object.assign({}, this.state), (await (0, operation_js_1.initHttpOperation)({
+                lro: this.lro,
+                stateProxy,
+                resourceLocationConfig: this.lroResourceLocationConfig,
+                processResult: this.processResult,
+                setErrorAsResult: this.setErrorAsResult,
+            })));
+        }
+        const updateState = this.updateState;
+        const isDone = this.isDone;
+        if (!this.state.isCompleted && this.state.error === undefined) {
+            await (0, operation_js_1.pollHttpOperation)({
+                lro: this.lro,
+                state: this.state,
+                stateProxy,
+                processResult: this.processResult,
+                updateState: updateState
+                    ? (state, { rawResponse }) => updateState(state, rawResponse)
+                    : undefined,
+                isDone: isDone
+                    ? ({ flatResponse }, state) => isDone(flatResponse, state)
+                    : undefined,
+                options,
+                setDelay: (intervalInMs) => {
+                    this.pollerConfig.intervalInMs = intervalInMs;
+                },
+                setErrorAsResult: this.setErrorAsResult,
+            });
+        }
+        (_a = options === null || options === void 0 ? void 0 : options.fireProgress) === null || _a === void 0 ? void 0 : _a.call(options, this.state);
+        return this;
+    }
+    async cancel() {
+        logger_js_1.logger.error("`cancelOperation` is deprecated because it wasn't implemented");
+        return this;
+    }
+    /**
+     * Serializes the Poller operation.
+     */
+    toString() {
+        return JSON.stringify({
+            state: this.state,
+        });
+    }
+}
+exports.GenericPollOperation = GenericPollOperation;
+//# sourceMappingURL=operation.js.map
+
+/***/ }),
+
+/***/ 2290:
+/***/ ((__unused_webpack_module, exports) => {
+
+
+// Copyright (c) Microsoft Corporation.
+// Licensed under the MIT license.
+Object.defineProperty(exports, "__esModule", ({ value: true }));
+//# sourceMappingURL=pollOperation.js.map
+
+/***/ }),
+
+/***/ 5804:
+/***/ ((__unused_webpack_module, exports) => {
+
+
+// Copyright (c) Microsoft Corporation.
+// Licensed under the MIT license.
+Object.defineProperty(exports, "__esModule", ({ value: true }));
+exports.Poller = exports.PollerCancelledError = exports.PollerStoppedError = void 0;
+/**
+ * When a poller is manually stopped through the `stopPolling` method,
+ * the poller will be rejected with an instance of the PollerStoppedError.
+ */
+class PollerStoppedError extends Error {
+    constructor(message) {
+        super(message);
+        this.name = "PollerStoppedError";
+        Object.setPrototypeOf(this, PollerStoppedError.prototype);
+    }
+}
+exports.PollerStoppedError = PollerStoppedError;
+/**
+ * When the operation is cancelled, the poller will be rejected with an instance
+ * of the PollerCancelledError.
+ */
+class PollerCancelledError extends Error {
+    constructor(message) {
+        super(message);
+        this.name = "PollerCancelledError";
+        Object.setPrototypeOf(this, PollerCancelledError.prototype);
+    }
+}
+exports.PollerCancelledError = PollerCancelledError;
+/**
+ * A class that represents the definition of a program that polls through consecutive requests
+ * until it reaches a state of completion.
+ *
+ * A poller can be executed manually, request by request, by calling the `poll()` method repeatedly until its operation is completed.
+ * It also provides a way to wait until the operation completes, by calling `pollUntilDone()`.
+ * Pollers can also request cancellation of the ongoing process from whoever is providing the underlying long-running operation.
+ *
+ * ```ts
+ * const poller = new MyPoller();
+ *
+ * // Polling just once:
+ * await poller.poll();
+ *
+ * // We can try to cancel the request here, by calling:
+ * //
+ * //     await poller.cancelOperation();
+ * //
+ *
+ * // Getting the final result:
+ * const result = await poller.pollUntilDone();
+ * ```
+ *
+ * The Poller is defined by two types, a type representing the state of the poller, which
+ * must include a basic set of properties from `PollOperationState<TResult>`,
+ * and a return type defined by `TResult`, which can be anything.
+ *
+ * The Poller class implements the `PollerLike` interface, which allows poller implementations to avoid having
+ * to export the Poller's class directly, and instead only export the already instantiated poller with the PollerLike type.
+ *
+ * ```ts
+ * class Client {
+ *   public async makePoller(): Promise<PollerLike<MyOperationState, MyResult>> {
+ *     const poller = new MyPoller({});
+ *     // It might be preferred to return the poller after the first request is made,
+ *     // so that some information can be obtained right away.
+ *     await poller.poll();
+ *     return poller;
+ *   }
+ * }
+ *
+ * const poller: PollerLike<MyOperationState, MyResult> = await myClient.makePoller();
+ * ```
+ *
+ * A poller can be created through its constructor, then it can be polled until it's completed.
+ * At any point in time, the state of the poller can be obtained without delay through the getOperationState method.
+ * At any point in time, the intermediate forms of the result type can be requested without delay.
+ * Once the underlying operation is marked as completed, the poller will stop and the final value will be returned.
+ *
+ * ```ts
+ * const poller = myClient.makePoller();
+ * const state: MyOperationState = poller.getOperationState();
+ *
+ * // The intermediate result can be obtained at any time.
+ * const result: MyResult | undefined = poller.getResult();
+ *
+ * // The final result can only be obtained after the poller finishes.
+ * const finalResult: MyResult = await poller.pollUntilDone();
+ * ```
+ *
+ */
+// eslint-disable-next-line no-use-before-define
+class Poller {
+    /**
+     * A poller needs to be initialized by passing in at least the basic properties of the `PollOperation<TState, TResult>`.
+     *
+     * When writing an implementation of a Poller, this implementation needs to deal with the initialization
+     * of any custom state beyond the basic definition of the poller. The basic poller assumes that the poller's
+     * operation has already been defined, at least its basic properties. The code below shows how to approach
+     * the definition of the constructor of a new custom poller.
+     *
+     * ```ts
+     * export class MyPoller extends Poller<MyOperationState, string> {
+     *   constructor({
+     *     // Anything you might need outside of the basics
+     *   }) {
+     *     let state: MyOperationState = {
+ *       privateProperty: "some internal value",
+ *       publicProperty: "some shared value",
+     *     };
+     *
+     *     const operation = {
+     *       state,
+     *       update,
+     *       cancel,
+     *       toString
+     *     }
+     *
+     *     // Sending the operation to the parent's constructor.
+     *     super(operation);
+     *
+     *     // You can assign more local properties here.
+     *   }
+     * }
+     * ```
+     *
+     * Inside of this constructor, a new promise is created. This will be used to
+     * tell the user when the poller finishes (see `pollUntilDone()`). The promise's
+     * resolve and reject methods are also used internally to control when to resolve
+     * or reject anyone waiting for the poller to finish.
+     *
+     * The constructor of a custom implementation of a poller is where any serialized version of
+     * a previous poller's operation should be deserialized into the operation sent to the
+     * base constructor. For example:
+     *
+     * ```ts
+     * export class MyPoller extends Poller<MyOperationState, string> {
+     *   constructor(
+     *     baseOperation: string | undefined
+     *   ) {
+     *     let state: MyOperationState = {};
+     *     if (baseOperation) {
+     *       state = {
+     *         ...JSON.parse(baseOperation).state,
+     *         ...state
+     *       };
+     *     }
+     *     const operation = {
+     *       state,
+     *       // ...
+     *     }
+     *     super(operation);
+     *   }
+     * }
+     * ```
+     *
+     * @param operation - Must contain the basic properties of `PollOperation<State, TResult>`.
+     */
+    constructor(operation) {
+        /** controls whether to throw an error if the operation failed or was canceled. */
+        this.resolveOnUnsuccessful = false;
+        this.stopped = true;
+        this.pollProgressCallbacks = [];
+        this.operation = operation;
+        this.promise = new Promise((resolve, reject) => {
+            this.resolve = resolve;
+            this.reject = reject;
+        });
+        // This prevents the UnhandledPromiseRejectionWarning in node.js from being thrown.
+        // The above warning would get thrown if `poller.poll` is called, it rejects with an error,
+        // and pollUntilDone did not have a .catch or await try/catch on its return value.
+        this.promise.catch(() => {
+            /* intentionally blank */
+        });
+    }
+    /**
+     * Starts a loop that will break only if the poller is done
+     * or if the poller is stopped.
+     */
+    async startPolling(pollOptions = {}) {
+        if (this.stopped) {
+            this.stopped = false;
+        }
+        while (!this.isStopped() && !this.isDone()) {
+            await this.poll(pollOptions);
+            await this.delay();
+        }
+    }
+    /**
+     * pollOnce performs a single poll by calling the update method of the underlying
+     * poll operation to make any relevant change effective.
+     *
+     * It optionally receives an object with an abortSignal property (an AbortSignalLike from \@azure/abort-controller).
+     *
+     * @param options - Optional properties passed to the operation's update method.
+     */
+    async pollOnce(options = {}) {
+        if (!this.isDone()) {
+            this.operation = await this.operation.update({
+                abortSignal: options.abortSignal,
+                fireProgress: this.fireProgress.bind(this),
+            });
+        }
+        this.processUpdatedState();
+    }
+    /**
+     * fireProgress calls the functions passed in via the poller's onProgress method.
+     *
+     * It loops over all of the callbacks received from onProgress, and executes them, sending them
+     * the current operation state.
+     *
+     * @param state - The current operation state.
+     */
+    fireProgress(state) {
+        for (const callback of this.pollProgressCallbacks) {
+            callback(state);
+        }
+    }
+    /**
+     * Invokes the underlying operation's cancel method.
+     */
+    async cancelOnce(options = {}) {
+        this.operation = await this.operation.cancel(options);
+    }
+    /**
+     * Returns a promise that will resolve once a single polling request finishes.
+     * It does this by calling the update method of the Poller's operation.
+     *
+     * It optionally receives an object with an abortSignal property (an AbortSignalLike from \@azure/abort-controller).
+     *
+     * @param options - Optional properties passed to the operation's update method.
+     */
+    poll(options = {}) {
+        if (!this.pollOncePromise) {
+            this.pollOncePromise = this.pollOnce(options);
+            const clearPollOncePromise = () => {
+                this.pollOncePromise = undefined;
+            };
+            this.pollOncePromise.then(clearPollOncePromise, clearPollOncePromise).catch(this.reject);
+        }
+        return this.pollOncePromise;
+    }
+    processUpdatedState() {
+        if (this.operation.state.error) {
+            this.stopped = true;
+            if (!this.resolveOnUnsuccessful) {
+                this.reject(this.operation.state.error);
+                throw this.operation.state.error;
+            }
+        }
+        if (this.operation.state.isCancelled) {
+            this.stopped = true;
+            if (!this.resolveOnUnsuccessful) {
+                const error = new PollerCancelledError("Operation was canceled");
+                this.reject(error);
+                throw error;
+            }
+        }
+        if (this.isDone() && this.resolve) {
+            // If the poller has finished polling, this means we now have a result.
+            // However, it can be the case that TResult is instantiated to void, so
+            // we are not expecting a result anyway. To account for the result possibly
+            // being absent after polling finishes, the result is cast to TResult.
+            this.resolve(this.getResult());
+        }
+    }
+    /**
+     * Returns a promise that will resolve once the underlying operation is completed.
+     */
+    async pollUntilDone(pollOptions = {}) {
+        if (this.stopped) {
+            this.startPolling(pollOptions).catch(this.reject);
+        }
+        // This is needed because the state could have been updated by
+        // `cancelOperation`, e.g. the operation is canceled or an error occurred.
+        this.processUpdatedState();
+        return this.promise;
+    }
+    /**
+     * Invokes the provided callback after each poll completes,
+     * sending the current state of the poller's operation.
+     *
+     * It returns a method that can be used to stop receiving updates on the given callback function.
+     */
+    onProgress(callback) {
+        this.pollProgressCallbacks.push(callback);
+        return () => {
+            this.pollProgressCallbacks = this.pollProgressCallbacks.filter((c) => c !== callback);
+        };
+    }
+    /**
+     * Returns true if the poller has finished polling.
+     */
+    isDone() {
+        const state = this.operation.state;
+        return Boolean(state.isCompleted || state.isCancelled || state.error);
+    }
+    /**
+     * Stops the poller from continuing to poll.
+     */
+    stopPolling() {
+        if (!this.stopped) {
+            this.stopped = true;
+            if (this.reject) {
+                this.reject(new PollerStoppedError("This poller is already stopped"));
+            }
+        }
+    }
+    /**
+     * Returns true if the poller is stopped.
+     */
+    isStopped() {
+        return this.stopped;
+    }
+    /**
+     * Attempts to cancel the underlying operation.
+     *
+     * It optionally receives an object with an abortSignal property (an AbortSignalLike from \@azure/abort-controller).
+     *
+     * If it's called again before it finishes, it will throw an error.
+     *
+     * @param options - Optional properties passed to the operation's update method.
+     */
+    cancelOperation(options = {}) {
+        if (!this.cancelPromise) {
+            this.cancelPromise = this.cancelOnce(options);
+        }
+        else if (options.abortSignal) {
+            throw new Error("A cancel request is currently pending");
+        }
+        return this.cancelPromise;
+    }
+    /**
+     * Returns the state of the operation.
+     *
+     * Even though TState will be the same type inside any of the methods of any extension of the Poller class,
+     * implementations of the pollers can customize what's shared with the public by writing their own
+     * version of the `getOperationState` method, and by defining two types, one representing the internal state of the poller
+     * and a public type representing a safe-to-share subset of the properties of the internal state.
+     * Their definition of getOperationState can then return their public type.
+     *
+     * Example:
+     *
+     * ```ts
+     * // Let's say we have our poller's operation state defined as:
+     * interface MyOperationState extends PollOperationState<ResultType> {
+     *   privateProperty?: string;
+     *   publicProperty?: string;
+     * }
+     *
+     * // To allow us to have a true separation of public and private state, we have to define another interface:
+     * interface PublicState extends PollOperationState<ResultType> {
+     *   publicProperty?: string;
+     * }
+     *
+     * // Then, we define our Poller as follows:
+     * export class MyPoller extends Poller<MyOperationState, ResultType> {
+     *   // ... More content is needed here ...
+     *
+     *   public getOperationState(): PublicState {
+     *     const state: PublicState = this.operation.state;
+     *     return {
+     *       // Properties from PollOperationState<TResult>
+     *       isStarted: state.isStarted,
+     *       isCompleted: state.isCompleted,
+     *       isCancelled: state.isCancelled,
+     *       error: state.error,
+     *       result: state.result,
+     *
+     *       // The only other property needed by PublicState.
+     *       publicProperty: state.publicProperty
+     *     }
+     *   }
+     * }
+     * ```
+     *
+     * You can see this in the tests of this repository; go to the file:
+     * `../test/utils/testPoller.ts`
+     * and look for the getOperationState implementation.
+     */
+    getOperationState() {
+        return this.operation.state;
+    }
+    /**
+     * Returns the result value of the operation,
+     * regardless of the state of the poller.
+     * It can return undefined or an incomplete form of the final TResult value
+     * depending on the implementation.
+     */
+    getResult() {
+        const state = this.operation.state;
+        return state.result;
+    }
+    /**
+     * Returns a serialized version of the poller's operation
+     * by invoking the operation's toString method.
+     */
+    toString() {
+        return this.operation.toString();
+    }
+}
+exports.Poller = Poller;
+//# sourceMappingURL=poller.js.map
+
+/***/ }),
+
+/***/ 1932:
+/***/ ((__unused_webpack_module, exports, __nccwpck_require__) => {
+
+
+// Copyright (c) Microsoft Corporation.
+// Licensed under the MIT license.
+Object.defineProperty(exports, "__esModule", ({ value: true }));
+exports.logger = void 0;
+const logger_1 = __nccwpck_require__(5851);
+/**
+ * The `@azure/logger` configuration for this package.
+ * @internal
+ */
+exports.logger = (0, logger_1.createClientLogger)("core-lro");
+//# sourceMappingURL=logger.js.map
+
+/***/ }),
+
+/***/ 2216:
+/***/ ((__unused_webpack_module, exports) => {
+
+
+// Copyright (c) Microsoft Corporation.
+// Licensed under the MIT license.
+Object.defineProperty(exports, "__esModule", ({ value: true }));
+exports.terminalStates = exports.POLL_INTERVAL_IN_MS = void 0;
+/**
+ * The default time interval to wait before sending the next polling request.
+ */
+exports.POLL_INTERVAL_IN_MS = 2000;
+/**
+ * The closed set of terminal states.
+ */
+exports.terminalStates = ["succeeded", "canceled", "failed"];
+//# sourceMappingURL=constants.js.map
+
+/***/ }),
+
+/***/ 804:
+/***/ ((__unused_webpack_module, exports, __nccwpck_require__) => {
+
+
+// Copyright (c) Microsoft Corporation.
+// Licensed under the MIT license.
+Object.defineProperty(exports, "__esModule", ({ value: true }));
+exports.pollOperation = exports.initOperation = exports.deserializeState = void 0;
+const logger_js_1 = __nccwpck_require__(1932);
+const constants_js_1 = __nccwpck_require__(2216);
+/**
+ * Deserializes the state
+ */
+function deserializeState(serializedState) {
+    try {
+        return JSON.parse(serializedState).state;
+    }
+    catch (e) {
+        throw new Error(`Unable to deserialize input state: ${serializedState}`);
+    }
+}
+exports.deserializeState = deserializeState;
+function setStateError(inputs) {
+    const { state, stateProxy, isOperationError } = inputs;
+    return (error) => {
+        if (isOperationError(error)) {
+            stateProxy.setError(state, error);
+            stateProxy.setFailed(state);
+        }
+        throw error;
+    };
+}
+function appendReadableErrorMessage(currentMessage, innerMessage) {
+    let message = currentMessage;
+    if (message.slice(-1) !== ".") {
+        message = message + ".";
+    }
+    return message + " " + innerMessage;
+}
+function simplifyError(err) {
+    let message = err.message;
+    let code = err.code;
+    let curErr = err;
+    while (curErr.innererror) {
+        curErr = curErr.innererror;
+        code = curErr.code;
+        message = appendReadableErrorMessage(message, curErr.message);
+    }
+    return {
+        code,
+        message,
+    };
+}
+function processOperationStatus(result) {
+    const { state, stateProxy, status, isDone, processResult, getError, response, setErrorAsResult } = result;
+    switch (status) {
+        case "succeeded": {
+            stateProxy.setSucceeded(state);
+            break;
+        }
+        case "failed": {
+            const err = getError === null || getError === void 0 ? void 0 : getError(response);
+            let postfix = "";
+            if (err) {
+                const { code, message } = simplifyError(err);
+                postfix = `. ${code}. ${message}`;
+            }
+            const errStr = `The long-running operation has failed${postfix}`;
+            stateProxy.setError(state, new Error(errStr));
+            stateProxy.setFailed(state);
+            logger_js_1.logger.warning(errStr);
+            break;
+        }
+        case "canceled": {
+            stateProxy.setCanceled(state);
+            break;
+        }
+    }
+    if ((isDone === null || isDone === void 0 ? void 0 : isDone(response, state)) ||
+        (isDone === undefined &&
+            ["succeeded", "canceled"].concat(setErrorAsResult ? [] : ["failed"]).includes(status))) {
+        stateProxy.setResult(state, buildResult({
+            response,
+            state,
+            processResult,
+        }));
+    }
+}
+function buildResult(inputs) {
+    const { processResult, response, state } = inputs;
+    return processResult ? processResult(response, state) : response;
+}
+/**
+ * Initiates the long-running operation.
+ */
+async function initOperation(inputs) {
+    const { init, stateProxy, processResult, getOperationStatus, withOperationLocation, setErrorAsResult, } = inputs;
+    const { operationLocation, resourceLocation, metadata, response } = await init();
+    if (operationLocation)
+        withOperationLocation === null || withOperationLocation === void 0 ? void 0 : withOperationLocation(operationLocation, false);
+    const config = {
+        metadata,
+        operationLocation,
+        resourceLocation,
+    };
+    logger_js_1.logger.verbose(`LRO: Operation description:`, config);
+    const state = stateProxy.initState(config);
+    const status = getOperationStatus({ response, state, operationLocation });
+    processOperationStatus({ state, status, stateProxy, response, setErrorAsResult, processResult });
+    return state;
+}
+exports.initOperation = initOperation;
+async function pollOperationHelper(inputs) {
+    const { poll, state, stateProxy, operationLocation, getOperationStatus, getResourceLocation, isOperationError, options, } = inputs;
+    const response = await poll(operationLocation, options).catch(setStateError({
+        state,
+        stateProxy,
+        isOperationError,
+    }));
+    const status = getOperationStatus(response, state);
+    logger_js_1.logger.verbose(`LRO: Status:\n\tPolling from: ${state.config.operationLocation}\n\tOperation status: ${status}\n\tPolling status: ${constants_js_1.terminalStates.includes(status) ? "Stopped" : "Running"}`);
+    if (status === "succeeded") {
+        const resourceLocation = getResourceLocation(response, state);
+        if (resourceLocation !== undefined) {
+            return {
+                response: await poll(resourceLocation).catch(setStateError({ state, stateProxy, isOperationError })),
+                status,
+            };
+        }
+    }
+    return { response, status };
+}
+/** Polls the long-running operation. */
+async function pollOperation(inputs) {
+    const { poll, state, stateProxy, options, getOperationStatus, getResourceLocation, getOperationLocation, isOperationError, withOperationLocation, getPollingInterval, processResult, getError, updateState, setDelay, isDone, setErrorAsResult, } = inputs;
+    const { operationLocation } = state.config;
+    if (operationLocation !== undefined) {
+        const { response, status } = await pollOperationHelper({
+            poll,
+            getOperationStatus,
+            state,
+            stateProxy,
+            operationLocation,
+            getResourceLocation,
+            isOperationError,
+            options,
+        });
+        processOperationStatus({
+            status,
+            response,
+            state,
+            stateProxy,
+            isDone,
+            processResult,
+            getError,
+            setErrorAsResult,
+        });
+        if (!constants_js_1.terminalStates.includes(status)) {
+            const intervalInMs = getPollingInterval === null || getPollingInterval === void 0 ? void 0 : getPollingInterval(response);
+            if (intervalInMs)
+                setDelay(intervalInMs);
+            const location = getOperationLocation === null || getOperationLocation === void 0 ? void 0 : getOperationLocation(response, state);
+            if (location !== undefined) {
+                const isUpdated = operationLocation !== location;
+                state.config.operationLocation = location;
+                withOperationLocation === null || withOperationLocation === void 0 ? void 0 : withOperationLocation(location, isUpdated);
+            }
+            else
+                withOperationLocation === null || withOperationLocation === void 0 ? void 0 : withOperationLocation(operationLocation, false);
+        }
+        updateState === null || updateState === void 0 ? void 0 : updateState(state, response);
+    }
+}
+exports.pollOperation = pollOperation;
+//# sourceMappingURL=operation.js.map
+
+/***/ }),
+
+/***/ 1975:
+/***/ ((__unused_webpack_module, exports, __nccwpck_require__) => {
+
+
+// Copyright (c) Microsoft Corporation.
+// Licensed under the MIT license.
+Object.defineProperty(exports, "__esModule", ({ value: true }));
+exports.buildCreatePoller = void 0;
+const operation_js_1 = __nccwpck_require__(804);
+const constants_js_1 = __nccwpck_require__(2216);
+const core_util_1 = __nccwpck_require__(402);
+const createStateProxy = () => ({
+    /**
+     * The state at this point is created to be of type OperationState<TResult>.
+     * It will be updated later to be of type TState when the
+     * customer-provided callback, `updateState`, is called during polling.
+     */
+    initState: (config) => ({ status: "running", config }),
+    setCanceled: (state) => (state.status = "canceled"),
+    setError: (state, error) => (state.error = error),
+    setResult: (state, result) => (state.result = result),
+    setRunning: (state) => (state.status = "running"),
+    setSucceeded: (state) => (state.status = "succeeded"),
+    setFailed: (state) => (state.status = "failed"),
+    getError: (state) => state.error,
+    getResult: (state) => state.result,
+    isCanceled: (state) => state.status === "canceled",
+    isFailed: (state) => state.status === "failed",
+    isRunning: (state) => state.status === "running",
+    isSucceeded: (state) => state.status === "succeeded",
+});
+/**
+ * Returns a poller factory.
+ */
+function buildCreatePoller(inputs) {
+    const { getOperationLocation, getStatusFromInitialResponse, getStatusFromPollResponse, isOperationError, getResourceLocation, getPollingInterval, getError, resolveOnUnsuccessful, } = inputs;
+    return async ({ init, poll }, options) => {
+        const { processResult, updateState, withOperationLocation: withOperationLocationCallback, intervalInMs = constants_js_1.POLL_INTERVAL_IN_MS, restoreFrom, } = options || {};
+        const stateProxy = createStateProxy();
+        const withOperationLocation = withOperationLocationCallback
+            ? (() => {
+                let called = false;
+                return (operationLocation, isUpdated) => {
+                    if (isUpdated)
+                        withOperationLocationCallback(operationLocation);
+                    else if (!called)
+                        withOperationLocationCallback(operationLocation);
+                    called = true;
+                };
+            })()
+            : undefined;
+        const state = restoreFrom
+            ? (0, operation_js_1.deserializeState)(restoreFrom)
+            : await (0, operation_js_1.initOperation)({
+                init,
+                stateProxy,
+                processResult,
+                getOperationStatus: getStatusFromInitialResponse,
+                withOperationLocation,
+                setErrorAsResult: !resolveOnUnsuccessful,
+            });
+        let resultPromise;
+        const abortController = new AbortController();
+        const handlers = new Map();
+        const handleProgressEvents = async () => handlers.forEach((h) => h(state));
+        const cancelErrMsg = "Operation was canceled";
+        let currentPollIntervalInMs = intervalInMs;
+        const poller = {
+            getOperationState: () => state,
+            getResult: () => state.result,
+            isDone: () => ["succeeded", "failed", "canceled"].includes(state.status),
+            isStopped: () => resultPromise === undefined,
+            stopPolling: () => {
+                abortController.abort();
+            },
+            toString: () => JSON.stringify({
+                state,
+            }),
+            onProgress: (callback) => {
+                const s = Symbol();
+                handlers.set(s, callback);
+                return () => handlers.delete(s);
+            },
+            pollUntilDone: (pollOptions) => (resultPromise !== null && resultPromise !== void 0 ? resultPromise : (resultPromise = (async () => {
+                const { abortSignal: inputAbortSignal } = pollOptions || {};
+                // In the future we can use AbortSignal.any() instead
+                function abortListener() {
+                    abortController.abort();
+                }
+                const abortSignal = abortController.signal;
+                if (inputAbortSignal === null || inputAbortSignal === void 0 ? void 0 : inputAbortSignal.aborted) {
+                    abortController.abort();
+                }
+                else if (!abortSignal.aborted) {
+                    inputAbortSignal === null || inputAbortSignal === void 0 ? void 0 : inputAbortSignal.addEventListener("abort", abortListener, { once: true });
+                }
+                try {
+                    if (!poller.isDone()) {
+                        await poller.poll({ abortSignal });
+                        while (!poller.isDone()) {
+                            await (0, core_util_1.delay)(currentPollIntervalInMs, { abortSignal });
+                            await poller.poll({ abortSignal });
+                        }
+                    }
+                }
+                finally {
+                    inputAbortSignal === null || inputAbortSignal === void 0 ? void 0 : inputAbortSignal.removeEventListener("abort", abortListener);
+                }
+                if (resolveOnUnsuccessful) {
+                    return poller.getResult();
+                }
+                else {
+                    switch (state.status) {
+                        case "succeeded":
+                            return poller.getResult();
+                        case "canceled":
+                            throw new Error(cancelErrMsg);
+                        case "failed":
+                            throw state.error;
+                        case "notStarted":
+                        case "running":
+                            throw new Error(`Polling completed without succeeding or failing`);
+                    }
+                }
+            })().finally(() => {
+                resultPromise = undefined;
+            }))),
+            async poll(pollOptions) {
+                if (resolveOnUnsuccessful) {
+                    if (poller.isDone())
+                        return;
+                }
+                else {
+                    switch (state.status) {
+                        case "succeeded":
+                            return;
+                        case "canceled":
+                            throw new Error(cancelErrMsg);
+                        case "failed":
+                            throw state.error;
+                    }
+                }
+                await (0, operation_js_1.pollOperation)({
+                    poll,
+                    state,
+                    stateProxy,
+                    getOperationLocation,
+                    isOperationError,
+                    withOperationLocation,
+                    getPollingInterval,
+                    getOperationStatus: getStatusFromPollResponse,
+                    getResourceLocation,
+                    processResult,
+                    getError,
+                    updateState,
+                    options: pollOptions,
+                    setDelay: (pollIntervalInMs) => {
+                        currentPollIntervalInMs = pollIntervalInMs;
+                    },
+                    setErrorAsResult: !resolveOnUnsuccessful,
+                });
+                await handleProgressEvents();
+                if (!resolveOnUnsuccessful) {
+                    switch (state.status) {
+                        case "canceled":
+                            throw new Error(cancelErrMsg);
+                        case "failed":
+                            throw state.error;
+                    }
+                }
+            },
+        };
+        return poller;
+    };
+}
+exports.buildCreatePoller = buildCreatePoller;
+//# sourceMappingURL=poller.js.map
+
+/***/ }),
+
+/***/ 1652:
+/***/ ((__unused_webpack_module, exports) => {
+
+
+// Copyright (c) Microsoft Corporation.
+// Licensed under the MIT License.
+Object.defineProperty(exports, "__esModule", ({ value: true }));
+exports.DEFAULT_RETRY_POLICY_COUNT = exports.SDK_VERSION = void 0;
+exports.SDK_VERSION = "1.18.0";
+exports.DEFAULT_RETRY_POLICY_COUNT = 3;
+//# sourceMappingURL=constants.js.map
+
+/***/ }),
+
+/***/ 1765:
+/***/ ((__unused_webpack_module, exports, __nccwpck_require__) => {
+
+
+// Copyright (c) Microsoft Corporation.
+// Licensed under the MIT License.
+Object.defineProperty(exports, "__esModule", ({ value: true }));
+exports.createPipelineFromOptions = createPipelineFromOptions;
+const logPolicy_js_1 = __nccwpck_require__(9060);
+const pipeline_js_1 = __nccwpck_require__(7311);
+const redirectPolicy_js_1 = __nccwpck_require__(4308);
+const userAgentPolicy_js_1 = __nccwpck_require__(9398);
+const multipartPolicy_js_1 = __nccwpck_require__(2730);
+const decompressResponsePolicy_js_1 = __nccwpck_require__(9712);
+const defaultRetryPolicy_js_1 = __nccwpck_require__(2122);
+const formDataPolicy_js_1 = __nccwpck_require__(7870);
+const core_util_1 = __nccwpck_require__(402);
+const proxyPolicy_js_1 = __nccwpck_require__(338);
+const setClientRequestIdPolicy_js_1 = __nccwpck_require__(9417);
+const tlsPolicy_js_1 = __nccwpck_require__(6551);
+const tracingPolicy_js_1 = __nccwpck_require__(4396);
+/**
+ * Create a new pipeline with a default set of customizable policies.
+ * @param options - Options to configure a custom pipeline.
+ */
+function createPipelineFromOptions(options) {
+    var _a;
+    const pipeline = (0, pipeline_js_1.createEmptyPipeline)();
+    if (core_util_1.isNodeLike) {
+        if (options.tlsOptions) {
+            pipeline.addPolicy((0, tlsPolicy_js_1.tlsPolicy)(options.tlsOptions));
+        }
+        pipeline.addPolicy((0, proxyPolicy_js_1.proxyPolicy)(options.proxyOptions));
+        pipeline.addPolicy((0, decompressResponsePolicy_js_1.decompressResponsePolicy)());
+    }
+    pipeline.addPolicy((0, formDataPolicy_js_1.formDataPolicy)(), { beforePolicies: [multipartPolicy_js_1.multipartPolicyName] });
+    pipeline.addPolicy((0, userAgentPolicy_js_1.userAgentPolicy)(options.userAgentOptions));
+    pipeline.addPolicy((0, setClientRequestIdPolicy_js_1.setClientRequestIdPolicy)((_a = options.telemetryOptions) === null || _a === void 0 ? void 0 : _a.clientRequestIdHeaderName));
+    // The multipart policy is added after policies with no phase, so that
+    // policies can be added between it and formDataPolicy to modify
+    // properties (e.g., making the boundary constant in recorded tests).
+    pipeline.addPolicy((0, multipartPolicy_js_1.multipartPolicy)(), { afterPhase: "Deserialize" });
+    pipeline.addPolicy((0, defaultRetryPolicy_js_1.defaultRetryPolicy)(options.retryOptions), { phase: "Retry" });
+    pipeline.addPolicy((0, tracingPolicy_js_1.tracingPolicy)(Object.assign(Object.assign({}, options.userAgentOptions), options.loggingOptions)), {
+        afterPhase: "Retry",
+    });
+    if (core_util_1.isNodeLike) {
+        // Both XHR and Fetch expect to handle redirects automatically,
+        // so only include this policy when we're in Node.
+        pipeline.addPolicy((0, redirectPolicy_js_1.redirectPolicy)(options.redirectOptions), { afterPhase: "Retry" });
+    }
+    pipeline.addPolicy((0, logPolicy_js_1.logPolicy)(options.loggingOptions), { afterPhase: "Sign" });
+    return pipeline;
+}
+//# sourceMappingURL=createPipelineFromOptions.js.map
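+// A hedged usage sketch (kept as a comment so it does not execute in the bundle): combining
+// createPipelineFromOptions with createDefaultHttpClient and createPipelineRequest. The package
+// specifier "@azure/core-rest-pipeline", the example URL and the `maxRetries` option name are
+// assumptions made for illustration.
+//
+//   const { createPipelineFromOptions, createDefaultHttpClient, createPipelineRequest } =
+//     require("@azure/core-rest-pipeline");
+//   const pipeline = createPipelineFromOptions({ retryOptions: { maxRetries: 3 } });
+//   const httpClient = createDefaultHttpClient();
+//   async function run() {
+//     const request = createPipelineRequest({ url: "https://example.com" });
+//     const response = await pipeline.sendRequest(httpClient, request);
+//     console.log(response.status, response.bodyAsText);
+//   }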
+
+/***/ }),
+
+/***/ 6123:
+/***/ ((__unused_webpack_module, exports, __nccwpck_require__) => {
+
+
+// Copyright (c) Microsoft Corporation.
+// Licensed under the MIT License.
+Object.defineProperty(exports, "__esModule", ({ value: true }));
+exports.createDefaultHttpClient = createDefaultHttpClient;
+const nodeHttpClient_js_1 = __nccwpck_require__(9362);
+/**
+ * Create the correct HttpClient for the current environment.
+ */
+function createDefaultHttpClient() {
+    return (0, nodeHttpClient_js_1.createNodeHttpClient)();
+}
+//# sourceMappingURL=defaultHttpClient.js.map
+
+/***/ }),
+
+/***/ 6955:
+/***/ ((__unused_webpack_module, exports) => {
+
+
+// Copyright (c) Microsoft Corporation.
+// Licensed under the MIT License.
+Object.defineProperty(exports, "__esModule", ({ value: true }));
+exports.createHttpHeaders = createHttpHeaders;
+function normalizeName(name) {
+    return name.toLowerCase();
+}
+function* headerIterator(map) {
+    for (const entry of map.values()) {
+        yield [entry.name, entry.value];
+    }
+}
+class HttpHeadersImpl {
+    constructor(rawHeaders) {
+        this._headersMap = new Map();
+        if (rawHeaders) {
+            for (const headerName of Object.keys(rawHeaders)) {
+                this.set(headerName, rawHeaders[headerName]);
+            }
+        }
+    }
+    /**
+     * Set a header in this collection with the provided name and value. The name is
+     * case-insensitive.
+     * @param name - The name of the header to set. This value is case-insensitive.
+     * @param value - The value of the header to set.
+     */
+    set(name, value) {
+        this._headersMap.set(normalizeName(name), { name, value: String(value).trim() });
+    }
+    /**
+     * Get the header value for the provided header name, or undefined if no header exists in this
+     * collection with the provided name.
+     * @param name - The name of the header. This value is case-insensitive.
+     */
+    get(name) {
+        var _a;
+        return (_a = this._headersMap.get(normalizeName(name))) === null || _a === void 0 ? void 0 : _a.value;
+    }
+    /**
+     * Get whether or not this header collection contains a header entry for the provided header name.
+     * @param name - The name of the header to check. This value is case-insensitive.
+     */
+    has(name) {
+        return this._headersMap.has(normalizeName(name));
+    }
+    /**
+     * Remove the header with the provided headerName.
+     * @param name - The name of the header to remove.
+     */
+    delete(name) {
+        this._headersMap.delete(normalizeName(name));
+    }
+    /**
+     * Get the JSON object representation of this HTTP header collection.
+     */
+    toJSON(options = {}) {
+        const result = {};
+        if (options.preserveCase) {
+            for (const entry of this._headersMap.values()) {
+                result[entry.name] = entry.value;
+            }
+        }
+        else {
+            for (const [normalizedName, entry] of this._headersMap) {
+                result[normalizedName] = entry.value;
+            }
+        }
+        return result;
+    }
+    /**
+     * Get the string representation of this HTTP header collection.
+     */
+    toString() {
+        return JSON.stringify(this.toJSON({ preserveCase: true }));
+    }
+    /**
+     * Iterate over tuples of header [name, value] pairs.
+     */
+    [Symbol.iterator]() {
+        return headerIterator(this._headersMap);
+    }
+}
+/**
+ * Creates an object that satisfies the `HttpHeaders` interface.
+ * @param rawHeaders - A simple object representing initial headers
+ */
+function createHttpHeaders(rawHeaders) {
+    return new HttpHeadersImpl(rawHeaders);
+}
+//# sourceMappingURL=httpHeaders.js.map
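+// A hedged usage sketch (kept as a comment so it does not execute in the bundle): the
+// case-insensitive behaviour of the header collection created by createHttpHeaders above. The
+// header values are placeholders.
+//
+//   const headers = createHttpHeaders({ "Content-Type": "application/json" });
+//   headers.get("content-type");            // "application/json" (lookups are case-insensitive)
+//   headers.has("CONTENT-TYPE");            // true
+//   headers.set("x-ms-client-request-id", "abc");
+//   headers.toJSON();                       // { "content-type": "application/json", "x-ms-client-request-id": "abc" }
+//   headers.toJSON({ preserveCase: true }); // original casing preserved in the keys
+//   for (const [name, value] of headers) {
+//     // iterates [name, value] tuples using the original casing
+//   }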
+
+/***/ }),
+
+/***/ 5565:
+/***/ ((__unused_webpack_module, exports, __nccwpck_require__) => {
+
+
+// Copyright (c) Microsoft Corporation.
+// Licensed under the MIT License.
+Object.defineProperty(exports, "__esModule", ({ value: true }));
+exports.createFileFromStream = exports.createFile = exports.auxiliaryAuthenticationHeaderPolicyName = exports.auxiliaryAuthenticationHeaderPolicy = exports.ndJsonPolicyName = exports.ndJsonPolicy = exports.bearerTokenAuthenticationPolicyName = exports.bearerTokenAuthenticationPolicy = exports.formDataPolicyName = exports.formDataPolicy = exports.tlsPolicyName = exports.tlsPolicy = exports.userAgentPolicyName = exports.userAgentPolicy = exports.defaultRetryPolicy = exports.tracingPolicyName = exports.tracingPolicy = exports.retryPolicy = exports.throttlingRetryPolicyName = exports.throttlingRetryPolicy = exports.systemErrorRetryPolicyName = exports.systemErrorRetryPolicy = exports.redirectPolicyName = exports.redirectPolicy = exports.getDefaultProxySettings = exports.proxyPolicyName = exports.proxyPolicy = exports.multipartPolicyName = exports.multipartPolicy = exports.logPolicyName = exports.logPolicy = exports.setClientRequestIdPolicyName = exports.setClientRequestIdPolicy = exports.exponentialRetryPolicyName = exports.exponentialRetryPolicy = exports.decompressResponsePolicyName = exports.decompressResponsePolicy = exports.isRestError = exports.RestError = exports.createPipelineRequest = exports.createHttpHeaders = exports.createDefaultHttpClient = exports.createPipelineFromOptions = exports.createEmptyPipeline = void 0;
+var pipeline_js_1 = __nccwpck_require__(7311);
+Object.defineProperty(exports, "createEmptyPipeline", ({ enumerable: true, get: function () { return pipeline_js_1.createEmptyPipeline; } }));
+var createPipelineFromOptions_js_1 = __nccwpck_require__(1765);
+Object.defineProperty(exports, "createPipelineFromOptions", ({ enumerable: true, get: function () { return createPipelineFromOptions_js_1.createPipelineFromOptions; } }));
+var defaultHttpClient_js_1 = __nccwpck_require__(6123);
+Object.defineProperty(exports, "createDefaultHttpClient", ({ enumerable: true, get: function () { return defaultHttpClient_js_1.createDefaultHttpClient; } }));
+var httpHeaders_js_1 = __nccwpck_require__(6955);
+Object.defineProperty(exports, "createHttpHeaders", ({ enumerable: true, get: function () { return httpHeaders_js_1.createHttpHeaders; } }));
+var pipelineRequest_js_1 = __nccwpck_require__(6386);
+Object.defineProperty(exports, "createPipelineRequest", ({ enumerable: true, get: function () { return pipelineRequest_js_1.createPipelineRequest; } }));
+var restError_js_1 = __nccwpck_require__(4503);
+Object.defineProperty(exports, "RestError", ({ enumerable: true, get: function () { return restError_js_1.RestError; } }));
+Object.defineProperty(exports, "isRestError", ({ enumerable: true, get: function () { return restError_js_1.isRestError; } }));
+var decompressResponsePolicy_js_1 = __nccwpck_require__(9712);
+Object.defineProperty(exports, "decompressResponsePolicy", ({ enumerable: true, get: function () { return decompressResponsePolicy_js_1.decompressResponsePolicy; } }));
+Object.defineProperty(exports, "decompressResponsePolicyName", ({ enumerable: true, get: function () { return decompressResponsePolicy_js_1.decompressResponsePolicyName; } }));
+var exponentialRetryPolicy_js_1 = __nccwpck_require__(9447);
+Object.defineProperty(exports, "exponentialRetryPolicy", ({ enumerable: true, get: function () { return exponentialRetryPolicy_js_1.exponentialRetryPolicy; } }));
+Object.defineProperty(exports, "exponentialRetryPolicyName", ({ enumerable: true, get: function () { return exponentialRetryPolicy_js_1.exponentialRetryPolicyName; } }));
+var setClientRequestIdPolicy_js_1 = __nccwpck_require__(9417);
+Object.defineProperty(exports, "setClientRequestIdPolicy", ({ enumerable: true, get: function () { return setClientRequestIdPolicy_js_1.setClientRequestIdPolicy; } }));
+Object.defineProperty(exports, "setClientRequestIdPolicyName", ({ enumerable: true, get: function () { return setClientRequestIdPolicy_js_1.setClientRequestIdPolicyName; } }));
+var logPolicy_js_1 = __nccwpck_require__(9060);
+Object.defineProperty(exports, "logPolicy", ({ enumerable: true, get: function () { return logPolicy_js_1.logPolicy; } }));
+Object.defineProperty(exports, "logPolicyName", ({ enumerable: true, get: function () { return logPolicy_js_1.logPolicyName; } }));
+var multipartPolicy_js_1 = __nccwpck_require__(2730);
+Object.defineProperty(exports, "multipartPolicy", ({ enumerable: true, get: function () { return multipartPolicy_js_1.multipartPolicy; } }));
+Object.defineProperty(exports, "multipartPolicyName", ({ enumerable: true, get: function () { return multipartPolicy_js_1.multipartPolicyName; } }));
+var proxyPolicy_js_1 = __nccwpck_require__(338);
+Object.defineProperty(exports, "proxyPolicy", ({ enumerable: true, get: function () { return proxyPolicy_js_1.proxyPolicy; } }));
+Object.defineProperty(exports, "proxyPolicyName", ({ enumerable: true, get: function () { return proxyPolicy_js_1.proxyPolicyName; } }));
+Object.defineProperty(exports, "getDefaultProxySettings", ({ enumerable: true, get: function () { return proxyPolicy_js_1.getDefaultProxySettings; } }));
+var redirectPolicy_js_1 = __nccwpck_require__(4308);
+Object.defineProperty(exports, "redirectPolicy", ({ enumerable: true, get: function () { return redirectPolicy_js_1.redirectPolicy; } }));
+Object.defineProperty(exports, "redirectPolicyName", ({ enumerable: true, get: function () { return redirectPolicy_js_1.redirectPolicyName; } }));
+var systemErrorRetryPolicy_js_1 = __nccwpck_require__(985);
+Object.defineProperty(exports, "systemErrorRetryPolicy", ({ enumerable: true, get: function () { return systemErrorRetryPolicy_js_1.systemErrorRetryPolicy; } }));
+Object.defineProperty(exports, "systemErrorRetryPolicyName", ({ enumerable: true, get: function () { return systemErrorRetryPolicy_js_1.systemErrorRetryPolicyName; } }));
+var throttlingRetryPolicy_js_1 = __nccwpck_require__(5237);
+Object.defineProperty(exports, "throttlingRetryPolicy", ({ enumerable: true, get: function () { return throttlingRetryPolicy_js_1.throttlingRetryPolicy; } }));
+Object.defineProperty(exports, "throttlingRetryPolicyName", ({ enumerable: true, get: function () { return throttlingRetryPolicy_js_1.throttlingRetryPolicyName; } }));
+var retryPolicy_js_1 = __nccwpck_require__(2876);
+Object.defineProperty(exports, "retryPolicy", ({ enumerable: true, get: function () { return retryPolicy_js_1.retryPolicy; } }));
+var tracingPolicy_js_1 = __nccwpck_require__(4396);
+Object.defineProperty(exports, "tracingPolicy", ({ enumerable: true, get: function () { return tracingPolicy_js_1.tracingPolicy; } }));
+Object.defineProperty(exports, "tracingPolicyName", ({ enumerable: true, get: function () { return tracingPolicy_js_1.tracingPolicyName; } }));
+var defaultRetryPolicy_js_1 = __nccwpck_require__(2122);
+Object.defineProperty(exports, "defaultRetryPolicy", ({ enumerable: true, get: function () { return defaultRetryPolicy_js_1.defaultRetryPolicy; } }));
+var userAgentPolicy_js_1 = __nccwpck_require__(9398);
+Object.defineProperty(exports, "userAgentPolicy", ({ enumerable: true, get: function () { return userAgentPolicy_js_1.userAgentPolicy; } }));
+Object.defineProperty(exports, "userAgentPolicyName", ({ enumerable: true, get: function () { return userAgentPolicy_js_1.userAgentPolicyName; } }));
+var tlsPolicy_js_1 = __nccwpck_require__(6551);
+Object.defineProperty(exports, "tlsPolicy", ({ enumerable: true, get: function () { return tlsPolicy_js_1.tlsPolicy; } }));
+Object.defineProperty(exports, "tlsPolicyName", ({ enumerable: true, get: function () { return tlsPolicy_js_1.tlsPolicyName; } }));
+var formDataPolicy_js_1 = __nccwpck_require__(7870);
+Object.defineProperty(exports, "formDataPolicy", ({ enumerable: true, get: function () { return formDataPolicy_js_1.formDataPolicy; } }));
+Object.defineProperty(exports, "formDataPolicyName", ({ enumerable: true, get: function () { return formDataPolicy_js_1.formDataPolicyName; } }));
+var bearerTokenAuthenticationPolicy_js_1 = __nccwpck_require__(5700);
+Object.defineProperty(exports, "bearerTokenAuthenticationPolicy", ({ enumerable: true, get: function () { return bearerTokenAuthenticationPolicy_js_1.bearerTokenAuthenticationPolicy; } }));
+Object.defineProperty(exports, "bearerTokenAuthenticationPolicyName", ({ enumerable: true, get: function () { return bearerTokenAuthenticationPolicy_js_1.bearerTokenAuthenticationPolicyName; } }));
+var ndJsonPolicy_js_1 = __nccwpck_require__(3864);
+Object.defineProperty(exports, "ndJsonPolicy", ({ enumerable: true, get: function () { return ndJsonPolicy_js_1.ndJsonPolicy; } }));
+Object.defineProperty(exports, "ndJsonPolicyName", ({ enumerable: true, get: function () { return ndJsonPolicy_js_1.ndJsonPolicyName; } }));
+var auxiliaryAuthenticationHeaderPolicy_js_1 = __nccwpck_require__(7179);
+Object.defineProperty(exports, "auxiliaryAuthenticationHeaderPolicy", ({ enumerable: true, get: function () { return auxiliaryAuthenticationHeaderPolicy_js_1.auxiliaryAuthenticationHeaderPolicy; } }));
+Object.defineProperty(exports, "auxiliaryAuthenticationHeaderPolicyName", ({ enumerable: true, get: function () { return auxiliaryAuthenticationHeaderPolicy_js_1.auxiliaryAuthenticationHeaderPolicyName; } }));
+var file_js_1 = __nccwpck_require__(5378);
+Object.defineProperty(exports, "createFile", ({ enumerable: true, get: function () { return file_js_1.createFile; } }));
+Object.defineProperty(exports, "createFileFromStream", ({ enumerable: true, get: function () { return file_js_1.createFileFromStream; } }));
+//# sourceMappingURL=index.js.map
+
+/***/ }),
+
+/***/ 3747:
+/***/ ((__unused_webpack_module, exports, __nccwpck_require__) => {
+
+
+// Copyright (c) Microsoft Corporation.
+// Licensed under the MIT License.
+Object.defineProperty(exports, "__esModule", ({ value: true }));
+exports.logger = void 0;
+const logger_1 = __nccwpck_require__(5851);
+exports.logger = (0, logger_1.createClientLogger)("core-rest-pipeline");
+//# sourceMappingURL=log.js.map
+
+/***/ }),
+
+/***/ 9362:
+/***/ ((__unused_webpack_module, exports, __nccwpck_require__) => {
+
+
+// Copyright (c) Microsoft Corporation.
+// Licensed under the MIT License.
+Object.defineProperty(exports, "__esModule", ({ value: true }));
+exports.getBodyLength = getBodyLength;
+exports.createNodeHttpClient = createNodeHttpClient;
+const tslib_1 = __nccwpck_require__(1577);
+const http = tslib_1.__importStar(__nccwpck_require__(7067));
+const https = tslib_1.__importStar(__nccwpck_require__(4708));
+const zlib = tslib_1.__importStar(__nccwpck_require__(8522));
+const node_stream_1 = __nccwpck_require__(7075);
+const abort_controller_1 = __nccwpck_require__(764);
+const httpHeaders_js_1 = __nccwpck_require__(6955);
+const restError_js_1 = __nccwpck_require__(4503);
+const log_js_1 = __nccwpck_require__(3747);
+const DEFAULT_TLS_SETTINGS = {};
+function isReadableStream(body) {
+    return body && typeof body.pipe === "function";
+}
+function isStreamComplete(stream) {
+    if (stream.readable === false) {
+        return Promise.resolve();
+    }
+    return new Promise((resolve) => {
+        const handler = () => {
+            resolve();
+            stream.removeListener("close", handler);
+            stream.removeListener("end", handler);
+            stream.removeListener("error", handler);
+        };
+        stream.on("close", handler);
+        stream.on("end", handler);
+        stream.on("error", handler);
+    });
+}
+function isArrayBuffer(body) {
+    return body && typeof body.byteLength === "number";
+}
+class ReportTransform extends node_stream_1.Transform {
+    // eslint-disable-next-line @typescript-eslint/no-unsafe-function-type
+    _transform(chunk, _encoding, callback) {
+        this.push(chunk);
+        this.loadedBytes += chunk.length;
+        try {
+            this.progressCallback({ loadedBytes: this.loadedBytes });
+            callback();
+        }
+        catch (e) {
+            callback(e);
+        }
+    }
+    constructor(progressCallback) {
+        super();
+        this.loadedBytes = 0;
+        this.progressCallback = progressCallback;
+    }
+}
+/**
+ * An HttpClient implementation that uses Node's "https" module to send HTTPS requests.
+ * @internal
+ */
+class NodeHttpClient {
+    constructor() {
+        this.cachedHttpsAgents = new WeakMap();
+    }
+    /**
+     * Makes a request over an underlying transport layer and returns the response.
+     * @param request - The request to be made.
+     */
+    async sendRequest(request) {
+        var _a, _b, _c;
+        const abortController = new AbortController();
+        let abortListener;
+        if (request.abortSignal) {
+            if (request.abortSignal.aborted) {
+                throw new abort_controller_1.AbortError("The operation was aborted.");
+            }
+            abortListener = (event) => {
+                if (event.type === "abort") {
+                    abortController.abort();
+                }
+            };
+            request.abortSignal.addEventListener("abort", abortListener);
+        }
+        if (request.timeout > 0) {
+            setTimeout(() => {
+                abortController.abort();
+            }, request.timeout);
+        }
+        const acceptEncoding = request.headers.get("Accept-Encoding");
+        const shouldDecompress = (acceptEncoding === null || acceptEncoding === void 0 ? void 0 : acceptEncoding.includes("gzip")) || (acceptEncoding === null || acceptEncoding === void 0 ? void 0 : acceptEncoding.includes("deflate"));
+        let body = typeof request.body === "function" ? request.body() : request.body;
+        if (body && !request.headers.has("Content-Length")) {
+            const bodyLength = getBodyLength(body);
+            if (bodyLength !== null) {
+                request.headers.set("Content-Length", bodyLength);
+            }
+        }
+        let responseStream;
+        try {
+            if (body && request.onUploadProgress) {
+                const onUploadProgress = request.onUploadProgress;
+                const uploadReportStream = new ReportTransform(onUploadProgress);
+                uploadReportStream.on("error", (e) => {
+                    log_js_1.logger.error("Error in upload progress", e);
+                });
+                if (isReadableStream(body)) {
+                    body.pipe(uploadReportStream);
+                }
+                else {
+                    uploadReportStream.end(body);
+                }
+                body = uploadReportStream;
+            }
+            const res = await this.makeRequest(request, abortController, body);
+            const headers = getResponseHeaders(res);
+            const status = (_a = res.statusCode) !== null && _a !== void 0 ? _a : 0;
+            const response = {
+                status,
+                headers,
+                request,
+            };
+            // Responses to HEAD must not have a body.
+            // If they do return a body, that body must be ignored.
+            if (request.method === "HEAD") {
+                // call resume() and not destroy() to avoid closing the socket
+                // and losing keep alive
+                res.resume();
+                return response;
+            }
+            responseStream = shouldDecompress ? getDecodedResponseStream(res, headers) : res;
+            const onDownloadProgress = request.onDownloadProgress;
+            if (onDownloadProgress) {
+                const downloadReportStream = new ReportTransform(onDownloadProgress);
+                downloadReportStream.on("error", (e) => {
+                    log_js_1.logger.error("Error in download progress", e);
+                });
+                responseStream.pipe(downloadReportStream);
+                responseStream = downloadReportStream;
+            }
+            if (
+            // A value of POSITIVE_INFINITY in streamResponseStatusCodes is treated as matching any status code
+            ((_b = request.streamResponseStatusCodes) === null || _b === void 0 ? void 0 : _b.has(Number.POSITIVE_INFINITY)) ||
+                ((_c = request.streamResponseStatusCodes) === null || _c === void 0 ? void 0 : _c.has(response.status))) {
+                response.readableStreamBody = responseStream;
+            }
+            else {
+                response.bodyAsText = await streamToText(responseStream);
+            }
+            return response;
+        }
+        finally {
+            // clean up event listener
+            if (request.abortSignal && abortListener) {
+                let uploadStreamDone = Promise.resolve();
+                if (isReadableStream(body)) {
+                    uploadStreamDone = isStreamComplete(body);
+                }
+                let downloadStreamDone = Promise.resolve();
+                if (isReadableStream(responseStream)) {
+                    downloadStreamDone = isStreamComplete(responseStream);
+                }
+                Promise.all([uploadStreamDone, downloadStreamDone])
+                    .then(() => {
+                    var _a;
+                    // eslint-disable-next-line promise/always-return
+                    if (abortListener) {
+                        (_a = request.abortSignal) === null || _a === void 0 ? void 0 : _a.removeEventListener("abort", abortListener);
+                    }
+                })
+                    .catch((e) => {
+                    log_js_1.logger.warning("Error when cleaning up abortListener on httpRequest", e);
+                });
+            }
+        }
+    }
+    makeRequest(request, abortController, body) {
+        var _a;
+        const url = new URL(request.url);
+        const isInsecure = url.protocol !== "https:";
+        if (isInsecure && !request.allowInsecureConnection) {
+            throw new Error(`Cannot connect to ${request.url} while allowInsecureConnection is false.`);
+        }
+        const agent = (_a = request.agent) !== null && _a !== void 0 ? _a : this.getOrCreateAgent(request, isInsecure);
+        const options = {
+            agent,
+            hostname: url.hostname,
+            path: `${url.pathname}${url.search}`,
+            port: url.port,
+            method: request.method,
+            headers: request.headers.toJSON({ preserveCase: true }),
+        };
+        return new Promise((resolve, reject) => {
+            const req = isInsecure ? http.request(options, resolve) : https.request(options, resolve);
+            req.once("error", (err) => {
+                var _a;
+                reject(new restError_js_1.RestError(err.message, { code: (_a = err.code) !== null && _a !== void 0 ? _a : restError_js_1.RestError.REQUEST_SEND_ERROR, request }));
+            });
+            abortController.signal.addEventListener("abort", () => {
+                const abortError = new abort_controller_1.AbortError("The operation was aborted.");
+                req.destroy(abortError);
+                reject(abortError);
+            });
+            if (body && isReadableStream(body)) {
+                body.pipe(req);
+            }
+            else if (body) {
+                if (typeof body === "string" || Buffer.isBuffer(body)) {
+                    req.end(body);
+                }
+                else if (isArrayBuffer(body)) {
+                    req.end(ArrayBuffer.isView(body) ? Buffer.from(body.buffer) : Buffer.from(body));
+                }
+                else {
+                    log_js_1.logger.error("Unrecognized body type", body);
+                    reject(new restError_js_1.RestError("Unrecognized body type"));
+                }
+            }
+            else {
+                // streams don't like "undefined" being passed as data
+                req.end();
+            }
+        });
+    }
+    getOrCreateAgent(request, isInsecure) {
+        var _a;
+        const disableKeepAlive = request.disableKeepAlive;
+        // Handle Insecure requests first
+        if (isInsecure) {
+            if (disableKeepAlive) {
+                // keepAlive:false is the default so we don't need a custom Agent
+                return http.globalAgent;
+            }
+            if (!this.cachedHttpAgent) {
+                // If there is no cached agent create a new one and cache it.
+                this.cachedHttpAgent = new http.Agent({ keepAlive: true });
+            }
+            return this.cachedHttpAgent;
+        }
+        else {
+            if (disableKeepAlive && !request.tlsSettings) {
+                // When there are no tlsSettings and keepAlive is false
+                // we don't need a custom agent
+                return https.globalAgent;
+            }
+            // We use the tlsSettings to index cached agents
+            const tlsSettings = (_a = request.tlsSettings) !== null && _a !== void 0 ? _a : DEFAULT_TLS_SETTINGS;
+            // Get the cached agent or create a new one with the
+            // provided values for keepAlive and tlsSettings
+            let agent = this.cachedHttpsAgents.get(tlsSettings);
+            if (agent && agent.options.keepAlive === !disableKeepAlive) {
+                return agent;
+            }
+            log_js_1.logger.info("No cached TLS Agent exist, creating a new Agent");
+            agent = new https.Agent(Object.assign({ 
+                // keepAlive is true if disableKeepAlive is false.
+                keepAlive: !disableKeepAlive }, tlsSettings));
+            this.cachedHttpsAgents.set(tlsSettings, agent);
+            return agent;
+        }
+    }
+}
+function getResponseHeaders(res) {
+    const headers = (0, httpHeaders_js_1.createHttpHeaders)();
+    for (const header of Object.keys(res.headers)) {
+        const value = res.headers[header];
+        if (Array.isArray(value)) {
+            if (value.length > 0) {
+                headers.set(header, value[0]);
+            }
+        }
+        else if (value) {
+            headers.set(header, value);
+        }
+    }
+    return headers;
+}
+function getDecodedResponseStream(stream, headers) {
+    const contentEncoding = headers.get("Content-Encoding");
+    if (contentEncoding === "gzip") {
+        const unzip = zlib.createGunzip();
+        stream.pipe(unzip);
+        return unzip;
+    }
+    else if (contentEncoding === "deflate") {
+        const inflate = zlib.createInflate();
+        stream.pipe(inflate);
+        return inflate;
+    }
+    return stream;
+}
+function streamToText(stream) {
+    return new Promise((resolve, reject) => {
+        const buffer = [];
+        stream.on("data", (chunk) => {
+            if (Buffer.isBuffer(chunk)) {
+                buffer.push(chunk);
+            }
+            else {
+                buffer.push(Buffer.from(chunk));
+            }
+        });
+        stream.on("end", () => {
+            resolve(Buffer.concat(buffer).toString("utf8"));
+        });
+        stream.on("error", (e) => {
+            if (e && (e === null || e === void 0 ? void 0 : e.name) === "AbortError") {
+                reject(e);
+            }
+            else {
+                reject(new restError_js_1.RestError(`Error reading response as text: ${e.message}`, {
+                    code: restError_js_1.RestError.PARSE_ERROR,
+                }));
+            }
+        });
+    });
+}
+/** @internal */
+function getBodyLength(body) {
+    if (!body) {
+        return 0;
+    }
+    else if (Buffer.isBuffer(body)) {
+        return body.length;
+    }
+    else if (isReadableStream(body)) {
+        return null;
+    }
+    else if (isArrayBuffer(body)) {
+        return body.byteLength;
+    }
+    else if (typeof body === "string") {
+        return Buffer.from(body).length;
+    }
+    else {
+        return null;
+    }
+}
+/**
+ * Create a new HttpClient instance for the NodeJS environment.
+ * @internal
+ */
+function createNodeHttpClient() {
+    return new NodeHttpClient();
+}
+//# sourceMappingURL=nodeHttpClient.js.map
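+// A hedged usage sketch (kept as a comment so it does not execute in the bundle): opting into a
+// streamed response body with the Node HTTP client above. By default the body is buffered into
+// `bodyAsText`; when the response status is listed in `streamResponseStatusCodes` (or the set
+// contains Number.POSITIVE_INFINITY, meaning any status), the raw stream is exposed instead.
+// The URL is a placeholder and createPipelineRequest comes from the pipelineRequest module.
+//
+//   const client = createNodeHttpClient();
+//   async function download() {
+//     const request = createPipelineRequest({
+//       url: "https://example.com/large-download",
+//       streamResponseStatusCodes: new Set([Number.POSITIVE_INFINITY]),
+//     });
+//     const response = await client.sendRequest(request);
+//     response.readableStreamBody.pipe(process.stdout);
+//   }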
+
+/***/ }),
+
+/***/ 7311:
+/***/ ((__unused_webpack_module, exports) => {
+
+
+// Copyright (c) Microsoft Corporation.
+// Licensed under the MIT License.
+Object.defineProperty(exports, "__esModule", ({ value: true }));
+exports.createEmptyPipeline = createEmptyPipeline;
+const ValidPhaseNames = new Set(["Deserialize", "Serialize", "Retry", "Sign"]);
+/**
+ * A private implementation of Pipeline.
+ * Do not export this class from the package.
+ * @internal
+ */
+class HttpPipeline {
+    constructor(policies) {
+        var _a;
+        this._policies = [];
+        this._policies = (_a = policies === null || policies === void 0 ? void 0 : policies.slice(0)) !== null && _a !== void 0 ? _a : [];
+        this._orderedPolicies = undefined;
+    }
+    addPolicy(policy, options = {}) {
+        if (options.phase && options.afterPhase) {
+            throw new Error("Policies inside a phase cannot specify afterPhase.");
+        }
+        if (options.phase && !ValidPhaseNames.has(options.phase)) {
+            throw new Error(`Invalid phase name: ${options.phase}`);
+        }
+        if (options.afterPhase && !ValidPhaseNames.has(options.afterPhase)) {
+            throw new Error(`Invalid afterPhase name: ${options.afterPhase}`);
+        }
+        this._policies.push({
+            policy,
+            options,
+        });
+        this._orderedPolicies = undefined;
+    }
+    removePolicy(options) {
+        const removedPolicies = [];
+        this._policies = this._policies.filter((policyDescriptor) => {
+            if ((options.name && policyDescriptor.policy.name === options.name) ||
+                (options.phase && policyDescriptor.options.phase === options.phase)) {
+                removedPolicies.push(policyDescriptor.policy);
+                return false;
+            }
+            else {
+                return true;
+            }
+        });
+        this._orderedPolicies = undefined;
+        return removedPolicies;
+    }
+    sendRequest(httpClient, request) {
+        const policies = this.getOrderedPolicies();
+        const pipeline = policies.reduceRight((next, policy) => {
+            return (req) => {
+                return policy.sendRequest(req, next);
+            };
+        }, (req) => httpClient.sendRequest(req));
+        return pipeline(request);
+    }
+    getOrderedPolicies() {
+        if (!this._orderedPolicies) {
+            this._orderedPolicies = this.orderPolicies();
+        }
+        return this._orderedPolicies;
+    }
+    clone() {
+        return new HttpPipeline(this._policies);
+    }
+    static create() {
+        return new HttpPipeline();
+    }
+    orderPolicies() {
+        /**
+         * The goal of this method is to reliably order pipeline policies
+         * based on their declared requirements when they were added.
+         *
+         * Order is first determined by phase:
+         *
+         * 1. Serialize Phase
+         * 2. Policies not in a phase
+         * 3. Deserialize Phase
+         * 4. Retry Phase
+         * 5. Sign Phase
+         *
+         * Within each phase, policies are executed in the order
+         * they were added unless they were specified to execute
+         * before/after other policies or after a particular phase.
+         *
+         * To determine the final order, we will walk the policy list
+         * in phase order multiple times until all dependencies are
+         * satisfied.
+         *
+         * `afterPolicies` are the set of policies that must be
+         * executed before a given policy. This requirement is
+         * considered satisfied when each of the listed policies
+         * has been scheduled.
+         *
+         * `beforePolicies` are the set of policies that must be
+         * executed after a given policy. Since this dependency
+         * can be expressed by converting it into equivalent
+         * `afterPolicies` declarations, they are normalized
+         * into that form for simplicity.
+         *
+         * An `afterPhase` dependency is considered satisfied when all
+         * policies in that phase have been scheduled.
+         *
+         */
+        const result = [];
+        // Track all policies we know about.
+        const policyMap = new Map();
+        function createPhase(name) {
+            return {
+                name,
+                policies: new Set(),
+                hasRun: false,
+                hasAfterPolicies: false,
+            };
+        }
+        // Track policies for each phase.
+        const serializePhase = createPhase("Serialize");
+        const noPhase = createPhase("None");
+        const deserializePhase = createPhase("Deserialize");
+        const retryPhase = createPhase("Retry");
+        const signPhase = createPhase("Sign");
+        // a list of phases in order
+        const orderedPhases = [serializePhase, noPhase, deserializePhase, retryPhase, signPhase];
+        // Small helper function to map phase name to each Phase
+        function getPhase(phase) {
+            if (phase === "Retry") {
+                return retryPhase;
+            }
+            else if (phase === "Serialize") {
+                return serializePhase;
+            }
+            else if (phase === "Deserialize") {
+                return deserializePhase;
+            }
+            else if (phase === "Sign") {
+                return signPhase;
+            }
+            else {
+                return noPhase;
+            }
+        }
+        // First walk each policy and create a node to track metadata.
+        for (const descriptor of this._policies) {
+            const policy = descriptor.policy;
+            const options = descriptor.options;
+            const policyName = policy.name;
+            if (policyMap.has(policyName)) {
+                throw new Error("Duplicate policy names not allowed in pipeline");
+            }
+            const node = {
+                policy,
+                dependsOn: new Set(),
+                dependants: new Set(),
+            };
+            if (options.afterPhase) {
+                node.afterPhase = getPhase(options.afterPhase);
+                node.afterPhase.hasAfterPolicies = true;
+            }
+            policyMap.set(policyName, node);
+            const phase = getPhase(options.phase);
+            phase.policies.add(node);
+        }
+        // Now that each policy has a node, connect dependency references.
+        for (const descriptor of this._policies) {
+            const { policy, options } = descriptor;
+            const policyName = policy.name;
+            const node = policyMap.get(policyName);
+            if (!node) {
+                throw new Error(`Missing node for policy ${policyName}`);
+            }
+            if (options.afterPolicies) {
+                for (const afterPolicyName of options.afterPolicies) {
+                    const afterNode = policyMap.get(afterPolicyName);
+                    if (afterNode) {
+                        // Linking in both directions helps later
+                        // when we want to notify dependants.
+                        node.dependsOn.add(afterNode);
+                        afterNode.dependants.add(node);
+                    }
+                }
+            }
+            if (options.beforePolicies) {
+                for (const beforePolicyName of options.beforePolicies) {
+                    const beforeNode = policyMap.get(beforePolicyName);
+                    if (beforeNode) {
+                        // To execute before another node, make it
+                        // depend on the current node.
+                        beforeNode.dependsOn.add(node);
+                        node.dependants.add(beforeNode);
+                    }
+                }
+            }
+        }
+        function walkPhase(phase) {
+            phase.hasRun = true;
+            // Sets iterate in insertion order
+            for (const node of phase.policies) {
+                if (node.afterPhase && (!node.afterPhase.hasRun || node.afterPhase.policies.size)) {
+                    // If this node is waiting on a phase to complete,
+                    // we need to skip it for now.
+                    // Even if the phase is empty, we should wait for it
+                    // to be walked to avoid re-ordering policies.
+                    continue;
+                }
+                if (node.dependsOn.size === 0) {
+                    // If there's nothing else we're waiting for, we can
+                    // add this policy to the result list.
+                    result.push(node.policy);
+                    // Notify anything that depends on this policy that
+                    // the policy has been scheduled.
+                    for (const dependant of node.dependants) {
+                        dependant.dependsOn.delete(node);
+                    }
+                    policyMap.delete(node.policy.name);
+                    phase.policies.delete(node);
+                }
+            }
+        }
+        function walkPhases() {
+            for (const phase of orderedPhases) {
+                walkPhase(phase);
+                // if the phase isn't complete
+                if (phase.policies.size > 0 && phase !== noPhase) {
+                    if (!noPhase.hasRun) {
+                        // Try running noPhase to see if that unblocks this phase on the next pass.
+                        // This can happen if a phase that runs before noPhase
+                        // is waiting on a noPhase policy to complete.
+                        walkPhase(noPhase);
+                    }
+                    // Don't proceed to the next phase until this phase finishes.
+                    return;
+                }
+                if (phase.hasAfterPolicies) {
+                    // Run any policies unblocked by this phase
+                    walkPhase(noPhase);
+                }
+            }
+        }
+        // Iterate until we've put every node in the result list.
+        let iteration = 0;
+        while (policyMap.size > 0) {
+            iteration++;
+            const initialResultLength = result.length;
+            // Keep walking each phase in order until we can order every node.
+            walkPhases();
+            // The result list *should* get at least one larger each time
+            // after the first full pass.
+            // Otherwise, we're going to loop forever.
+            if (result.length <= initialResultLength && iteration > 1) {
+                throw new Error("Cannot satisfy policy dependencies due to requirements cycle.");
+            }
+        }
+        return result;
+    }
+}
+/**
+ * Creates a totally empty pipeline.
+ * Useful for testing or creating a custom one.
+ */
+function createEmptyPipeline() {
+    return HttpPipeline.create();
+}
+//# sourceMappingURL=pipeline.js.map
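+// A hedged usage sketch (kept as a comment so it does not execute in the bundle): adding a
+// custom policy to an empty pipeline and using the ordering options resolved by orderPolicies
+// above. The policy name, the header, and `somePolicy`/`anotherPolicy` are placeholders.
+//
+//   const pipeline = createEmptyPipeline();
+//   pipeline.addPolicy({
+//     name: "stampHeaderPolicy",
+//     async sendRequest(request, next) {
+//       request.headers.set("x-example-stamp", Date.now().toString());
+//       return next(request);
+//     },
+//   });
+//   pipeline.addPolicy(somePolicy, { afterPhase: "Retry" });                     // runs once the Retry phase has completed
+//   pipeline.addPolicy(anotherPolicy, { beforePolicies: ["stampHeaderPolicy"] }); // runs before the named policy
+//   // Valid phases are "Serialize", "Deserialize", "Retry" and "Sign"; a policy may not set both
+//   // `phase` and `afterPhase`.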
+
+/***/ }),
+
+/***/ 6386:
+/***/ ((__unused_webpack_module, exports, __nccwpck_require__) => {
+
+
+// Copyright (c) Microsoft Corporation.
+// Licensed under the MIT License.
+Object.defineProperty(exports, "__esModule", ({ value: true }));
+exports.createPipelineRequest = createPipelineRequest;
+const httpHeaders_js_1 = __nccwpck_require__(6955);
+const core_util_1 = __nccwpck_require__(402);
+class PipelineRequestImpl {
+    constructor(options) {
+        var _a, _b, _c, _d, _e, _f, _g;
+        this.url = options.url;
+        this.body = options.body;
+        this.headers = (_a = options.headers) !== null && _a !== void 0 ? _a : (0, httpHeaders_js_1.createHttpHeaders)();
+        this.method = (_b = options.method) !== null && _b !== void 0 ? _b : "GET";
+        this.timeout = (_c = options.timeout) !== null && _c !== void 0 ? _c : 0;
+        this.multipartBody = options.multipartBody;
+        this.formData = options.formData;
+        this.disableKeepAlive = (_d = options.disableKeepAlive) !== null && _d !== void 0 ? _d : false;
+        this.proxySettings = options.proxySettings;
+        this.streamResponseStatusCodes = options.streamResponseStatusCodes;
+        this.withCredentials = (_e = options.withCredentials) !== null && _e !== void 0 ? _e : false;
+        this.abortSignal = options.abortSignal;
+        this.tracingOptions = options.tracingOptions;
+        this.onUploadProgress = options.onUploadProgress;
+        this.onDownloadProgress = options.onDownloadProgress;
+        this.requestId = options.requestId || (0, core_util_1.randomUUID)();
+        this.allowInsecureConnection = (_f = options.allowInsecureConnection) !== null && _f !== void 0 ? _f : false;
+        this.enableBrowserStreams = (_g = options.enableBrowserStreams) !== null && _g !== void 0 ? _g : false;
+    }
+}
+/**
+ * Creates a new pipeline request with the given options.
+ * This helper exists to make it easy to apply default values; using it is not required.
+ * @param options - The options to create the request with.
+ */
+function createPipelineRequest(options) {
+    return new PipelineRequestImpl(options);
+}
+//# sourceMappingURL=pipelineRequest.js.map
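+// A hedged usage sketch (kept as a comment so it does not execute in the bundle): the defaults
+// applied by PipelineRequestImpl above. The URL is a placeholder.
+//
+//   const request = createPipelineRequest({ url: "https://example.com" });
+//   request.method;          // "GET" unless a method was provided
+//   request.timeout;         // 0, i.e. no client-side timeout
+//   request.withCredentials; // false
+//   request.headers;         // a fresh HttpHeaders collection when none was supplied
+//   request.requestId;       // a generated UUID unless one was provided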
+
+/***/ }),
+
+/***/ 7179:
+/***/ ((__unused_webpack_module, exports, __nccwpck_require__) => {
+
+
+// Copyright (c) Microsoft Corporation.
+// Licensed under the MIT License.
+Object.defineProperty(exports, "__esModule", ({ value: true }));
+exports.auxiliaryAuthenticationHeaderPolicyName = void 0;
+exports.auxiliaryAuthenticationHeaderPolicy = auxiliaryAuthenticationHeaderPolicy;
+const tokenCycler_js_1 = __nccwpck_require__(47);
+const log_js_1 = __nccwpck_require__(3747);
+/**
+ * The programmatic identifier of the auxiliaryAuthenticationHeaderPolicy.
+ */
+exports.auxiliaryAuthenticationHeaderPolicyName = "auxiliaryAuthenticationHeaderPolicy";
+const AUTHORIZATION_AUXILIARY_HEADER = "x-ms-authorization-auxiliary";
+async function sendAuthorizeRequest(options) {
+    var _a, _b;
+    const { scopes, getAccessToken, request } = options;
+    const getTokenOptions = {
+        abortSignal: request.abortSignal,
+        tracingOptions: request.tracingOptions,
+    };
+    return (_b = (_a = (await getAccessToken(scopes, getTokenOptions))) === null || _a === void 0 ? void 0 : _a.token) !== null && _b !== void 0 ? _b : "";
+}
+/**
+ * A policy that applies external tokens to the `x-ms-authorization-auxiliary` header.
+ * This header is used when a cross-tenant application needs to handle authentication requests
+ * for resources that live in different tenants.
+ * See the [ARM docs](https://learn.microsoft.com/en-us/azure/azure-resource-manager/management/authenticate-multi-tenant) for a rundown of how this feature works.
+ */
+function auxiliaryAuthenticationHeaderPolicy(options) {
+    const { credentials, scopes } = options;
+    const logger = options.logger || log_js_1.logger;
+    const tokenCyclerMap = new WeakMap();
+    return {
+        name: exports.auxiliaryAuthenticationHeaderPolicyName,
+        async sendRequest(request, next) {
+            if (!request.url.toLowerCase().startsWith("https://")) {
+                throw new Error("Bearer token authentication for auxiliary header is not permitted for non-TLS protected (non-https) URLs.");
+            }
+            if (!credentials || credentials.length === 0) {
+                logger.info(`${exports.auxiliaryAuthenticationHeaderPolicyName} header will not be set due to empty credentials.`);
+                return next(request);
+            }
+            const tokenPromises = [];
+            for (const credential of credentials) {
+                let getAccessToken = tokenCyclerMap.get(credential);
+                if (!getAccessToken) {
+                    getAccessToken = (0, tokenCycler_js_1.createTokenCycler)(credential);
+                    tokenCyclerMap.set(credential, getAccessToken);
+                }
+                tokenPromises.push(sendAuthorizeRequest({
+                    scopes: Array.isArray(scopes) ? scopes : [scopes],
+                    request,
+                    getAccessToken,
+                    logger,
+                }));
+            }
+            const auxiliaryTokens = (await Promise.all(tokenPromises)).filter((token) => Boolean(token));
+            if (auxiliaryTokens.length === 0) {
+                logger.warning(`None of the auxiliary tokens are valid. ${AUTHORIZATION_AUXILIARY_HEADER} header will not be set.`);
+                return next(request);
+            }
+            request.headers.set(AUTHORIZATION_AUXILIARY_HEADER, auxiliaryTokens.map((token) => `Bearer ${token}`).join(", "));
+            return next(request);
+        },
+    };
+}
+//# sourceMappingURL=auxiliaryAuthenticationHeaderPolicy.js.map
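+// A hedged usage sketch (kept as a comment so it does not execute in the bundle): wiring the
+// auxiliary authentication header policy into a pipeline for cross-tenant calls. `pipeline`,
+// `auxTenantCredential1` and `auxTenantCredential2` are placeholders; the credentials stand in
+// for TokenCredential implementations, and the scope shown is only an example.
+//
+//   pipeline.addPolicy(auxiliaryAuthenticationHeaderPolicy({
+//     credentials: [auxTenantCredential1, auxTenantCredential2],
+//     scopes: "https://management.azure.com/.default",
+//   }));
+//   // Requests then carry: x-ms-authorization-auxiliary: Bearer <token1>, Bearer <token2>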
+
+/***/ }),
+
+/***/ 5700:
+/***/ ((__unused_webpack_module, exports, __nccwpck_require__) => {
+
+
+// Copyright (c) Microsoft Corporation.
+// Licensed under the MIT License.
+Object.defineProperty(exports, "__esModule", ({ value: true }));
+exports.bearerTokenAuthenticationPolicyName = void 0;
+exports.bearerTokenAuthenticationPolicy = bearerTokenAuthenticationPolicy;
+exports.parseChallenges = parseChallenges;
+const tokenCycler_js_1 = __nccwpck_require__(47);
+const log_js_1 = __nccwpck_require__(3747);
+const restError_js_1 = __nccwpck_require__(4503);
+/**
+ * The programmatic identifier of the bearerTokenAuthenticationPolicy.
+ */
+exports.bearerTokenAuthenticationPolicyName = "bearerTokenAuthenticationPolicy";
+/**
+ * Try to send the given request.
+ *
+ * When a response is received, returns a tuple of the response received and, if the response was received
+ * inside a thrown RestError, the RestError that was thrown.
+ *
+ * Otherwise, if an error was thrown while sending the request that did not provide an underlying response, it
+ * will be rethrown.
+ */
+async function trySendRequest(request, next) {
+    try {
+        return [await next(request), undefined];
+    }
+    catch (e) {
+        if ((0, restError_js_1.isRestError)(e) && e.response) {
+            return [e.response, e];
+        }
+        else {
+            throw e;
+        }
+    }
+}
+/**
+ * Default authorize request handler
+ */
+async function defaultAuthorizeRequest(options) {
+    const { scopes, getAccessToken, request } = options;
+    // Enable CAE by default
+    const getTokenOptions = {
+        abortSignal: request.abortSignal,
+        tracingOptions: request.tracingOptions,
+        enableCae: true,
+    };
+    const accessToken = await getAccessToken(scopes, getTokenOptions);
+    if (accessToken) {
+        options.request.headers.set("Authorization", `Bearer ${accessToken.token}`);
+    }
+}
+/**
+ * We will retrieve the challenge only if the response status code was 401,
+ * and if the response contained the header "WWW-Authenticate" with a non-empty value.
+ */
+function isChallengeResponse(response) {
+    return response.status === 401 && response.headers.has("WWW-Authenticate");
+}
+/**
+ * Re-authorize the request for CAE challenge.
+ * The response containing the challenge is `options.response`.
+ * If this method returns true, the underlying request will be sent once again.
+ */
+async function authorizeRequestOnCaeChallenge(onChallengeOptions, caeClaims) {
+    var _a;
+    const { scopes } = onChallengeOptions;
+    const accessToken = await onChallengeOptions.getAccessToken(scopes, {
+        enableCae: true,
+        claims: caeClaims,
+    });
+    if (!accessToken) {
+        return false;
+    }
+    onChallengeOptions.request.headers.set("Authorization", `${(_a = accessToken.tokenType) !== null && _a !== void 0 ? _a : "Bearer"} ${accessToken.token}`);
+    return true;
+}
+/**
+ * A policy that can request a token from a TokenCredential implementation and
+ * then apply it to the Authorization header of a request as a Bearer token.
+ */
+function bearerTokenAuthenticationPolicy(options) {
+    var _a;
+    const { credential, scopes, challengeCallbacks } = options;
+    const logger = options.logger || log_js_1.logger;
+    const callbacks = {
+        authorizeRequest: (_a = challengeCallbacks === null || challengeCallbacks === void 0 ? void 0 : challengeCallbacks.authorizeRequest) !== null && _a !== void 0 ? _a : defaultAuthorizeRequest,
+        authorizeRequestOnChallenge: challengeCallbacks === null || challengeCallbacks === void 0 ? void 0 : challengeCallbacks.authorizeRequestOnChallenge,
+    };
+    // This function encapsulates the entire process of reliably retrieving the token
+    // The options are left out of the public API until there's demand to configure this.
+    // Remember to extend `BearerTokenAuthenticationPolicyOptions` with `TokenCyclerOptions`
+    // in order to pass through the `options` object.
+    const getAccessToken = credential
+        ? (0, tokenCycler_js_1.createTokenCycler)(credential /* , options */)
+        : () => Promise.resolve(null);
+    return {
+        name: exports.bearerTokenAuthenticationPolicyName,
+        /**
+         * If there's no challenge parameter:
+         * - It will try to retrieve the token using the cache, or the credential's getToken.
+         * - Then it will try the next policy with or without the retrieved token.
+         *
+         * It uses the challenge parameters to:
+         * - Skip a first attempt to get the token from the credential if there's no cached token,
+         *   since it expects the token to be retrievable only after the challenge.
+         * - Prepare the outgoing request if the `prepareRequest` method has been provided.
+         * - Send an initial request to receive the challenge if it fails.
+         * - Process a challenge if the response contains it.
+         * - Retrieve a token with the challenge information, then re-send the request.
+         */
+        async sendRequest(request, next) {
+            if (!request.url.toLowerCase().startsWith("https://")) {
+                throw new Error("Bearer token authentication is not permitted for non-TLS protected (non-https) URLs.");
+            }
+            await callbacks.authorizeRequest({
+                scopes: Array.isArray(scopes) ? scopes : [scopes],
+                request,
+                getAccessToken,
+                logger,
+            });
+            let response;
+            let error;
+            let shouldSendRequest;
+            [response, error] = await trySendRequest(request, next);
+            if (isChallengeResponse(response)) {
+                let claims = getCaeChallengeClaims(response.headers.get("WWW-Authenticate"));
+                // Handle CAE by default when receive CAE claim
+                if (claims) {
+                    let parsedClaim;
+                    // Return the response immediately if claims is not a valid base64 encoded string
+                    try {
+                        parsedClaim = atob(claims);
+                    }
+                    catch (e) {
+                        logger.warning(`The WWW-Authenticate header contains "claims" that cannot be parsed. Unable to perform the Continuous Access Evaluation authentication flow. Unparsable claims: ${claims}`);
+                        return response;
+                    }
+                    shouldSendRequest = await authorizeRequestOnCaeChallenge({
+                        scopes: Array.isArray(scopes) ? scopes : [scopes],
+                        response,
+                        request,
+                        getAccessToken,
+                        logger,
+                    }, parsedClaim);
+                    // Send updated request and handle response for RestError
+                    if (shouldSendRequest) {
+                        [response, error] = await trySendRequest(request, next);
+                    }
+                }
+                else if (callbacks.authorizeRequestOnChallenge) {
+                    // Handle custom challenges when client provides custom callback
+                    shouldSendRequest = await callbacks.authorizeRequestOnChallenge({
+                        scopes: Array.isArray(scopes) ? scopes : [scopes],
+                        request,
+                        response,
+                        getAccessToken,
+                        logger,
+                    });
+                    // Send updated request and handle response for RestError
+                    if (shouldSendRequest) {
+                        [response, error] = await trySendRequest(request, next);
+                    }
+                    // If we get another CAE claim, handle it by default as well and return whatever response we receive
+                    if (isChallengeResponse(response)) {
+                        claims = getCaeChallengeClaims(response.headers.get("WWW-Authenticate"));
+                        if (claims) {
+                            let parsedClaim;
+                            try {
+                                parsedClaim = atob(claims);
+                            }
+                            catch (e) {
+                                logger.warning(`The WWW-Authenticate header contains "claims" that cannot be parsed. Unable to perform the Continuous Access Evaluation authentication flow. Unparsable claims: ${claims}`);
+                                return response;
+                            }
+                            shouldSendRequest = await authorizeRequestOnCaeChallenge({
+                                scopes: Array.isArray(scopes) ? scopes : [scopes],
+                                response,
+                                request,
+                                getAccessToken,
+                                logger,
+                            }, parsedClaim);
+                            // Send updated request and handle response for RestError
+                            if (shouldSendRequest) {
+                                [response, error] = await trySendRequest(request, next);
+                            }
+                        }
+                    }
+                }
+            }
+            if (error) {
+                throw error;
+            }
+            else {
+                return response;
+            }
+        },
+    };
+}
+/**
+ * Converts: `Bearer a="b", c="d", Pop e="f", g="h"`.
+ * Into: `[ { scheme: 'Bearer', params: { a: 'b', c: 'd' } }, { scheme: 'Pop', params: { e: 'f', g: 'h' } } ]`.
+ *
+ * @internal
+ */
+function parseChallenges(challenges) {
+    // The challenge regex separates the string into individual challenges with different schemes in the format `Scheme a="b", c=d`
+    // It captures parameters with either quoted or unquoted values
+    const challengeRegex = /(\w+)\s+((?:\w+=(?:"[^"]*"|[^,]*),?\s*)+)/g;
+    // The parameter regex captures the individual parameters that follow the scheme, in the format `a="b"` and `c="d"`
+    // CAE challenges always have quoted parameters. For more information, see https://learn.microsoft.com/entra/identity-platform/claims-challenge
+    const paramRegex = /(\w+)="([^"]*)"/g;
+    const parsedChallenges = [];
+    let match;
+    // Iterate over each challenge match
+    while ((match = challengeRegex.exec(challenges)) !== null) {
+        const scheme = match[1];
+        const paramsString = match[2];
+        const params = {};
+        let paramMatch;
+        // Iterate over each parameter match
+        while ((paramMatch = paramRegex.exec(paramsString)) !== null) {
+            params[paramMatch[1]] = paramMatch[2];
+        }
+        parsedChallenges.push({ scheme, params });
+    }
+    return parsedChallenges;
+}
+/**
+ * Parses a pipeline response looking for a CAE challenge with the "Bearer" scheme.
+ * Returns the raw "claims" parameter value from the header without decoding it.
+ * @internal
+ */
+function getCaeChallengeClaims(challenges) {
+    var _a;
+    if (!challenges) {
+        return;
+    }
+    // Find all challenges present in the header
+    const parsedChallenges = parseChallenges(challenges);
+    return (_a = parsedChallenges.find((x) => x.scheme === "Bearer" && x.params.claims && x.params.error === "insufficient_claims")) === null || _a === void 0 ? void 0 : _a.params.claims;
+}
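+// Illustrative sketch (comment only, not part of the bundled module): given a
+// hypothetical CAE challenge header, parseChallenges splits it into schemes and
+// parameters, and getCaeChallengeClaims returns the raw base64 "claims" value
+// that the bearer token policy above later decodes with atob.
+//
+//   const header = 'Bearer realm="", error="insufficient_claims", claims="<base64 claims>"';
+//   parseChallenges(header);
+//   // => [ { scheme: "Bearer", params: { realm: "", error: "insufficient_claims", claims: "<base64 claims>" } } ]
+//   getCaeChallengeClaims(header);
+//   // => "<base64 claims>"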
+//# sourceMappingURL=bearerTokenAuthenticationPolicy.js.map
+
+/***/ }),
+
+/***/ 9712:
+/***/ ((__unused_webpack_module, exports) => {
+
+
+// Copyright (c) Microsoft Corporation.
+// Licensed under the MIT License.
+Object.defineProperty(exports, "__esModule", ({ value: true }));
+exports.decompressResponsePolicyName = void 0;
+exports.decompressResponsePolicy = decompressResponsePolicy;
+/**
+ * The programmatic identifier of the decompressResponsePolicy.
+ */
+exports.decompressResponsePolicyName = "decompressResponsePolicy";
+/**
+ * A policy to enable response decompression according to Accept-Encoding header
+ * https://developer.mozilla.org/en-US/docs/Web/HTTP/Headers/Accept-Encoding
+ */
+function decompressResponsePolicy() {
+    return {
+        name: exports.decompressResponsePolicyName,
+        async sendRequest(request, next) {
+            // HEAD requests have no body
+            if (request.method !== "HEAD") {
+                request.headers.set("Accept-Encoding", "gzip,deflate");
+            }
+            return next(request);
+        },
+    };
+}
+//# sourceMappingURL=decompressResponsePolicy.js.map
+
+/***/ }),
+
+/***/ 2122:
+/***/ ((__unused_webpack_module, exports, __nccwpck_require__) => {
+
+
+// Copyright (c) Microsoft Corporation.
+// Licensed under the MIT License.
+Object.defineProperty(exports, "__esModule", ({ value: true }));
+exports.defaultRetryPolicyName = void 0;
+exports.defaultRetryPolicy = defaultRetryPolicy;
+const exponentialRetryStrategy_js_1 = __nccwpck_require__(4695);
+const throttlingRetryStrategy_js_1 = __nccwpck_require__(3791);
+const retryPolicy_js_1 = __nccwpck_require__(2876);
+const constants_js_1 = __nccwpck_require__(1652);
+/**
+ * Name of the {@link defaultRetryPolicy}
+ */
+exports.defaultRetryPolicyName = "defaultRetryPolicy";
+/**
+ * A policy that retries according to three strategies:
+ * - When the server sends a 429 response with a Retry-After header.
+ * - When there are errors in the underlying transport layer (e.g. DNS lookup failures).
+ * - Or otherwise if the outgoing request fails, it will retry with an exponentially increasing delay.
+ */
+function defaultRetryPolicy(options = {}) {
+    var _a;
+    return {
+        name: exports.defaultRetryPolicyName,
+        sendRequest: (0, retryPolicy_js_1.retryPolicy)([(0, throttlingRetryStrategy_js_1.throttlingRetryStrategy)(), (0, exponentialRetryStrategy_js_1.exponentialRetryStrategy)(options)], {
+            maxRetries: (_a = options.maxRetries) !== null && _a !== void 0 ? _a : constants_js_1.DEFAULT_RETRY_POLICY_COUNT,
+        }).sendRequest,
+    };
+}
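+// Usage sketch (comment only): the option names shown are the ones consumed in
+// this bundle (maxRetries above; retryDelayInMs and maxRetryDelayInMs inside
+// exponentialRetryStrategy). Registering the policy with addPolicy assumes a
+// pipeline created through the package's createEmptyPipeline().
+//
+//   const policy = defaultRetryPolicy({
+//     maxRetries: 3,
+//     retryDelayInMs: 1000,
+//     maxRetryDelayInMs: 30 * 1000,
+//   });
+//   pipeline.addPolicy(policy);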
+//# sourceMappingURL=defaultRetryPolicy.js.map
+
+/***/ }),
+
+/***/ 9447:
+/***/ ((__unused_webpack_module, exports, __nccwpck_require__) => {
+
+
+// Copyright (c) Microsoft Corporation.
+// Licensed under the MIT License.
+Object.defineProperty(exports, "__esModule", ({ value: true }));
+exports.exponentialRetryPolicyName = void 0;
+exports.exponentialRetryPolicy = exponentialRetryPolicy;
+const exponentialRetryStrategy_js_1 = __nccwpck_require__(4695);
+const retryPolicy_js_1 = __nccwpck_require__(2876);
+const constants_js_1 = __nccwpck_require__(1652);
+/**
+ * The programmatic identifier of the exponentialRetryPolicy.
+ */
+exports.exponentialRetryPolicyName = "exponentialRetryPolicy";
+/**
+ * A policy that attempts to retry requests while introducing an exponentially increasing delay.
+ * @param options - Options that configure retry logic.
+ */
+function exponentialRetryPolicy(options = {}) {
+    var _a;
+    return (0, retryPolicy_js_1.retryPolicy)([
+        (0, exponentialRetryStrategy_js_1.exponentialRetryStrategy)(Object.assign(Object.assign({}, options), { ignoreSystemErrors: true })),
+    ], {
+        maxRetries: (_a = options.maxRetries) !== null && _a !== void 0 ? _a : constants_js_1.DEFAULT_RETRY_POLICY_COUNT,
+    });
+}
+//# sourceMappingURL=exponentialRetryPolicy.js.map
+
+/***/ }),
+
+/***/ 7870:
+/***/ ((__unused_webpack_module, exports, __nccwpck_require__) => {
+
+
+// Copyright (c) Microsoft Corporation.
+// Licensed under the MIT License.
+Object.defineProperty(exports, "__esModule", ({ value: true }));
+exports.formDataPolicyName = void 0;
+exports.formDataPolicy = formDataPolicy;
+const core_util_1 = __nccwpck_require__(402);
+const httpHeaders_js_1 = __nccwpck_require__(6955);
+/**
+ * The programmatic identifier of the formDataPolicy.
+ */
+exports.formDataPolicyName = "formDataPolicy";
+function formDataToFormDataMap(formData) {
+    var _a;
+    const formDataMap = {};
+    for (const [key, value] of formData.entries()) {
+        (_a = formDataMap[key]) !== null && _a !== void 0 ? _a : (formDataMap[key] = []);
+        formDataMap[key].push(value);
+    }
+    return formDataMap;
+}
+/**
+ * A policy that encodes FormData on the request into the body.
+ */
+function formDataPolicy() {
+    return {
+        name: exports.formDataPolicyName,
+        async sendRequest(request, next) {
+            if (core_util_1.isNodeLike && typeof FormData !== "undefined" && request.body instanceof FormData) {
+                request.formData = formDataToFormDataMap(request.body);
+                request.body = undefined;
+            }
+            if (request.formData) {
+                const contentType = request.headers.get("Content-Type");
+                if (contentType && contentType.indexOf("application/x-www-form-urlencoded") !== -1) {
+                    request.body = wwwFormUrlEncode(request.formData);
+                }
+                else {
+                    await prepareFormData(request.formData, request);
+                }
+                request.formData = undefined;
+            }
+            return next(request);
+        },
+    };
+}
+function wwwFormUrlEncode(formData) {
+    const urlSearchParams = new URLSearchParams();
+    for (const [key, value] of Object.entries(formData)) {
+        if (Array.isArray(value)) {
+            for (const subValue of value) {
+                urlSearchParams.append(key, subValue.toString());
+            }
+        }
+        else {
+            urlSearchParams.append(key, value.toString());
+        }
+    }
+    return urlSearchParams.toString();
+}
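+// Illustrative example (comment only): wwwFormUrlEncode flattens a form data map
+// into an application/x-www-form-urlencoded string via URLSearchParams.
+//
+//   wwwFormUrlEncode({ name: "Alice", tags: ["a", "b"] });
+//   // => "name=Alice&tags=a&tags=b"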
+async function prepareFormData(formData, request) {
+    // validate content type (multipart/form-data)
+    const contentType = request.headers.get("Content-Type");
+    if (contentType && !contentType.startsWith("multipart/form-data")) {
+        // content type is specified and is not multipart/form-data. Exit.
+        return;
+    }
+    request.headers.set("Content-Type", contentType !== null && contentType !== void 0 ? contentType : "multipart/form-data");
+    // set body to MultipartRequestBody using content from FormDataMap
+    const parts = [];
+    for (const [fieldName, values] of Object.entries(formData)) {
+        for (const value of Array.isArray(values) ? values : [values]) {
+            if (typeof value === "string") {
+                parts.push({
+                    headers: (0, httpHeaders_js_1.createHttpHeaders)({
+                        "Content-Disposition": `form-data; name="${fieldName}"`,
+                    }),
+                    body: (0, core_util_1.stringToUint8Array)(value, "utf-8"),
+                });
+            }
+            else if (value === undefined || value === null || typeof value !== "object") {
+                throw new Error(`Unexpected value for key ${fieldName}: ${value}. Value should be serialized to string first.`);
+            }
+            else {
+                // using || instead of ?? here since if value.name is empty we should create a file name
+                const fileName = value.name || "blob";
+                const headers = (0, httpHeaders_js_1.createHttpHeaders)();
+                headers.set("Content-Disposition", `form-data; name="${fieldName}"; filename="${fileName}"`);
+                // again, || is used since an empty value.type means the content type is unset
+                headers.set("Content-Type", value.type || "application/octet-stream");
+                parts.push({
+                    headers,
+                    body: value,
+                });
+            }
+        }
+    }
+    request.multipartBody = { parts };
+}
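+// Illustrative sketch (comment only): for a hypothetical form data map with a
+// string field and a file-like value, prepareFormData builds one part per value
+// and stores them on request.multipartBody for the multipart policy to encode.
+//
+//   // given request.formData = { title: "hello", attachment: someBlob }
+//   // request.multipartBody.parts becomes roughly:
+//   //   [ { headers: Content-Disposition: form-data; name="title",
+//   //       body: utf-8 bytes of "hello" },
+//   //     { headers: Content-Disposition: form-data; name="attachment"; filename="blob"
+//   //                Content-Type: application/octet-stream,
+//   //       body: someBlob } ]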
+//# sourceMappingURL=formDataPolicy.js.map
+
+/***/ }),
+
+/***/ 9060:
+/***/ ((__unused_webpack_module, exports, __nccwpck_require__) => {
+
+
+// Copyright (c) Microsoft Corporation.
+// Licensed under the MIT License.
+Object.defineProperty(exports, "__esModule", ({ value: true }));
+exports.logPolicyName = void 0;
+exports.logPolicy = logPolicy;
+const log_js_1 = __nccwpck_require__(3747);
+const sanitizer_js_1 = __nccwpck_require__(6825);
+/**
+ * The programmatic identifier of the logPolicy.
+ */
+exports.logPolicyName = "logPolicy";
+/**
+ * A policy that logs all requests and responses.
+ * @param options - Options to configure logPolicy.
+ */
+function logPolicy(options = {}) {
+    var _a;
+    const logger = (_a = options.logger) !== null && _a !== void 0 ? _a : log_js_1.logger.info;
+    const sanitizer = new sanitizer_js_1.Sanitizer({
+        additionalAllowedHeaderNames: options.additionalAllowedHeaderNames,
+        additionalAllowedQueryParameters: options.additionalAllowedQueryParameters,
+    });
+    return {
+        name: exports.logPolicyName,
+        async sendRequest(request, next) {
+            if (!logger.enabled) {
+                return next(request);
+            }
+            logger(`Request: ${sanitizer.sanitize(request)}`);
+            const response = await next(request);
+            logger(`Response status code: ${response.status}`);
+            logger(`Headers: ${sanitizer.sanitize(response.headers)}`);
+            return response;
+        },
+    };
+}
+//# sourceMappingURL=logPolicy.js.map
+
+/***/ }),
+
+/***/ 2730:
+/***/ ((__unused_webpack_module, exports, __nccwpck_require__) => {
+
+
+// Copyright (c) Microsoft Corporation.
+// Licensed under the MIT License.
+Object.defineProperty(exports, "__esModule", ({ value: true }));
+exports.multipartPolicyName = void 0;
+exports.multipartPolicy = multipartPolicy;
+const core_util_1 = __nccwpck_require__(402);
+const concat_js_1 = __nccwpck_require__(6404);
+const typeGuards_js_1 = __nccwpck_require__(3246);
+function generateBoundary() {
+    return `----AzSDKFormBoundary${(0, core_util_1.randomUUID)()}`;
+}
+function encodeHeaders(headers) {
+    let result = "";
+    for (const [key, value] of headers) {
+        result += `${key}: ${value}\r\n`;
+    }
+    return result;
+}
+function getLength(source) {
+    if (source instanceof Uint8Array) {
+        return source.byteLength;
+    }
+    else if ((0, typeGuards_js_1.isBlob)(source)) {
+        // if the blob was created using createFile, a size of -1 means the size is unknown
+        return source.size === -1 ? undefined : source.size;
+    }
+    else {
+        return undefined;
+    }
+}
+function getTotalLength(sources) {
+    let total = 0;
+    for (const source of sources) {
+        const partLength = getLength(source);
+        if (partLength === undefined) {
+            return undefined;
+        }
+        else {
+            total += partLength;
+        }
+    }
+    return total;
+}
+async function buildRequestBody(request, parts, boundary) {
+    const sources = [
+        (0, core_util_1.stringToUint8Array)(`--${boundary}`, "utf-8"),
+        ...parts.flatMap((part) => [
+            (0, core_util_1.stringToUint8Array)("\r\n", "utf-8"),
+            (0, core_util_1.stringToUint8Array)(encodeHeaders(part.headers), "utf-8"),
+            (0, core_util_1.stringToUint8Array)("\r\n", "utf-8"),
+            part.body,
+            (0, core_util_1.stringToUint8Array)(`\r\n--${boundary}`, "utf-8"),
+        ]),
+        (0, core_util_1.stringToUint8Array)("--\r\n\r\n", "utf-8"),
+    ];
+    const contentLength = getTotalLength(sources);
+    if (contentLength) {
+        request.headers.set("Content-Length", contentLength);
+    }
+    request.body = await (0, concat_js_1.concat)(sources);
+}
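+// Illustrative sketch (comment only): for a single part with one header, the
+// body assembled above has the standard multipart layout (CRLF line breaks):
+//
+//   --<boundary>
+//   Content-Disposition: form-data; name="title"
+//
+//   hello
+//   --<boundary>--
+//
+// Content-Length is only set when every part has a known length.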
+/**
+ * Name of multipart policy
+ */
+exports.multipartPolicyName = "multipartPolicy";
+const maxBoundaryLength = 70;
+const validBoundaryCharacters = new Set(`abcdefghijklmnopqrstuvwxyzABCDEFGHIJKLMNOPQRSTUVWXYZ0123456789'()+,-./:=?`);
+function assertValidBoundary(boundary) {
+    if (boundary.length > maxBoundaryLength) {
+        throw new Error(`Multipart boundary "${boundary}" exceeds maximum length of 70 characters`);
+    }
+    if (Array.from(boundary).some((x) => !validBoundaryCharacters.has(x))) {
+        throw new Error(`Multipart boundary "${boundary}" contains invalid characters`);
+    }
+}
+/**
+ * Pipeline policy for multipart requests
+ */
+function multipartPolicy() {
+    return {
+        name: exports.multipartPolicyName,
+        async sendRequest(request, next) {
+            var _a;
+            if (!request.multipartBody) {
+                return next(request);
+            }
+            if (request.body) {
+                throw new Error("multipartBody and regular body cannot be set at the same time");
+            }
+            let boundary = request.multipartBody.boundary;
+            const contentTypeHeader = (_a = request.headers.get("Content-Type")) !== null && _a !== void 0 ? _a : "multipart/mixed";
+            const parsedHeader = contentTypeHeader.match(/^(multipart\/[^ ;]+)(?:; *boundary=(.+))?$/);
+            if (!parsedHeader) {
+                throw new Error(`Got multipart request body, but content-type header was not multipart: ${contentTypeHeader}`);
+            }
+            const [, contentType, parsedBoundary] = parsedHeader;
+            if (parsedBoundary && boundary && parsedBoundary !== boundary) {
+                throw new Error(`Multipart boundary was specified as ${parsedBoundary} in the header, but got ${boundary} in the request body`);
+            }
+            boundary !== null && boundary !== void 0 ? boundary : (boundary = parsedBoundary);
+            if (boundary) {
+                assertValidBoundary(boundary);
+            }
+            else {
+                boundary = generateBoundary();
+            }
+            request.headers.set("Content-Type", `${contentType}; boundary=${boundary}`);
+            await buildRequestBody(request, request.multipartBody.parts, boundary);
+            request.multipartBody = undefined;
+            return next(request);
+        },
+    };
+}
+//# sourceMappingURL=multipartPolicy.js.map
+
+/***/ }),
+
+/***/ 3864:
+/***/ ((__unused_webpack_module, exports) => {
+
+
+// Copyright (c) Microsoft Corporation.
+// Licensed under the MIT License.
+Object.defineProperty(exports, "__esModule", ({ value: true }));
+exports.ndJsonPolicyName = void 0;
+exports.ndJsonPolicy = ndJsonPolicy;
+/**
+ * The programmatic identifier of the ndJsonPolicy.
+ */
+exports.ndJsonPolicyName = "ndJsonPolicy";
+/**
+ * ndJsonPolicy is a policy that converts a JSON array request body into newline-delimited JSON (NDJSON).
+ */
+function ndJsonPolicy() {
+    return {
+        name: exports.ndJsonPolicyName,
+        async sendRequest(request, next) {
+            // There currently isn't a good way to bypass the serializer
+            if (typeof request.body === "string" && request.body.startsWith("[")) {
+                const body = JSON.parse(request.body);
+                if (Array.isArray(body)) {
+                    request.body = body.map((item) => JSON.stringify(item) + "\n").join("");
+                }
+            }
+            return next(request);
+        },
+    };
+}
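+// Illustrative example (comment only): a string body containing a JSON array is
+// rewritten as newline-delimited JSON, one serialized item per line.
+//
+//   request.body = '[{"a":1},{"b":2}]';
+//   // after this policy runs:
+//   // request.body === '{"a":1}\n{"b":2}\n'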
+//# sourceMappingURL=ndJsonPolicy.js.map
+
+/***/ }),
+
+/***/ 338:
+/***/ ((__unused_webpack_module, exports, __nccwpck_require__) => {
+
+
+// Copyright (c) Microsoft Corporation.
+// Licensed under the MIT License.
+Object.defineProperty(exports, "__esModule", ({ value: true }));
+exports.globalNoProxyList = exports.proxyPolicyName = void 0;
+exports.loadNoProxy = loadNoProxy;
+exports.getDefaultProxySettings = getDefaultProxySettings;
+exports.proxyPolicy = proxyPolicy;
+const https_proxy_agent_1 = __nccwpck_require__(4469);
+const http_proxy_agent_1 = __nccwpck_require__(4702);
+const log_js_1 = __nccwpck_require__(3747);
+const HTTPS_PROXY = "HTTPS_PROXY";
+const HTTP_PROXY = "HTTP_PROXY";
+const ALL_PROXY = "ALL_PROXY";
+const NO_PROXY = "NO_PROXY";
+/**
+ * The programmatic identifier of the proxyPolicy.
+ */
+exports.proxyPolicyName = "proxyPolicy";
+/**
+ * Stores the patterns specified in NO_PROXY environment variable.
+ * @internal
+ */
+exports.globalNoProxyList = [];
+let noProxyListLoaded = false;
+/** A cache of whether a host should bypass the proxy. */
+const globalBypassedMap = new Map();
+function getEnvironmentValue(name) {
+    if (process.env[name]) {
+        return process.env[name];
+    }
+    else if (process.env[name.toLowerCase()]) {
+        return process.env[name.toLowerCase()];
+    }
+    return undefined;
+}
+function loadEnvironmentProxyValue() {
+    if (!process) {
+        return undefined;
+    }
+    const httpsProxy = getEnvironmentValue(HTTPS_PROXY);
+    const allProxy = getEnvironmentValue(ALL_PROXY);
+    const httpProxy = getEnvironmentValue(HTTP_PROXY);
+    return httpsProxy || allProxy || httpProxy;
+}
+/**
+ * Check whether the host of a given `uri` matches any pattern in the no proxy list.
+ * If there's a match, any request sent to the same host shouldn't have the proxy settings set.
+ * This implementation is a port of https://github.com/Azure/azure-sdk-for-net/blob/8cca811371159e527159c7eb65602477898683e2/sdk/core/Azure.Core/src/Pipeline/Internal/HttpEnvironmentProxy.cs#L210
+ */
+function isBypassed(uri, noProxyList, bypassedMap) {
+    if (noProxyList.length === 0) {
+        return false;
+    }
+    const host = new URL(uri).hostname;
+    if (bypassedMap === null || bypassedMap === void 0 ? void 0 : bypassedMap.has(host)) {
+        return bypassedMap.get(host);
+    }
+    let isBypassedFlag = false;
+    for (const pattern of noProxyList) {
+        if (pattern[0] === ".") {
+            // This should match either the domain itself or any subdomain of it
+            // .foo.com will match foo.com itself or *.foo.com
+            if (host.endsWith(pattern)) {
+                isBypassedFlag = true;
+            }
+            else {
+                if (host.length === pattern.length - 1 && host === pattern.slice(1)) {
+                    isBypassedFlag = true;
+                }
+            }
+        }
+        else {
+            if (host === pattern) {
+                isBypassedFlag = true;
+            }
+        }
+    }
+    bypassedMap === null || bypassedMap === void 0 ? void 0 : bypassedMap.set(host, isBypassedFlag);
+    return isBypassedFlag;
+}
+function loadNoProxy() {
+    const noProxy = getEnvironmentValue(NO_PROXY);
+    noProxyListLoaded = true;
+    if (noProxy) {
+        return noProxy
+            .split(",")
+            .map((item) => item.trim())
+            .filter((item) => item.length);
+    }
+    return [];
+}
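+// Illustrative sketch (comment only): with NO_PROXY="localhost, .example.com",
+// loadNoProxy returns ["localhost", ".example.com"], and isBypassed then treats
+// a leading dot as "the domain itself or any subdomain":
+//
+//   isBypassed("https://api.example.com/x", ["localhost", ".example.com"]); // true (subdomain)
+//   isBypassed("https://example.com/x", ["localhost", ".example.com"]);     // true (domain itself)
+//   isBypassed("http://localhost:3000", ["localhost", ".example.com"]);     // true (exact host)
+//   isBypassed("https://other.com", ["localhost", ".example.com"]);         // false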
+/**
+ * This method converts a proxy url into `ProxySettings` for use with ProxyPolicy.
+ * If no argument is given, it attempts to parse a proxy URL from the environment
+ * variables `HTTPS_PROXY` or `HTTP_PROXY`.
+ * @param proxyUrl - The url of the proxy to use. May contain authentication information.
+ * @deprecated - Internally this method is no longer necessary when setting proxy information.
+ */
+function getDefaultProxySettings(proxyUrl) {
+    if (!proxyUrl) {
+        proxyUrl = loadEnvironmentProxyValue();
+        if (!proxyUrl) {
+            return undefined;
+        }
+    }
+    const parsedUrl = new URL(proxyUrl);
+    const schema = parsedUrl.protocol ? parsedUrl.protocol + "//" : "";
+    return {
+        host: schema + parsedUrl.hostname,
+        port: Number.parseInt(parsedUrl.port || "80"),
+        username: parsedUrl.username,
+        password: parsedUrl.password,
+    };
+}
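+// Illustrative example (comment only, using a hypothetical proxy URL): the
+// deprecated helper above maps a proxy URL onto the ProxySettings shape.
+//
+//   getDefaultProxySettings("http://user:secret@proxy.internal:3128");
+//   // => { host: "http://proxy.internal", port: 3128, username: "user", password: "secret" }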
+/**
+ * This method attempts to parse a proxy URL from the environment
+ * variables `HTTPS_PROXY` or `HTTP_PROXY`.
+ */
+function getDefaultProxySettingsInternal() {
+    const envProxy = loadEnvironmentProxyValue();
+    return envProxy ? new URL(envProxy) : undefined;
+}
+function getUrlFromProxySettings(settings) {
+    let parsedProxyUrl;
+    try {
+        parsedProxyUrl = new URL(settings.host);
+    }
+    catch (_a) {
+        throw new Error(`Expecting a valid host string in proxy settings, but found "${settings.host}".`);
+    }
+    parsedProxyUrl.port = String(settings.port);
+    if (settings.username) {
+        parsedProxyUrl.username = settings.username;
+    }
+    if (settings.password) {
+        parsedProxyUrl.password = settings.password;
+    }
+    return parsedProxyUrl;
+}
+function setProxyAgentOnRequest(request, cachedAgents, proxyUrl) {
+    // A custom agent should take precedence, so if one is already present
+    // skip setting the proxy agent to avoid overwriting it.
+    if (request.agent) {
+        return;
+    }
+    const url = new URL(request.url);
+    const isInsecure = url.protocol !== "https:";
+    if (request.tlsSettings) {
+        log_js_1.logger.warning("TLS settings are not supported in combination with custom Proxy, certificates provided to the client will be ignored.");
+    }
+    const headers = request.headers.toJSON();
+    if (isInsecure) {
+        if (!cachedAgents.httpProxyAgent) {
+            cachedAgents.httpProxyAgent = new http_proxy_agent_1.HttpProxyAgent(proxyUrl, { headers });
+        }
+        request.agent = cachedAgents.httpProxyAgent;
+    }
+    else {
+        if (!cachedAgents.httpsProxyAgent) {
+            cachedAgents.httpsProxyAgent = new https_proxy_agent_1.HttpsProxyAgent(proxyUrl, { headers });
+        }
+        request.agent = cachedAgents.httpsProxyAgent;
+    }
+}
+/**
+ * A policy that allows one to apply proxy settings to all requests.
+ * If not passed static settings, they will be retrieved from the HTTPS_PROXY
+ * or HTTP_PROXY environment variables.
+ * @param proxySettings - ProxySettings to use on each request.
+ * @param options - additional settings, for example, custom NO_PROXY patterns
+ */
+function proxyPolicy(proxySettings, options) {
+    if (!noProxyListLoaded) {
+        exports.globalNoProxyList.push(...loadNoProxy());
+    }
+    const defaultProxy = proxySettings
+        ? getUrlFromProxySettings(proxySettings)
+        : getDefaultProxySettingsInternal();
+    const cachedAgents = {};
+    return {
+        name: exports.proxyPolicyName,
+        async sendRequest(request, next) {
+            var _a;
+            if (!request.proxySettings &&
+                defaultProxy &&
+                !isBypassed(request.url, (_a = options === null || options === void 0 ? void 0 : options.customNoProxyList) !== null && _a !== void 0 ? _a : exports.globalNoProxyList, (options === null || options === void 0 ? void 0 : options.customNoProxyList) ? undefined : globalBypassedMap)) {
+                setProxyAgentOnRequest(request, cachedAgents, defaultProxy);
+            }
+            else if (request.proxySettings) {
+                setProxyAgentOnRequest(request, cachedAgents, getUrlFromProxySettings(request.proxySettings));
+            }
+            return next(request);
+        },
+    };
+}
+//# sourceMappingURL=proxyPolicy.js.map
+
+/***/ }),
+
+/***/ 4308:
+/***/ ((__unused_webpack_module, exports) => {
+
+
+// Copyright (c) Microsoft Corporation.
+// Licensed under the MIT License.
+Object.defineProperty(exports, "__esModule", ({ value: true }));
+exports.redirectPolicyName = void 0;
+exports.redirectPolicy = redirectPolicy;
+/**
+ * The programmatic identifier of the redirectPolicy.
+ */
+exports.redirectPolicyName = "redirectPolicy";
+/**
+ * Methods that are allowed to follow 301 and 302 redirects
+ */
+const allowedRedirect = ["GET", "HEAD"];
+/**
+ * A policy to follow Location headers from the server in order
+ * to support server-side redirection.
+ * In the browser, this policy is not used.
+ * @param options - Options to control policy behavior.
+ */
+function redirectPolicy(options = {}) {
+    const { maxRetries = 20 } = options;
+    return {
+        name: exports.redirectPolicyName,
+        async sendRequest(request, next) {
+            const response = await next(request);
+            return handleRedirect(next, response, maxRetries);
+        },
+    };
+}
+async function handleRedirect(next, response, maxRetries, currentRetries = 0) {
+    const { request, status, headers } = response;
+    const locationHeader = headers.get("location");
+    if (locationHeader &&
+        (status === 300 ||
+            (status === 301 && allowedRedirect.includes(request.method)) ||
+            (status === 302 && allowedRedirect.includes(request.method)) ||
+            (status === 303 && request.method === "POST") ||
+            status === 307) &&
+        currentRetries < maxRetries) {
+        const url = new URL(locationHeader, request.url);
+        request.url = url.toString();
+        // A POST request that receives a 303 status code should be converted into a
+        // GET request against the redirect URL from the Location header
+        if (status === 303) {
+            request.method = "GET";
+            request.headers.delete("Content-Length");
+            delete request.body;
+        }
+        request.headers.delete("Authorization");
+        const res = await next(request);
+        return handleRedirect(next, res, maxRetries, currentRetries + 1);
+    }
+    return response;
+}
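+// Illustrative sketch (comment only, hypothetical URLs): a POST that receives
+// "303 See Other" with a Location header is replayed as a GET against the
+// resolved URL, with the body, Content-Length, and Authorization headers
+// removed, up to maxRetries redirects (20 by default).
+//
+//   POST https://service.example/items     -> 303, Location: /items/42
+//   GET  https://service.example/items/42  -> 200 (returned to the caller)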
+//# sourceMappingURL=redirectPolicy.js.map
+
+/***/ }),
+
+/***/ 2876:
+/***/ ((__unused_webpack_module, exports, __nccwpck_require__) => {
+
+
+// Copyright (c) Microsoft Corporation.
+// Licensed under the MIT License.
+Object.defineProperty(exports, "__esModule", ({ value: true }));
+exports.retryPolicy = retryPolicy;
+const helpers_js_1 = __nccwpck_require__(6935);
+const logger_1 = __nccwpck_require__(5851);
+const abort_controller_1 = __nccwpck_require__(764);
+const constants_js_1 = __nccwpck_require__(1652);
+const retryPolicyLogger = (0, logger_1.createClientLogger)("core-rest-pipeline retryPolicy");
+/**
+ * The programmatic identifier of the retryPolicy.
+ */
+const retryPolicyName = "retryPolicy";
+/**
+ * retryPolicy is a generic policy to enable retrying requests when certain conditions are met
+ */
+function retryPolicy(strategies, options = { maxRetries: constants_js_1.DEFAULT_RETRY_POLICY_COUNT }) {
+    const logger = options.logger || retryPolicyLogger;
+    return {
+        name: retryPolicyName,
+        async sendRequest(request, next) {
+            var _a, _b;
+            let response;
+            let responseError;
+            let retryCount = -1;
+            // eslint-disable-next-line no-constant-condition
+            retryRequest: while (true) {
+                retryCount += 1;
+                response = undefined;
+                responseError = undefined;
+                try {
+                    logger.info(`Retry ${retryCount}: Attempting to send request`, request.requestId);
+                    response = await next(request);
+                    logger.info(`Retry ${retryCount}: Received a response from request`, request.requestId);
+                }
+                catch (e) {
+                    logger.error(`Retry ${retryCount}: Received an error from request`, request.requestId);
+                    // RestErrors are valid targets for the retry strategies.
+                    // If none of the retry strategies can work with them, they will be thrown later in this policy.
+                    // If the received error is not a RestError, it is immediately thrown.
+                    responseError = e;
+                    if (!e || responseError.name !== "RestError") {
+                        throw e;
+                    }
+                    response = responseError.response;
+                }
+                if ((_a = request.abortSignal) === null || _a === void 0 ? void 0 : _a.aborted) {
+                    logger.error(`Retry ${retryCount}: Request aborted.`);
+                    const abortError = new abort_controller_1.AbortError();
+                    throw abortError;
+                }
+                if (retryCount >= ((_b = options.maxRetries) !== null && _b !== void 0 ? _b : constants_js_1.DEFAULT_RETRY_POLICY_COUNT)) {
+                    logger.info(`Retry ${retryCount}: Maximum retries reached. Returning the last received response, or throwing the last received error.`);
+                    if (responseError) {
+                        throw responseError;
+                    }
+                    else if (response) {
+                        return response;
+                    }
+                    else {
+                        throw new Error("Maximum retries reached with no response or error to throw");
+                    }
+                }
+                logger.info(`Retry ${retryCount}: Processing ${strategies.length} retry strategies.`);
+                strategiesLoop: for (const strategy of strategies) {
+                    const strategyLogger = strategy.logger || retryPolicyLogger;
+                    strategyLogger.info(`Retry ${retryCount}: Processing retry strategy ${strategy.name}.`);
+                    const modifiers = strategy.retry({
+                        retryCount,
+                        response,
+                        responseError,
+                    });
+                    if (modifiers.skipStrategy) {
+                        strategyLogger.info(`Retry ${retryCount}: Skipped.`);
+                        continue strategiesLoop;
+                    }
+                    const { errorToThrow, retryAfterInMs, redirectTo } = modifiers;
+                    if (errorToThrow) {
+                        strategyLogger.error(`Retry ${retryCount}: Retry strategy ${strategy.name} throws error:`, errorToThrow);
+                        throw errorToThrow;
+                    }
+                    if (retryAfterInMs || retryAfterInMs === 0) {
+                        strategyLogger.info(`Retry ${retryCount}: Retry strategy ${strategy.name} retries after ${retryAfterInMs}`);
+                        await (0, helpers_js_1.delay)(retryAfterInMs, undefined, { abortSignal: request.abortSignal });
+                        continue retryRequest;
+                    }
+                    if (redirectTo) {
+                        strategyLogger.info(`Retry ${retryCount}: Retry strategy ${strategy.name} redirects to ${redirectTo}`);
+                        request.url = redirectTo;
+                        continue retryRequest;
+                    }
+                }
+                if (responseError) {
+                    logger.info(`None of the retry strategies could work with the received error. Throwing it.`);
+                    throw responseError;
+                }
+                if (response) {
+                    logger.info(`None of the retry strategies could work with the received response. Returning it.`);
+                    return response;
+                }
+                // If all the retries skip and there's no response,
+                // we're still in the retry loop, so a new request will be sent
+                // until `maxRetries` is reached.
+            }
+        },
+    };
+}
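+// Sketch of the strategy contract consumed above (comment only; the
+// fixedDelayStrategy name is hypothetical): each strategy exposes a retry()
+// callback that receives { retryCount, response, responseError } and returns
+// modifiers such as skipStrategy, errorToThrow, retryAfterInMs, or redirectTo.
+//
+//   const fixedDelayStrategy = {
+//     name: "fixedDelayStrategy",
+//     retry({ responseError }) {
+//       if (!responseError) {
+//         return { skipStrategy: true };
+//       }
+//       return { retryAfterInMs: 500 };
+//     },
+//   };
+//   const policy = retryPolicy([fixedDelayStrategy], { maxRetries: 2 });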
+//# sourceMappingURL=retryPolicy.js.map
+
+/***/ }),
+
+/***/ 9417:
+/***/ ((__unused_webpack_module, exports) => {
+
+
+// Copyright (c) Microsoft Corporation.
+// Licensed under the MIT License.
+Object.defineProperty(exports, "__esModule", ({ value: true }));
+exports.setClientRequestIdPolicyName = void 0;
+exports.setClientRequestIdPolicy = setClientRequestIdPolicy;
+/**
+ * The programmatic identifier of the setClientRequestIdPolicy.
+ */
+exports.setClientRequestIdPolicyName = "setClientRequestIdPolicy";
+/**
+ * Each PipelineRequest gets a unique id upon creation.
+ * This policy passes that unique id along via an HTTP header to enable better
+ * telemetry and tracing.
+ * @param requestIdHeaderName - The name of the header to pass the request ID to.
+ */
+function setClientRequestIdPolicy(requestIdHeaderName = "x-ms-client-request-id") {
+    return {
+        name: exports.setClientRequestIdPolicyName,
+        async sendRequest(request, next) {
+            if (!request.headers.has(requestIdHeaderName)) {
+                request.headers.set(requestIdHeaderName, request.requestId);
+            }
+            return next(request);
+        },
+    };
+}
+//# sourceMappingURL=setClientRequestIdPolicy.js.map
+
+/***/ }),
+
+/***/ 985:
+/***/ ((__unused_webpack_module, exports, __nccwpck_require__) => {
+
+
+// Copyright (c) Microsoft Corporation.
+// Licensed under the MIT License.
+Object.defineProperty(exports, "__esModule", ({ value: true }));
+exports.systemErrorRetryPolicyName = void 0;
+exports.systemErrorRetryPolicy = systemErrorRetryPolicy;
+const exponentialRetryStrategy_js_1 = __nccwpck_require__(4695);
+const retryPolicy_js_1 = __nccwpck_require__(2876);
+const constants_js_1 = __nccwpck_require__(1652);
+/**
+ * Name of the {@link systemErrorRetryPolicy}
+ */
+exports.systemErrorRetryPolicyName = "systemErrorRetryPolicy";
+/**
+ * A retry policy that specifically seeks to handle errors in the
+ * underlying transport layer (e.g. DNS lookup failures) rather than
+ * retryable error codes from the server itself.
+ * @param options - Options that customize the policy.
+ */
+function systemErrorRetryPolicy(options = {}) {
+    var _a;
+    return {
+        name: exports.systemErrorRetryPolicyName,
+        sendRequest: (0, retryPolicy_js_1.retryPolicy)([
+            (0, exponentialRetryStrategy_js_1.exponentialRetryStrategy)(Object.assign(Object.assign({}, options), { ignoreHttpStatusCodes: true })),
+        ], {
+            maxRetries: (_a = options.maxRetries) !== null && _a !== void 0 ? _a : constants_js_1.DEFAULT_RETRY_POLICY_COUNT,
+        }).sendRequest,
+    };
+}
+//# sourceMappingURL=systemErrorRetryPolicy.js.map
+
+/***/ }),
+
+/***/ 5237:
+/***/ ((__unused_webpack_module, exports, __nccwpck_require__) => {
+
+
+// Copyright (c) Microsoft Corporation.
+// Licensed under the MIT License.
+Object.defineProperty(exports, "__esModule", ({ value: true }));
+exports.throttlingRetryPolicyName = void 0;
+exports.throttlingRetryPolicy = throttlingRetryPolicy;
+const throttlingRetryStrategy_js_1 = __nccwpck_require__(3791);
+const retryPolicy_js_1 = __nccwpck_require__(2876);
+const constants_js_1 = __nccwpck_require__(1652);
+/**
+ * Name of the {@link throttlingRetryPolicy}
+ */
+exports.throttlingRetryPolicyName = "throttlingRetryPolicy";
+/**
+ * A policy that retries when the server sends a 429 response with a Retry-After header.
+ *
+ * To learn more, please refer to
+ * https://docs.microsoft.com/en-us/azure/azure-resource-manager/resource-manager-request-limits,
+ * https://docs.microsoft.com/en-us/azure/azure-subscription-service-limits and
+ * https://docs.microsoft.com/en-us/azure/virtual-machines/troubleshooting/troubleshooting-throttling-errors
+ *
+ * @param options - Options that configure retry logic.
+ */
+function throttlingRetryPolicy(options = {}) {
+    var _a;
+    return {
+        name: exports.throttlingRetryPolicyName,
+        sendRequest: (0, retryPolicy_js_1.retryPolicy)([(0, throttlingRetryStrategy_js_1.throttlingRetryStrategy)()], {
+            maxRetries: (_a = options.maxRetries) !== null && _a !== void 0 ? _a : constants_js_1.DEFAULT_RETRY_POLICY_COUNT,
+        }).sendRequest,
+    };
+}
+//# sourceMappingURL=throttlingRetryPolicy.js.map
+
+/***/ }),
+
+/***/ 6551:
+/***/ ((__unused_webpack_module, exports) => {
+
+
+// Copyright (c) Microsoft Corporation.
+// Licensed under the MIT License.
+Object.defineProperty(exports, "__esModule", ({ value: true }));
+exports.tlsPolicyName = void 0;
+exports.tlsPolicy = tlsPolicy;
+/**
+ * Name of the TLS Policy
+ */
+exports.tlsPolicyName = "tlsPolicy";
+/**
+ * Gets a pipeline policy that adds the client certificate to the HttpClient agent for authentication.
+ */
+function tlsPolicy(tlsSettings) {
+    return {
+        name: exports.tlsPolicyName,
+        sendRequest: async (req, next) => {
+            // Users may define request-level tlsSettings; honor those over the client-level settings
+            if (!req.tlsSettings) {
+                req.tlsSettings = tlsSettings;
+            }
+            return next(req);
+        },
+    };
+}
+//# sourceMappingURL=tlsPolicy.js.map
+
+/***/ }),
+
+/***/ 4396:
+/***/ ((__unused_webpack_module, exports, __nccwpck_require__) => {
+
+
+// Copyright (c) Microsoft Corporation.
+// Licensed under the MIT License.
+Object.defineProperty(exports, "__esModule", ({ value: true }));
+exports.tracingPolicyName = void 0;
+exports.tracingPolicy = tracingPolicy;
+const core_tracing_1 = __nccwpck_require__(9340);
+const constants_js_1 = __nccwpck_require__(1652);
+const userAgent_js_1 = __nccwpck_require__(286);
+const log_js_1 = __nccwpck_require__(3747);
+const core_util_1 = __nccwpck_require__(402);
+const restError_js_1 = __nccwpck_require__(4503);
+const sanitizer_js_1 = __nccwpck_require__(6825);
+/**
+ * The programmatic identifier of the tracingPolicy.
+ */
+exports.tracingPolicyName = "tracingPolicy";
+/**
+ * A simple policy to create OpenTelemetry Spans for each request made by the pipeline
+ * that has SpanOptions with a parent.
+ * Requests made without a parent Span will not be recorded.
+ * @param options - Options to configure the telemetry logged by the tracing policy.
+ */
+function tracingPolicy(options = {}) {
+    const userAgentPromise = (0, userAgent_js_1.getUserAgentValue)(options.userAgentPrefix);
+    const sanitizer = new sanitizer_js_1.Sanitizer({
+        additionalAllowedQueryParameters: options.additionalAllowedQueryParameters,
+    });
+    const tracingClient = tryCreateTracingClient();
+    return {
+        name: exports.tracingPolicyName,
+        async sendRequest(request, next) {
+            var _a;
+            if (!tracingClient) {
+                return next(request);
+            }
+            const userAgent = await userAgentPromise;
+            const spanAttributes = {
+                "http.url": sanitizer.sanitizeUrl(request.url),
+                "http.method": request.method,
+                "http.user_agent": userAgent,
+                requestId: request.requestId,
+            };
+            if (userAgent) {
+                spanAttributes["http.user_agent"] = userAgent;
+            }
+            const { span, tracingContext } = (_a = tryCreateSpan(tracingClient, request, spanAttributes)) !== null && _a !== void 0 ? _a : {};
+            if (!span || !tracingContext) {
+                return next(request);
+            }
+            try {
+                const response = await tracingClient.withContext(tracingContext, next, request);
+                tryProcessResponse(span, response);
+                return response;
+            }
+            catch (err) {
+                tryProcessError(span, err);
+                throw err;
+            }
+        },
+    };
+}
+function tryCreateTracingClient() {
+    try {
+        return (0, core_tracing_1.createTracingClient)({
+            namespace: "",
+            packageName: "@azure/core-rest-pipeline",
+            packageVersion: constants_js_1.SDK_VERSION,
+        });
+    }
+    catch (e) {
+        log_js_1.logger.warning(`Error when creating the TracingClient: ${(0, core_util_1.getErrorMessage)(e)}`);
+        return undefined;
+    }
+}
+function tryCreateSpan(tracingClient, request, spanAttributes) {
+    try {
+        // As per spec, we do not need to differentiate between HTTP and HTTPS in span name.
+        const { span, updatedOptions } = tracingClient.startSpan(`HTTP ${request.method}`, { tracingOptions: request.tracingOptions }, {
+            spanKind: "client",
+            spanAttributes,
+        });
+        // If the span is not recording, don't do any more work.
+        if (!span.isRecording()) {
+            span.end();
+            return undefined;
+        }
+        // set headers
+        const headers = tracingClient.createRequestHeaders(updatedOptions.tracingOptions.tracingContext);
+        for (const [key, value] of Object.entries(headers)) {
+            request.headers.set(key, value);
+        }
+        return { span, tracingContext: updatedOptions.tracingOptions.tracingContext };
+    }
+    catch (e) {
+        log_js_1.logger.warning(`Skipping creating a tracing span due to an error: ${(0, core_util_1.getErrorMessage)(e)}`);
+        return undefined;
+    }
+}
+function tryProcessError(span, error) {
+    try {
+        span.setStatus({
+            status: "error",
+            error: (0, core_util_1.isError)(error) ? error : undefined,
+        });
+        if ((0, restError_js_1.isRestError)(error) && error.statusCode) {
+            span.setAttribute("http.status_code", error.statusCode);
+        }
+        span.end();
+    }
+    catch (e) {
+        log_js_1.logger.warning(`Skipping tracing span processing due to an error: ${(0, core_util_1.getErrorMessage)(e)}`);
+    }
+}
+function tryProcessResponse(span, response) {
+    try {
+        span.setAttribute("http.status_code", response.status);
+        const serviceRequestId = response.headers.get("x-ms-request-id");
+        if (serviceRequestId) {
+            span.setAttribute("serviceRequestId", serviceRequestId);
+        }
+        span.setStatus({
+            status: "success",
+        });
+        span.end();
+    }
+    catch (e) {
+        log_js_1.logger.warning(`Skipping tracing span processing due to an error: ${(0, core_util_1.getErrorMessage)(e)}`);
+    }
+}
+//# sourceMappingURL=tracingPolicy.js.map
+
+/***/ }),
+
+/***/ 9398:
+/***/ ((__unused_webpack_module, exports, __nccwpck_require__) => {
+
+
+// Copyright (c) Microsoft Corporation.
+// Licensed under the MIT License.
+Object.defineProperty(exports, "__esModule", ({ value: true }));
+exports.userAgentPolicyName = void 0;
+exports.userAgentPolicy = userAgentPolicy;
+const userAgent_js_1 = __nccwpck_require__(286);
+const UserAgentHeaderName = (0, userAgent_js_1.getUserAgentHeaderName)();
+/**
+ * The programmatic identifier of the userAgentPolicy.
+ */
+exports.userAgentPolicyName = "userAgentPolicy";
+/**
+ * A policy that sets the User-Agent header (or equivalent) to reflect
+ * the library version.
+ * @param options - Options to customize the user agent value.
+ */
+function userAgentPolicy(options = {}) {
+    const userAgentValue = (0, userAgent_js_1.getUserAgentValue)(options.userAgentPrefix);
+    return {
+        name: exports.userAgentPolicyName,
+        async sendRequest(request, next) {
+            if (!request.headers.has(UserAgentHeaderName)) {
+                request.headers.set(UserAgentHeaderName, await userAgentValue);
+            }
+            return next(request);
+        },
+    };
+}
+//# sourceMappingURL=userAgentPolicy.js.map
+
+/***/ }),
+
+/***/ 4503:
+/***/ ((__unused_webpack_module, exports, __nccwpck_require__) => {
+
+
+// Copyright (c) Microsoft Corporation.
+// Licensed under the MIT License.
+Object.defineProperty(exports, "__esModule", ({ value: true }));
+exports.RestError = void 0;
+exports.isRestError = isRestError;
+const core_util_1 = __nccwpck_require__(402);
+const inspect_js_1 = __nccwpck_require__(6002);
+const sanitizer_js_1 = __nccwpck_require__(6825);
+const errorSanitizer = new sanitizer_js_1.Sanitizer();
+/**
+ * A custom error type for failed pipeline requests.
+ */
+class RestError extends Error {
+    constructor(message, options = {}) {
+        super(message);
+        this.name = "RestError";
+        this.code = options.code;
+        this.statusCode = options.statusCode;
+        // The request and response may contain sensitive information in the headers or body.
+        // To help prevent this sensitive information being accidentally logged, the request and response
+        // properties are marked as non-enumerable here. This prevents them showing up in the output of
+        // JSON.stringify and console.log.
+        Object.defineProperty(this, "request", { value: options.request, enumerable: false });
+        Object.defineProperty(this, "response", { value: options.response, enumerable: false });
+        Object.setPrototypeOf(this, RestError.prototype);
+    }
+    /**
+     * Logging method for util.inspect in Node
+     */
+    [inspect_js_1.custom]() {
+        // Extract non-enumerable properties and add them back. This is OK since in this output the request and
+        // response get sanitized.
+        return `RestError: ${this.message} \n ${errorSanitizer.sanitize(Object.assign(Object.assign({}, this), { request: this.request, response: this.response }))}`;
+    }
+}
+exports.RestError = RestError;
+/**
+ * Something went wrong when making the request.
+ * This means the actual request failed for some reason,
+ * such as a DNS issue or the connection being lost.
+ */
+RestError.REQUEST_SEND_ERROR = "REQUEST_SEND_ERROR";
+/**
+ * This means that parsing the response from the server failed.
+ * It may have been malformed.
+ */
+RestError.PARSE_ERROR = "PARSE_ERROR";
+/**
+ * Typeguard for RestError
+ * @param e - Something caught by a catch clause.
+ */
+function isRestError(e) {
+    if (e instanceof RestError) {
+        return true;
+    }
+    return (0, core_util_1.isError)(e) && e.name === "RestError";
+}
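+// Usage sketch (comment only): isRestError narrows errors caught around a
+// pipeline or HTTP call so status-specific handling stays safe. The
+// sendSomeRequest() call is a hypothetical placeholder, not an API of this bundle.
+//
+//   try {
+//     await sendSomeRequest();
+//   } catch (e) {
+//     if (isRestError(e) && e.statusCode === 404) {
+//       // handle "not found" without rethrowing
+//     } else {
+//       throw e;
+//     }
+//   }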
+//# sourceMappingURL=restError.js.map
+
+/***/ }),
+
+/***/ 4695:
+/***/ ((__unused_webpack_module, exports, __nccwpck_require__) => {
+
+
+// Copyright (c) Microsoft Corporation.
+// Licensed under the MIT License.
+Object.defineProperty(exports, "__esModule", ({ value: true }));
+exports.exponentialRetryStrategy = exponentialRetryStrategy;
+exports.isExponentialRetryResponse = isExponentialRetryResponse;
+exports.isSystemError = isSystemError;
+const core_util_1 = __nccwpck_require__(402);
+const throttlingRetryStrategy_js_1 = __nccwpck_require__(3791);
+// intervals are in milliseconds
+const DEFAULT_CLIENT_RETRY_INTERVAL = 1000;
+const DEFAULT_CLIENT_MAX_RETRY_INTERVAL = 1000 * 64;
+/**
+ * A retry strategy that retries with an exponentially increasing delay in these two cases:
+ * - When there are errors in the underlying transport layer (e.g. DNS lookup failures).
+ * - Or otherwise if the outgoing request fails (408, or greater than or equal to 500, except for 501 and 505).
+ */
+function exponentialRetryStrategy(options = {}) {
+    var _a, _b;
+    const retryInterval = (_a = options.retryDelayInMs) !== null && _a !== void 0 ? _a : DEFAULT_CLIENT_RETRY_INTERVAL;
+    const maxRetryInterval = (_b = options.maxRetryDelayInMs) !== null && _b !== void 0 ? _b : DEFAULT_CLIENT_MAX_RETRY_INTERVAL;
+    return {
+        name: "exponentialRetryStrategy",
+        retry({ retryCount, response, responseError }) {
+            const matchedSystemError = isSystemError(responseError);
+            const ignoreSystemErrors = matchedSystemError && options.ignoreSystemErrors;
+            const isExponential = isExponentialRetryResponse(response);
+            const ignoreExponentialResponse = isExponential && options.ignoreHttpStatusCodes;
+            const unknownResponse = response && ((0, throttlingRetryStrategy_js_1.isThrottlingRetryResponse)(response) || !isExponential);
+            if (unknownResponse || ignoreExponentialResponse || ignoreSystemErrors) {
+                return { skipStrategy: true };
+            }
+            if (responseError && !matchedSystemError && !isExponential) {
+                return { errorToThrow: responseError };
+            }
+            return (0, core_util_1.calculateRetryDelay)(retryCount, {
+                retryDelayInMs: retryInterval,
+                maxRetryDelayInMs: maxRetryInterval,
+            });
+        },
+    };
+}
+/**
+ * A response is a retry response if it has status codes:
+ * - 408, or
+ * - Greater than or equal to 500, except for 501 and 505.
+ */
+function isExponentialRetryResponse(response) {
+    return Boolean(response &&
+        response.status !== undefined &&
+        (response.status >= 500 || response.status === 408) &&
+        response.status !== 501 &&
+        response.status !== 505);
+}
+/**
+ * Determines whether an error from a pipeline response was triggered in the network layer.
+ */
+function isSystemError(err) {
+    if (!err) {
+        return false;
+    }
+    return (err.code === "ETIMEDOUT" ||
+        err.code === "ESOCKETTIMEDOUT" ||
+        err.code === "ECONNREFUSED" ||
+        err.code === "ECONNRESET" ||
+        err.code === "ENOENT" ||
+        err.code === "ENOTFOUND");
+}
+//# sourceMappingURL=exponentialRetryStrategy.js.map
+
+/***/ }),
+
+/***/ 3791:
+/***/ ((__unused_webpack_module, exports, __nccwpck_require__) => {
+
+
+// Copyright (c) Microsoft Corporation.
+// Licensed under the MIT License.
+Object.defineProperty(exports, "__esModule", ({ value: true }));
+exports.isThrottlingRetryResponse = isThrottlingRetryResponse;
+exports.throttlingRetryStrategy = throttlingRetryStrategy;
+const helpers_js_1 = __nccwpck_require__(6935);
+/**
+ * The header that comes back from Azure services representing
+ * the minimum amount of time to wait before retrying (in seconds, or as a timestamp after which the request can be retried).
+ */
+const RetryAfterHeader = "Retry-After";
+/**
+ * The headers that come back from Azure services representing
+ * the minimum amount of time to wait before retrying.
+ *
+ * "retry-after-ms", "x-ms-retry-after-ms" : milliseconds
+ * "Retry-After" : seconds or timestamp
+ */
+const AllRetryAfterHeaders = ["retry-after-ms", "x-ms-retry-after-ms", RetryAfterHeader];
+/**
+ * A response is a throttling retry response if it has a throttling status code (429 or 503),
+ * as long as one of the [ "Retry-After" or "retry-after-ms" or "x-ms-retry-after-ms" ] headers has a valid value.
+ *
+ * Returns the `retryAfterInMs` value if the response is a throttling retry response.
+ * If not throttling retry response, returns `undefined`.
+ *
+ * @internal
+ */
+function getRetryAfterInMs(response) {
+    if (!(response && [429, 503].includes(response.status)))
+        return undefined;
+    try {
+        // Headers: "retry-after-ms", "x-ms-retry-after-ms", "Retry-After"
+        for (const header of AllRetryAfterHeaders) {
+            const retryAfterValue = (0, helpers_js_1.parseHeaderValueAsNumber)(response, header);
+            if (retryAfterValue === 0 || retryAfterValue) {
+                // "Retry-After" header ==> seconds
+                // "retry-after-ms", "x-ms-retry-after-ms" headers ==> milli-seconds
+                const multiplyingFactor = header === RetryAfterHeader ? 1000 : 1;
+                return retryAfterValue * multiplyingFactor; // in milli-seconds
+            }
+        }
+        // RetryAfterHeader ("Retry-After") has a special case where it might be formatted as a date instead of a number of seconds
+        const retryAfterHeader = response.headers.get(RetryAfterHeader);
+        if (!retryAfterHeader)
+            return;
+        const date = Date.parse(retryAfterHeader);
+        const diff = date - Date.now();
+        // negative diff would mean a date in the past, so retry asap with 0 milliseconds
+        return Number.isFinite(diff) ? Math.max(0, diff) : undefined;
+    }
+    catch (_a) {
+        return undefined;
+    }
+}
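+// Illustrative examples (comment only) for a 429 or 503 response:
+//
+//   retry-after-ms: 800                        -> 800 ms
+//   x-ms-retry-after-ms: 800                   -> 800 ms
+//   Retry-After: 5                             -> 5000 ms (seconds * 1000)
+//   Retry-After: Wed, 21 Oct 2026 07:28:00 GMT -> max(0, date - Date.now()) ms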
+/**
+ * A response is a retry response if it has a throttling status code (429 or 503),
+ * as long as one of the [ "Retry-After" or "retry-after-ms" or "x-ms-retry-after-ms" ] headers has a valid value.
+ */
+function isThrottlingRetryResponse(response) {
+    return Number.isFinite(getRetryAfterInMs(response));
+}
+function throttlingRetryStrategy() {
+    return {
+        name: "throttlingRetryStrategy",
+        retry({ response }) {
+            const retryAfterInMs = getRetryAfterInMs(response);
+            if (!Number.isFinite(retryAfterInMs)) {
+                return { skipStrategy: true };
+            }
+            return {
+                retryAfterInMs,
+            };
+        },
+    };
+}
+//# sourceMappingURL=throttlingRetryStrategy.js.map
+
+/***/ }),
+
+/***/ 6404:
+/***/ ((__unused_webpack_module, exports, __nccwpck_require__) => {
+
+
+// Copyright (c) Microsoft Corporation.
+// Licensed under the MIT License.
+Object.defineProperty(exports, "__esModule", ({ value: true }));
+exports.concat = concat;
+const tslib_1 = __nccwpck_require__(1577);
+const node_stream_1 = __nccwpck_require__(7075);
+const typeGuards_js_1 = __nccwpck_require__(3246);
+const file_js_1 = __nccwpck_require__(5378);
+function streamAsyncIterator() {
+    return tslib_1.__asyncGenerator(this, arguments, function* streamAsyncIterator_1() {
+        const reader = this.getReader();
+        try {
+            while (true) {
+                const { done, value } = yield tslib_1.__await(reader.read());
+                if (done) {
+                    return yield tslib_1.__await(void 0);
+                }
+                yield yield tslib_1.__await(value);
+            }
+        }
+        finally {
+            reader.releaseLock();
+        }
+    });
+}
+function makeAsyncIterable(webStream) {
+    if (!webStream[Symbol.asyncIterator]) {
+        webStream[Symbol.asyncIterator] = streamAsyncIterator.bind(webStream);
+    }
+    if (!webStream.values) {
+        webStream.values = streamAsyncIterator.bind(webStream);
+    }
+}
+function ensureNodeStream(stream) {
+    if (stream instanceof ReadableStream) {
+        makeAsyncIterable(stream);
+        return node_stream_1.Readable.fromWeb(stream);
+    }
+    else {
+        return stream;
+    }
+}
+function toStream(source) {
+    if (source instanceof Uint8Array) {
+        return node_stream_1.Readable.from(Buffer.from(source));
+    }
+    else if ((0, typeGuards_js_1.isBlob)(source)) {
+        return toStream((0, file_js_1.getRawContent)(source));
+    }
+    else {
+        return ensureNodeStream(source);
+    }
+}
+/**
+ * Utility function that concatenates a set of binary inputs into one combined output.
+ *
+ * @param sources - array of sources for the concatenation
+ * @returns - in Node, a (() =\> NodeJS.ReadableStream) which, when read, produces a concatenation of all the inputs.
+ *           In browser, returns a `Blob` representing all the concatenated inputs.
+ *
+ * @internal
+ */
+async function concat(sources) {
+    return function () {
+        const streams = sources.map((x) => (typeof x === "function" ? x() : x)).map(toStream);
+        return node_stream_1.Readable.from((function () {
+            return tslib_1.__asyncGenerator(this, arguments, function* () {
+                var _a, e_1, _b, _c;
+                for (const stream of streams) {
+                    try {
+                        for (var _d = true, stream_1 = (e_1 = void 0, tslib_1.__asyncValues(stream)), stream_1_1; stream_1_1 = yield tslib_1.__await(stream_1.next()), _a = stream_1_1.done, !_a; _d = true) {
+                            _c = stream_1_1.value;
+                            _d = false;
+                            const chunk = _c;
+                            yield yield tslib_1.__await(chunk);
+                        }
+                    }
+                    catch (e_1_1) { e_1 = { error: e_1_1 }; }
+                    finally {
+                        try {
+                            if (!_d && !_a && (_b = stream_1.return)) yield tslib_1.__await(_b.call(stream_1));
+                        }
+                        finally { if (e_1) throw e_1.error; }
+                    }
+                }
+            });
+        })());
+    };
+}
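+// Illustrative usage sketch (comment only, not executed): concatenating an in-memory
+// chunk with a lazily created Node stream and draining the combined output. The byte
+// values are arbitrary example data.
+//
+//   const combined = await concat([
+//     new Uint8Array([1, 2, 3]),
+//     () => node_stream_1.Readable.from(Buffer.from([4, 5, 6])),
+//   ]);
+//   for await (const chunk of combined()) {
+//     // chunks arrive in source order: 1, 2, 3 then 4, 5, 6
+//   }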
+//# sourceMappingURL=concat.js.map
+
+/***/ }),
+
+/***/ 5378:
+/***/ ((__unused_webpack_module, exports, __nccwpck_require__) => {
+
+
+// Copyright (c) Microsoft Corporation.
+// Licensed under the MIT License.
+Object.defineProperty(exports, "__esModule", ({ value: true }));
+exports.getRawContent = getRawContent;
+exports.createFileFromStream = createFileFromStream;
+exports.createFile = createFile;
+const core_util_1 = __nccwpck_require__(402);
+const typeGuards_js_1 = __nccwpck_require__(3246);
+const unimplementedMethods = {
+    arrayBuffer: () => {
+        throw new Error("Not implemented");
+    },
+    slice: () => {
+        throw new Error("Not implemented");
+    },
+    text: () => {
+        throw new Error("Not implemented");
+    },
+};
+/**
+ * Private symbol used as key on objects created using createFile containing the
+ * original source of the file object.
+ *
+ * This is used in Node to access the original Node stream without using Blob#stream, which
+ * returns a web stream. This is done to avoid a couple of bugs to do with Blob#stream and
+ * Readable#to/fromWeb in Node versions we support:
+ * - https://github.com/nodejs/node/issues/42694 (fixed in Node 18.14)
+ * - https://github.com/nodejs/node/issues/48916 (fixed in Node 20.6)
+ *
+ * Once these versions are no longer supported, we may be able to stop doing this.
+ *
+ * @internal
+ */
+const rawContent = Symbol("rawContent");
+function hasRawContent(x) {
+    return typeof x[rawContent] === "function";
+}
+/**
+ * Extract the raw content from a given blob-like object. If the input was created using createFile
+ * or createFileFromStream, the exact content passed into createFile/createFileFromStream will be used.
+ * For true instances of Blob and File, returns the blob's content as a Web ReadableStream<Uint8Array>.
+ *
+ * @internal
+ */
+function getRawContent(blob) {
+    if (hasRawContent(blob)) {
+        return blob[rawContent]();
+    }
+    else {
+        return blob.stream();
+    }
+}
+/**
+ * Create an object that implements the File interface. This object is intended to be
+ * passed into RequestBodyType.formData, and is not guaranteed to work as expected in
+ * other situations.
+ *
+ * Use this function to:
+ * - Create a File object for use in RequestBodyType.formData in environments where the
+ *   global File object is unavailable.
+ * - Create a File-like object from a readable stream without reading the stream into memory.
+ *
+ * @param stream - the content of the file as a callback returning a stream. When a File object made using createFile is
+ *                  passed in a request's form data map, the stream will not be read into memory
+ *                  and instead will be streamed when the request is made. In the event of a retry, the
+ *                  stream needs to be read again, so this callback SHOULD return a fresh stream if possible.
+ * @param name - the name of the file.
+ * @param options - optional metadata about the file, e.g. file name, file size, MIME type.
+ */
+function createFileFromStream(stream, name, options = {}) {
+    var _a, _b, _c, _d;
+    return Object.assign(Object.assign({}, unimplementedMethods), { type: (_a = options.type) !== null && _a !== void 0 ? _a : "", lastModified: (_b = options.lastModified) !== null && _b !== void 0 ? _b : new Date().getTime(), webkitRelativePath: (_c = options.webkitRelativePath) !== null && _c !== void 0 ? _c : "", size: (_d = options.size) !== null && _d !== void 0 ? _d : -1, name, stream: () => {
+            const s = stream();
+            if ((0, typeGuards_js_1.isNodeReadableStream)(s)) {
+                throw new Error("Not supported: a Node stream was provided as input to createFileFromStream.");
+            }
+            return s;
+        }, [rawContent]: stream });
+}
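+// Illustrative usage sketch (comment only, not executed): wrapping a web-stream
+// factory so a form-data upload can be streamed and re-created on retry. The file
+// name and payload are example values.
+//
+//   const upload = createFileFromStream(
+//     () => new Blob(["payload"]).stream(),
+//     "payload.bin",
+//     { type: "application/octet-stream" },
+//   );
+//   // Because the first argument is a callback, a retried request can obtain a
+//   // fresh stream instead of reusing a partially consumed one.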
+/**
+ * Create an object that implements the File interface. This object is intended to be
+ * passed into RequestBodyType.formData, and is not guaranteed to work as expected in
+ * other situations.
+ *
+ * Use this function to create a File object for use in RequestBodyType.formData in environments where the global File object is unavailable.
+ *
+ * @param content - the content of the file as a Uint8Array in memory.
+ * @param name - the name of the file.
+ * @param options - optional metadata about the file, e.g. file name, file size, MIME type.
+ */
+function createFile(content, name, options = {}) {
+    var _a, _b, _c;
+    if (core_util_1.isNodeLike) {
+        return Object.assign(Object.assign({}, unimplementedMethods), { type: (_a = options.type) !== null && _a !== void 0 ? _a : "", lastModified: (_b = options.lastModified) !== null && _b !== void 0 ? _b : new Date().getTime(), webkitRelativePath: (_c = options.webkitRelativePath) !== null && _c !== void 0 ? _c : "", size: content.byteLength, name, arrayBuffer: async () => content.buffer, stream: () => new Blob([content]).stream(), [rawContent]: () => content });
+    }
+    else {
+        return new File([content], name, options);
+    }
+}
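+// Illustrative usage sketch (comment only, not executed): building a File-like value
+// from bytes already held in memory. The content and name are example values.
+//
+//   const file = createFile(new TextEncoder().encode("hello"), "hello.txt", {
+//     type: "text/plain",
+//   });
+//   // file.name === "hello.txt" and file.size === 5; on Node-like runtimes the raw
+//   // bytes stay reachable through the internal rawContent symbol used above.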
+//# sourceMappingURL=file.js.map
+
+/***/ }),
+
+/***/ 6935:
+/***/ ((__unused_webpack_module, exports, __nccwpck_require__) => {
+
+
+// Copyright (c) Microsoft Corporation.
+// Licensed under the MIT License.
+Object.defineProperty(exports, "__esModule", ({ value: true }));
+exports.delay = delay;
+exports.parseHeaderValueAsNumber = parseHeaderValueAsNumber;
+const abort_controller_1 = __nccwpck_require__(764);
+const StandardAbortMessage = "The operation was aborted.";
+/**
+ * A wrapper for setTimeout that resolves a promise after delayInMs milliseconds.
+ * @param delayInMs - The number of milliseconds to be delayed.
+ * @param value - The value to be resolved with after a timeout of delayInMs milliseconds.
+ * @param options - The options for delay - currently abort options
+ *                  - abortSignal - The abortSignal associated with the containing operation.
+ *                  - abortErrorMsg - The abort error message associated with the containing operation.
+ * @returns Resolved promise
+ */
+function delay(delayInMs, value, options) {
+    return new Promise((resolve, reject) => {
+        let timer = undefined;
+        let onAborted = undefined;
+        const rejectOnAbort = () => {
+            return reject(new abort_controller_1.AbortError((options === null || options === void 0 ? void 0 : options.abortErrorMsg) ? options === null || options === void 0 ? void 0 : options.abortErrorMsg : StandardAbortMessage));
+        };
+        const removeListeners = () => {
+            if ((options === null || options === void 0 ? void 0 : options.abortSignal) && onAborted) {
+                options.abortSignal.removeEventListener("abort", onAborted);
+            }
+        };
+        onAborted = () => {
+            if (timer) {
+                clearTimeout(timer);
+            }
+            removeListeners();
+            return rejectOnAbort();
+        };
+        if ((options === null || options === void 0 ? void 0 : options.abortSignal) && options.abortSignal.aborted) {
+            return rejectOnAbort();
+        }
+        timer = setTimeout(() => {
+            removeListeners();
+            resolve(value);
+        }, delayInMs);
+        if (options === null || options === void 0 ? void 0 : options.abortSignal) {
+            options.abortSignal.addEventListener("abort", onAborted);
+        }
+    });
+}
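+// Illustrative usage sketch (comment only, not executed): resolving a value after a
+// timeout unless an AbortSignal fires first, in which case the promise rejects with
+// an AbortError carrying the configured message.
+//
+//   const controller = new AbortController();
+//   const pending = delay(2000, "done", {
+//     abortSignal: controller.signal,
+//     abortErrorMsg: "delay cancelled",
+//   });
+//   controller.abort(); // pending now rejects with AbortError("delay cancelled")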
+/**
+ * @internal
+ * @returns the parsed value or undefined if the parsed value is invalid.
+ */
+function parseHeaderValueAsNumber(response, headerName) {
+    const value = response.headers.get(headerName);
+    if (!value)
+        return;
+    const valueAsNum = Number(value);
+    if (Number.isNaN(valueAsNum))
+        return;
+    return valueAsNum;
+}
+//# sourceMappingURL=helpers.js.map
+
+/***/ }),
+
+/***/ 6002:
+/***/ ((__unused_webpack_module, exports, __nccwpck_require__) => {
+
+
+// Copyright (c) Microsoft Corporation.
+// Licensed under the MIT License.
+Object.defineProperty(exports, "__esModule", ({ value: true }));
+exports.custom = void 0;
+const node_util_1 = __nccwpck_require__(7975);
+exports.custom = node_util_1.inspect.custom;
+//# sourceMappingURL=inspect.js.map
+
+/***/ }),
+
+/***/ 6825:
+/***/ ((__unused_webpack_module, exports, __nccwpck_require__) => {
+
+
+// Copyright (c) Microsoft Corporation.
+// Licensed under the MIT License.
+Object.defineProperty(exports, "__esModule", ({ value: true }));
+exports.Sanitizer = void 0;
+const core_util_1 = __nccwpck_require__(402);
+const RedactedString = "REDACTED";
+// Make sure this list is up-to-date with the one under core/logger/Readme#Keyconcepts
+const defaultAllowedHeaderNames = [
+    "x-ms-client-request-id",
+    "x-ms-return-client-request-id",
+    "x-ms-useragent",
+    "x-ms-correlation-request-id",
+    "x-ms-request-id",
+    "client-request-id",
+    "ms-cv",
+    "return-client-request-id",
+    "traceparent",
+    "Access-Control-Allow-Credentials",
+    "Access-Control-Allow-Headers",
+    "Access-Control-Allow-Methods",
+    "Access-Control-Allow-Origin",
+    "Access-Control-Expose-Headers",
+    "Access-Control-Max-Age",
+    "Access-Control-Request-Headers",
+    "Access-Control-Request-Method",
+    "Origin",
+    "Accept",
+    "Accept-Encoding",
+    "Cache-Control",
+    "Connection",
+    "Content-Length",
+    "Content-Type",
+    "Date",
+    "ETag",
+    "Expires",
+    "If-Match",
+    "If-Modified-Since",
+    "If-None-Match",
+    "If-Unmodified-Since",
+    "Last-Modified",
+    "Pragma",
+    "Request-Id",
+    "Retry-After",
+    "Server",
+    "Transfer-Encoding",
+    "User-Agent",
+    "WWW-Authenticate",
+];
+const defaultAllowedQueryParameters = ["api-version"];
+/**
+ * @internal
+ */
+class Sanitizer {
+    constructor({ additionalAllowedHeaderNames: allowedHeaderNames = [], additionalAllowedQueryParameters: allowedQueryParameters = [], } = {}) {
+        allowedHeaderNames = defaultAllowedHeaderNames.concat(allowedHeaderNames);
+        allowedQueryParameters = defaultAllowedQueryParameters.concat(allowedQueryParameters);
+        this.allowedHeaderNames = new Set(allowedHeaderNames.map((n) => n.toLowerCase()));
+        this.allowedQueryParameters = new Set(allowedQueryParameters.map((p) => p.toLowerCase()));
+    }
+    sanitize(obj) {
+        const seen = new Set();
+        return JSON.stringify(obj, (key, value) => {
+            // Ensure Errors include their interesting non-enumerable members
+            if (value instanceof Error) {
+                return Object.assign(Object.assign({}, value), { name: value.name, message: value.message });
+            }
+            if (key === "headers") {
+                return this.sanitizeHeaders(value);
+            }
+            else if (key === "url") {
+                return this.sanitizeUrl(value);
+            }
+            else if (key === "query") {
+                return this.sanitizeQuery(value);
+            }
+            else if (key === "body") {
+                // Don't log the request body
+                return undefined;
+            }
+            else if (key === "response") {
+                // Don't log response again
+                return undefined;
+            }
+            else if (key === "operationSpec") {
+                // When using sendOperationRequest, the request carries a massive
+                // field with the autorest spec. No need to log it.
+                return undefined;
+            }
+            else if (Array.isArray(value) || (0, core_util_1.isObject)(value)) {
+                if (seen.has(value)) {
+                    return "[Circular]";
+                }
+                seen.add(value);
+            }
+            return value;
+        }, 2);
+    }
+    sanitizeUrl(value) {
+        if (typeof value !== "string" || value === null || value === "") {
+            return value;
+        }
+        const url = new URL(value);
+        if (!url.search) {
+            return value;
+        }
+        for (const [key] of url.searchParams) {
+            if (!this.allowedQueryParameters.has(key.toLowerCase())) {
+                url.searchParams.set(key, RedactedString);
+            }
+        }
+        return url.toString();
+    }
+    sanitizeHeaders(obj) {
+        const sanitized = {};
+        for (const key of Object.keys(obj)) {
+            if (this.allowedHeaderNames.has(key.toLowerCase())) {
+                sanitized[key] = obj[key];
+            }
+            else {
+                sanitized[key] = RedactedString;
+            }
+        }
+        return sanitized;
+    }
+    sanitizeQuery(value) {
+        if (typeof value !== "object" || value === null) {
+            return value;
+        }
+        const sanitized = {};
+        for (const k of Object.keys(value)) {
+            if (this.allowedQueryParameters.has(k.toLowerCase())) {
+                sanitized[k] = value[k];
+            }
+            else {
+                sanitized[k] = RedactedString;
+            }
+        }
+        return sanitized;
+    }
+}
+exports.Sanitizer = Sanitizer;
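+// Illustrative usage sketch (comment only, not executed): redacting a request-like
+// object before logging it. The URL, header, and query values are example data.
+//
+//   const sanitizer = new Sanitizer();
+//   sanitizer.sanitize({
+//     url: "https://example.com/items?api-version=2024-01-01&sig=secret",
+//     headers: { Accept: "application/json", Authorization: "Bearer token" },
+//   });
+//   // "api-version" and "Accept" pass through unchanged, while "sig" and
+//   // "Authorization" are replaced with REDACTED.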
+//# sourceMappingURL=sanitizer.js.map
+
+/***/ }),
+
+/***/ 47:
+/***/ ((__unused_webpack_module, exports, __nccwpck_require__) => {
+
+
+// Copyright (c) Microsoft Corporation.
+// Licensed under the MIT License.
+Object.defineProperty(exports, "__esModule", ({ value: true }));
+exports.DEFAULT_CYCLER_OPTIONS = void 0;
+exports.createTokenCycler = createTokenCycler;
+const helpers_js_1 = __nccwpck_require__(6935);
+// Default options for the cycler if none are provided
+exports.DEFAULT_CYCLER_OPTIONS = {
+    forcedRefreshWindowInMs: 1000, // Force waiting for a refresh 1s before the token expires
+    retryIntervalInMs: 3000, // Allow refresh attempts every 3s
+    refreshWindowInMs: 1000 * 60 * 2, // Start refreshing 2m before expiry
+};
+/**
+ * Converts an unreliable access token getter (which may resolve with null)
+ * into an AccessTokenGetter by retrying the unreliable getter at a regular
+ * interval.
+ *
+ * @param getAccessToken - A function that produces a promise of an access token that may fail by returning null.
+ * @param retryIntervalInMs - The time (in milliseconds) to wait between retry attempts.
+ * @param refreshTimeout - The timestamp after which the refresh attempt will fail, throwing an exception.
+ * @returns - A promise that, if it resolves, will resolve with an access token.
+ */
+async function beginRefresh(getAccessToken, retryIntervalInMs, refreshTimeout) {
+    // This wrapper handles exceptions gracefully as long as we haven't exceeded
+    // the timeout.
+    async function tryGetAccessToken() {
+        if (Date.now() < refreshTimeout) {
+            try {
+                return await getAccessToken();
+            }
+            catch (_a) {
+                return null;
+            }
+        }
+        else {
+            const finalToken = await getAccessToken();
+            // Timeout is up, so throw if it's still null
+            if (finalToken === null) {
+                throw new Error("Failed to refresh access token.");
+            }
+            return finalToken;
+        }
+    }
+    let token = await tryGetAccessToken();
+    while (token === null) {
+        await (0, helpers_js_1.delay)(retryIntervalInMs);
+        token = await tryGetAccessToken();
+    }
+    return token;
+}
+/**
+ * Creates a token cycler from a credential, scopes, and optional settings.
+ *
+ * A token cycler represents a way to reliably retrieve a valid access token
+ * from a TokenCredential. It will handle initializing the token, refreshing it
+ * when it nears expiration, and synchronizing refresh attempts to avoid
+ * concurrency hazards.
+ *
+ * @param credential - the underlying TokenCredential that provides the access
+ * token
+ * @param tokenCyclerOptions - optionally override default settings for the cycler
+ *
+ * @returns - a function that reliably produces a valid access token
+ */
+function createTokenCycler(credential, tokenCyclerOptions) {
+    let refreshWorker = null;
+    let token = null;
+    let tenantId;
+    const options = Object.assign(Object.assign({}, exports.DEFAULT_CYCLER_OPTIONS), tokenCyclerOptions);
+    /**
+     * This little holder defines several predicates that we use to construct
+     * the rules of refreshing the token.
+     */
+    const cycler = {
+        /**
+         * Produces true if a refresh job is currently in progress.
+         */
+        get isRefreshing() {
+            return refreshWorker !== null;
+        },
+        /**
+         * Produces true if the cycler SHOULD refresh (we are within the refresh
+         * window and not already refreshing)
+         */
+        get shouldRefresh() {
+            var _a;
+            if (cycler.isRefreshing) {
+                return false;
+            }
+            if ((token === null || token === void 0 ? void 0 : token.refreshAfterTimestamp) && token.refreshAfterTimestamp < Date.now()) {
+                return true;
+            }
+            return ((_a = token === null || token === void 0 ? void 0 : token.expiresOnTimestamp) !== null && _a !== void 0 ? _a : 0) - options.refreshWindowInMs < Date.now();
+        },
+        /**
+         * Produces true if the cycler MUST refresh (null or nearly-expired
+         * token).
+         */
+        get mustRefresh() {
+            return (token === null || token.expiresOnTimestamp - options.forcedRefreshWindowInMs < Date.now());
+        },
+    };
+    /**
+     * Starts a refresh job or returns the existing job if one is already
+     * running.
+     */
+    function refresh(scopes, getTokenOptions) {
+        var _a;
+        if (!cycler.isRefreshing) {
+            // We bind `scopes` here to avoid passing it around a lot
+            const tryGetAccessToken = () => credential.getToken(scopes, getTokenOptions);
+            // Take advantage of promise chaining to insert an assignment to `token`
+            // before the refresh can be considered done.
+            refreshWorker = beginRefresh(tryGetAccessToken, options.retryIntervalInMs, 
+            // If we don't have a token, then we should timeout immediately
+            (_a = token === null || token === void 0 ? void 0 : token.expiresOnTimestamp) !== null && _a !== void 0 ? _a : Date.now())
+                .then((_token) => {
+                refreshWorker = null;
+                token = _token;
+                tenantId = getTokenOptions.tenantId;
+                return token;
+            })
+                .catch((reason) => {
+                // We also should reset the refresher if we enter a failed state.  All
+                // existing awaiters will throw, but subsequent requests will start a
+                // new retry chain.
+                refreshWorker = null;
+                token = null;
+                tenantId = undefined;
+                throw reason;
+            });
+        }
+        return refreshWorker;
+    }
+    return async (scopes, tokenOptions) => {
+        //
+        // Simple rules:
+        // - If we MUST refresh, then return the refresh task, blocking
+        //   the pipeline until a token is available.
+        // - If we SHOULD refresh, then run refresh but don't return it
+        //   (we can still use the cached token).
+        // - Return the token, since it's fine if we didn't return in
+        //   step 1.
+        //
+        const hasClaimChallenge = Boolean(tokenOptions.claims);
+        const tenantIdChanged = tenantId !== tokenOptions.tenantId;
+        if (hasClaimChallenge) {
+            // If we've received a claim, we know the existing token isn't valid
+            // We want to clear it so that the refresh worker won't use the old expiration time as a timeout
+            token = null;
+        }
+        // If the tenantId passed in the token options is different from the one we have,
+        // or if we are handling a claim challenge (the token was rejected and a new access token needs to be issued),
+        // we need to refresh the token using the new tenantId or claims.
+        const mustRefresh = tenantIdChanged || hasClaimChallenge || cycler.mustRefresh;
+        if (mustRefresh) {
+            return refresh(scopes, tokenOptions);
+        }
+        if (cycler.shouldRefresh) {
+            refresh(scopes, tokenOptions);
+        }
+        return token;
+    };
+}
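+// Illustrative usage sketch (comment only, not executed): wrapping a TokenCredential
+// so repeated calls share a cached token and refreshes are coalesced. The credential
+// and scope are placeholders.
+//
+//   const getAccessToken = createTokenCycler(credential, { retryIntervalInMs: 5000 });
+//   const token = await getAccessToken(["https://example.service/.default"], {});
+//   // Later calls return the cached token; once it enters the refresh window a
+//   // single background refresh is started, and a nearly expired or missing token
+//   // blocks callers until the refresh completes.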
+//# sourceMappingURL=tokenCycler.js.map
+
+/***/ }),
+
+/***/ 3246:
+/***/ ((__unused_webpack_module, exports) => {
+
+
+// Copyright (c) Microsoft Corporation.
+// Licensed under the MIT License.
+Object.defineProperty(exports, "__esModule", ({ value: true }));
+exports.isNodeReadableStream = isNodeReadableStream;
+exports.isWebReadableStream = isWebReadableStream;
+exports.isReadableStream = isReadableStream;
+exports.isBlob = isBlob;
+function isNodeReadableStream(x) {
+    return Boolean(x && typeof x["pipe"] === "function");
+}
+function isWebReadableStream(x) {
+    return Boolean(x &&
+        typeof x.getReader === "function" &&
+        typeof x.tee === "function");
+}
+function isReadableStream(x) {
+    return isNodeReadableStream(x) || isWebReadableStream(x);
+}
+function isBlob(x) {
+    return typeof x.stream === "function";
+}
+//# sourceMappingURL=typeGuards.js.map
+
+/***/ }),
+
+/***/ 286:
+/***/ ((__unused_webpack_module, exports, __nccwpck_require__) => {
+
+
+// Copyright (c) Microsoft Corporation.
+// Licensed under the MIT License.
+Object.defineProperty(exports, "__esModule", ({ value: true }));
+exports.getUserAgentHeaderName = getUserAgentHeaderName;
+exports.getUserAgentValue = getUserAgentValue;
+const userAgentPlatform_js_1 = __nccwpck_require__(2501);
+const constants_js_1 = __nccwpck_require__(1652);
+function getUserAgentString(telemetryInfo) {
+    const parts = [];
+    for (const [key, value] of telemetryInfo) {
+        const token = value ? `${key}/${value}` : key;
+        parts.push(token);
+    }
+    return parts.join(" ");
+}
+/**
+ * @internal
+ */
+function getUserAgentHeaderName() {
+    return (0, userAgentPlatform_js_1.getHeaderName)();
+}
+/**
+ * @internal
+ */
+async function getUserAgentValue(prefix) {
+    const runtimeInfo = new Map();
+    runtimeInfo.set("core-rest-pipeline", constants_js_1.SDK_VERSION);
+    await (0, userAgentPlatform_js_1.setPlatformSpecificData)(runtimeInfo);
+    const defaultAgent = getUserAgentString(runtimeInfo);
+    const userAgentValue = prefix ? `${prefix} ${defaultAgent}` : defaultAgent;
+    return userAgentValue;
+}
+//# sourceMappingURL=userAgent.js.map
+
+/***/ }),
+
+/***/ 2501:
+/***/ ((__unused_webpack_module, exports, __nccwpck_require__) => {
+
+
+// Copyright (c) Microsoft Corporation.
+// Licensed under the MIT License.
+Object.defineProperty(exports, "__esModule", ({ value: true }));
+exports.getHeaderName = getHeaderName;
+exports.setPlatformSpecificData = setPlatformSpecificData;
+const tslib_1 = __nccwpck_require__(1577);
+const os = tslib_1.__importStar(__nccwpck_require__(8161));
+const process = tslib_1.__importStar(__nccwpck_require__(1708));
+/**
+ * @internal
+ */
+function getHeaderName() {
+    return "User-Agent";
+}
+/**
+ * @internal
+ */
+async function setPlatformSpecificData(map) {
+    if (process && process.versions) {
+        const versions = process.versions;
+        if (versions.bun) {
+            map.set("Bun", versions.bun);
+        }
+        else if (versions.deno) {
+            map.set("Deno", versions.deno);
+        }
+        else if (versions.node) {
+            map.set("Node", versions.node);
+        }
+    }
+    map.set("OS", `(${os.arch()}-${os.type()}-${os.release()})`);
+}
+//# sourceMappingURL=userAgentPlatform.js.map
+
+/***/ }),
+
+/***/ 9340:
+/***/ ((__unused_webpack_module, exports, __nccwpck_require__) => {
+
+
+// Copyright (c) Microsoft Corporation.
+// Licensed under the MIT License.
+Object.defineProperty(exports, "__esModule", ({ value: true }));
+exports.createTracingClient = exports.useInstrumenter = void 0;
+var instrumenter_js_1 = __nccwpck_require__(7248);
+Object.defineProperty(exports, "useInstrumenter", ({ enumerable: true, get: function () { return instrumenter_js_1.useInstrumenter; } }));
+var tracingClient_js_1 = __nccwpck_require__(7009);
+Object.defineProperty(exports, "createTracingClient", ({ enumerable: true, get: function () { return tracingClient_js_1.createTracingClient; } }));
+//# sourceMappingURL=index.js.map
+
+/***/ }),
+
+/***/ 7248:
+/***/ ((__unused_webpack_module, exports, __nccwpck_require__) => {
+
+
+// Copyright (c) Microsoft Corporation.
+// Licensed under the MIT License.
+Object.defineProperty(exports, "__esModule", ({ value: true }));
+exports.createDefaultTracingSpan = createDefaultTracingSpan;
+exports.createDefaultInstrumenter = createDefaultInstrumenter;
+exports.useInstrumenter = useInstrumenter;
+exports.getInstrumenter = getInstrumenter;
+const tracingContext_js_1 = __nccwpck_require__(2075);
+const state_js_1 = __nccwpck_require__(333);
+function createDefaultTracingSpan() {
+    return {
+        end: () => {
+            // noop
+        },
+        isRecording: () => false,
+        recordException: () => {
+            // noop
+        },
+        setAttribute: () => {
+            // noop
+        },
+        setStatus: () => {
+            // noop
+        },
+        addEvent: () => {
+            // noop
+        },
+    };
+}
+function createDefaultInstrumenter() {
+    return {
+        createRequestHeaders: () => {
+            return {};
+        },
+        parseTraceparentHeader: () => {
+            return undefined;
+        },
+        startSpan: (_name, spanOptions) => {
+            return {
+                span: createDefaultTracingSpan(),
+                tracingContext: (0, tracingContext_js_1.createTracingContext)({ parentContext: spanOptions.tracingContext }),
+            };
+        },
+        withContext(_context, callback, ...callbackArgs) {
+            return callback(...callbackArgs);
+        },
+    };
+}
+/**
+ * Extends the Azure SDK with support for a given instrumenter implementation.
+ *
+ * @param instrumenter - The instrumenter implementation to use.
+ */
+function useInstrumenter(instrumenter) {
+    state_js_1.state.instrumenterImplementation = instrumenter;
+}
+/**
+ * Gets the currently set instrumenter, a No-Op instrumenter by default.
+ *
+ * @returns The currently set instrumenter
+ */
+function getInstrumenter() {
+    if (!state_js_1.state.instrumenterImplementation) {
+        state_js_1.state.instrumenterImplementation = createDefaultInstrumenter();
+    }
+    return state_js_1.state.instrumenterImplementation;
+}
+//# sourceMappingURL=instrumenter.js.map
+
+/***/ }),
+
+/***/ 333:
+/***/ ((__unused_webpack_module, exports) => {
+
+
+// Copyright (c) Microsoft Corporation.
+// Licensed under the MIT License.
+Object.defineProperty(exports, "__esModule", ({ value: true }));
+exports.state = void 0;
+/**
+ * @internal
+ *
+ * Holds the singleton instrumenter, to be shared across CJS and ESM imports.
+ */
+exports.state = {
+    instrumenterImplementation: undefined,
+};
+//# sourceMappingURL=state-cjs.cjs.map
+
+/***/ }),
+
+/***/ 7009:
+/***/ ((__unused_webpack_module, exports, __nccwpck_require__) => {
+
+
+// Copyright (c) Microsoft Corporation.
+// Licensed under the MIT License.
+Object.defineProperty(exports, "__esModule", ({ value: true }));
+exports.createTracingClient = createTracingClient;
+const instrumenter_js_1 = __nccwpck_require__(7248);
+const tracingContext_js_1 = __nccwpck_require__(2075);
+/**
+ * Creates a new tracing client.
+ *
+ * @param options - Options used to configure the tracing client.
+ * @returns - An instance of {@link TracingClient}.
+ */
+function createTracingClient(options) {
+    const { namespace, packageName, packageVersion } = options;
+    function startSpan(name, operationOptions, spanOptions) {
+        var _a;
+        const startSpanResult = (0, instrumenter_js_1.getInstrumenter)().startSpan(name, Object.assign(Object.assign({}, spanOptions), { packageName: packageName, packageVersion: packageVersion, tracingContext: (_a = operationOptions === null || operationOptions === void 0 ? void 0 : operationOptions.tracingOptions) === null || _a === void 0 ? void 0 : _a.tracingContext }));
+        let tracingContext = startSpanResult.tracingContext;
+        const span = startSpanResult.span;
+        if (!tracingContext.getValue(tracingContext_js_1.knownContextKeys.namespace)) {
+            tracingContext = tracingContext.setValue(tracingContext_js_1.knownContextKeys.namespace, namespace);
+        }
+        span.setAttribute("az.namespace", tracingContext.getValue(tracingContext_js_1.knownContextKeys.namespace));
+        const updatedOptions = Object.assign({}, operationOptions, {
+            tracingOptions: Object.assign(Object.assign({}, operationOptions === null || operationOptions === void 0 ? void 0 : operationOptions.tracingOptions), { tracingContext }),
+        });
+        return {
+            span,
+            updatedOptions,
+        };
+    }
+    async function withSpan(name, operationOptions, callback, spanOptions) {
+        const { span, updatedOptions } = startSpan(name, operationOptions, spanOptions);
+        try {
+            const result = await withContext(updatedOptions.tracingOptions.tracingContext, () => Promise.resolve(callback(updatedOptions, span)));
+            span.setStatus({ status: "success" });
+            return result;
+        }
+        catch (err) {
+            span.setStatus({ status: "error", error: err });
+            throw err;
+        }
+        finally {
+            span.end();
+        }
+    }
+    function withContext(context, callback, ...callbackArgs) {
+        return (0, instrumenter_js_1.getInstrumenter)().withContext(context, callback, ...callbackArgs);
+    }
+    /**
+     * Parses a traceparent header value into a span identifier.
+     *
+     * @param traceparentHeader - The traceparent header to parse.
+     * @returns An implementation-specific identifier for the span.
+     */
+    function parseTraceparentHeader(traceparentHeader) {
+        return (0, instrumenter_js_1.getInstrumenter)().parseTraceparentHeader(traceparentHeader);
+    }
+    /**
+     * Creates a set of request headers to propagate tracing information to a backend.
+     *
+     * @param tracingContext - The context containing the span to serialize.
+     * @returns The set of headers to add to a request.
+     */
+    function createRequestHeaders(tracingContext) {
+        return (0, instrumenter_js_1.getInstrumenter)().createRequestHeaders(tracingContext);
+    }
+    return {
+        startSpan,
+        withSpan,
+        withContext,
+        parseTraceparentHeader,
+        createRequestHeaders,
+    };
+}
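+// Illustrative usage sketch (comment only, not executed): creating a tracing client
+// and running an operation inside a span. The namespace and package metadata are
+// placeholder values.
+//
+//   const tracingClient = createTracingClient({
+//     namespace: "Microsoft.Example",
+//     packageName: "@example/client",
+//     packageVersion: "1.0.0",
+//   });
+//   await tracingClient.withSpan("ExampleClient.doWork", {}, async (updatedOptions, span) => {
+//     // perform the operation here; span status and span.end() are handled above
+//   });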
+//# sourceMappingURL=tracingClient.js.map
+
+/***/ }),
+
+/***/ 2075:
+/***/ ((__unused_webpack_module, exports) => {
+
+
+// Copyright (c) Microsoft Corporation.
+// Licensed under the MIT License.
+Object.defineProperty(exports, "__esModule", ({ value: true }));
+exports.TracingContextImpl = exports.knownContextKeys = void 0;
+exports.createTracingContext = createTracingContext;
+/** @internal */
+exports.knownContextKeys = {
+    span: Symbol.for("@azure/core-tracing span"),
+    namespace: Symbol.for("@azure/core-tracing namespace"),
+};
+/**
+ * Creates a new {@link TracingContext} with the given options.
+ * @param options - A set of known keys that may be set on the context.
+ * @returns A new {@link TracingContext} with the given options.
+ *
+ * @internal
+ */
+function createTracingContext(options = {}) {
+    let context = new TracingContextImpl(options.parentContext);
+    if (options.span) {
+        context = context.setValue(exports.knownContextKeys.span, options.span);
+    }
+    if (options.namespace) {
+        context = context.setValue(exports.knownContextKeys.namespace, options.namespace);
+    }
+    return context;
+}
+/** @internal */
+class TracingContextImpl {
+    constructor(initialContext) {
+        this._contextMap =
+            initialContext instanceof TracingContextImpl
+                ? new Map(initialContext._contextMap)
+                : new Map();
+    }
+    setValue(key, value) {
+        const newContext = new TracingContextImpl(this);
+        newContext._contextMap.set(key, value);
+        return newContext;
+    }
+    getValue(key) {
+        return this._contextMap.get(key);
+    }
+    deleteValue(key) {
+        const newContext = new TracingContextImpl(this);
+        newContext._contextMap.delete(key);
+        return newContext;
+    }
+}
+exports.TracingContextImpl = TracingContextImpl;
+//# sourceMappingURL=tracingContext.js.map
+
+/***/ }),
+
+/***/ 6826:
+/***/ ((__unused_webpack_module, exports) => {
+
+
+// Copyright (c) Microsoft Corporation.
+// Licensed under the MIT License.
+Object.defineProperty(exports, "__esModule", ({ value: true }));
+exports.cancelablePromiseRace = cancelablePromiseRace;
+/**
+ * A Promise.race() wrapper that aborts the remaining promises as soon as the first promise settles.
+ */
+async function cancelablePromiseRace(abortablePromiseBuilders, options) {
+    var _a, _b;
+    const aborter = new AbortController();
+    function abortHandler() {
+        aborter.abort();
+    }
+    (_a = options === null || options === void 0 ? void 0 : options.abortSignal) === null || _a === void 0 ? void 0 : _a.addEventListener("abort", abortHandler);
+    try {
+        return await Promise.race(abortablePromiseBuilders.map((p) => p({ abortSignal: aborter.signal })));
+    }
+    finally {
+        aborter.abort();
+        (_b = options === null || options === void 0 ? void 0 : options.abortSignal) === null || _b === void 0 ? void 0 : _b.removeEventListener("abort", abortHandler);
+    }
+}
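+// Illustrative usage sketch (comment only, not executed): racing two abortable
+// operations so the loser is cancelled as soon as the winner settles. The URLs are
+// placeholders.
+//
+//   const first = await cancelablePromiseRace([
+//     ({ abortSignal }) => fetch("https://example.com/a", { signal: abortSignal }),
+//     ({ abortSignal }) => fetch("https://example.com/b", { signal: abortSignal }),
+//   ]);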
+//# sourceMappingURL=aborterUtils.js.map
+
+/***/ }),
+
+/***/ 6508:
+/***/ ((__unused_webpack_module, exports) => {
+
+
+// Copyright (c) Microsoft Corporation.
+// Licensed under the MIT License.
+Object.defineProperty(exports, "__esModule", ({ value: true }));
+exports.uint8ArrayToString = uint8ArrayToString;
+exports.stringToUint8Array = stringToUint8Array;
+/**
+ * Helper that transforms a byte array into a string using the specified character encoding.
+ * @param bytes - the Uint8Array of bytes
+ * @param format - the character encoding used to produce the string
+ * @returns the encoded string
+ */
+function uint8ArrayToString(bytes, format) {
+    return Buffer.from(bytes).toString(format);
+}
+/**
+ * Helper that transforms a string into a byte array using the specified character encoding.
+ * @param value - the string to be converted
+ * @param format - the character encoding used to decode the value
+ * @returns a Uint8Array
+ */
+function stringToUint8Array(value, format) {
+    return Buffer.from(value, format);
+}
+//# sourceMappingURL=bytesEncoding.js.map
+
+/***/ }),
+
+/***/ 5869:
+/***/ ((__unused_webpack_module, exports) => {
+
+
+// Copyright (c) Microsoft Corporation.
+// Licensed under the MIT License.
+var _a, _b, _c, _d;
+Object.defineProperty(exports, "__esModule", ({ value: true }));
+exports.isReactNative = exports.isNodeRuntime = exports.isNode = exports.isNodeLike = exports.isBun = exports.isDeno = exports.isWebWorker = exports.isBrowser = void 0;
+/**
+ * A constant that indicates whether the environment the code is running in is a Web Browser.
+ */
+// eslint-disable-next-line @azure/azure-sdk/ts-no-window
+exports.isBrowser = typeof window !== "undefined" && typeof window.document !== "undefined";
+/**
+ * A constant that indicates whether the environment the code is running in is a Web Worker.
+ */
+exports.isWebWorker = typeof self === "object" &&
+    typeof (self === null || self === void 0 ? void 0 : self.importScripts) === "function" &&
+    (((_a = self.constructor) === null || _a === void 0 ? void 0 : _a.name) === "DedicatedWorkerGlobalScope" ||
+        ((_b = self.constructor) === null || _b === void 0 ? void 0 : _b.name) === "ServiceWorkerGlobalScope" ||
+        ((_c = self.constructor) === null || _c === void 0 ? void 0 : _c.name) === "SharedWorkerGlobalScope");
+/**
+ * A constant that indicates whether the environment the code is running in is Deno.
+ */
+exports.isDeno = typeof Deno !== "undefined" &&
+    typeof Deno.version !== "undefined" &&
+    typeof Deno.version.deno !== "undefined";
+/**
+ * A constant that indicates whether the environment the code is running in is Bun.sh.
+ */
+exports.isBun = typeof Bun !== "undefined" && typeof Bun.version !== "undefined";
+/**
+ * A constant that indicates whether the environment the code is running in is a Node.js compatible environment.
+ */
+exports.isNodeLike = typeof globalThis.process !== "undefined" &&
+    Boolean(globalThis.process.version) &&
+    Boolean((_d = globalThis.process.versions) === null || _d === void 0 ? void 0 : _d.node);
+/**
+ * A constant that indicates whether the environment the code is running in is a Node.js compatible environment.
+ * @deprecated Use `isNodeLike` instead.
+ */
+exports.isNode = exports.isNodeLike;
+/**
+ * A constant that indicates whether the environment the code is running in is Node.js.
+ */
+exports.isNodeRuntime = exports.isNodeLike && !exports.isBun && !exports.isDeno;
+/**
+ * A constant that indicates whether the environment the code is running in is React Native.
+ */
+// https://github.com/facebook/react-native/blob/main/packages/react-native/Libraries/Core/setUpNavigator.js
+exports.isReactNative = typeof navigator !== "undefined" && (navigator === null || navigator === void 0 ? void 0 : navigator.product) === "ReactNative";
+//# sourceMappingURL=checkEnvironment.js.map
+
+/***/ }),
+
+/***/ 9639:
+/***/ ((__unused_webpack_module, exports, __nccwpck_require__) => {
+
+
+// Copyright (c) Microsoft Corporation.
+// Licensed under the MIT License.
+Object.defineProperty(exports, "__esModule", ({ value: true }));
+exports.createAbortablePromise = createAbortablePromise;
+const abort_controller_1 = __nccwpck_require__(764);
+/**
+ * Creates an abortable promise.
+ * @param buildPromise - A function that takes the resolve and reject functions as parameters.
+ * @param options - The options for the abortable promise.
+ * @returns A promise that can be aborted.
+ */
+function createAbortablePromise(buildPromise, options) {
+    const { cleanupBeforeAbort, abortSignal, abortErrorMsg } = options !== null && options !== void 0 ? options : {};
+    return new Promise((resolve, reject) => {
+        function rejectOnAbort() {
+            reject(new abort_controller_1.AbortError(abortErrorMsg !== null && abortErrorMsg !== void 0 ? abortErrorMsg : "The operation was aborted."));
+        }
+        function removeListeners() {
+            abortSignal === null || abortSignal === void 0 ? void 0 : abortSignal.removeEventListener("abort", onAbort);
+        }
+        function onAbort() {
+            cleanupBeforeAbort === null || cleanupBeforeAbort === void 0 ? void 0 : cleanupBeforeAbort();
+            removeListeners();
+            rejectOnAbort();
+        }
+        if (abortSignal === null || abortSignal === void 0 ? void 0 : abortSignal.aborted) {
+            return rejectOnAbort();
+        }
+        try {
+            buildPromise((x) => {
+                removeListeners();
+                resolve(x);
+            }, (x) => {
+                removeListeners();
+                reject(x);
+            });
+        }
+        catch (err) {
+            reject(err);
+        }
+        abortSignal === null || abortSignal === void 0 ? void 0 : abortSignal.addEventListener("abort", onAbort);
+    });
+}
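+// Illustrative usage sketch (comment only, not executed): adapting a timer into an
+// abortable promise; this mirrors how the delay helper below uses it. The abort
+// signal is a placeholder.
+//
+//   let timer;
+//   await createAbortablePromise(
+//     (resolve) => { timer = setTimeout(resolve, 1000); },
+//     { cleanupBeforeAbort: () => clearTimeout(timer), abortSignal: someAbortSignal },
+//   );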
+//# sourceMappingURL=createAbortablePromise.js.map
+
+/***/ }),
+
+/***/ 2389:
+/***/ ((__unused_webpack_module, exports, __nccwpck_require__) => {
+
+
+// Copyright (c) Microsoft Corporation.
+// Licensed under the MIT License.
+Object.defineProperty(exports, "__esModule", ({ value: true }));
+exports.delay = delay;
+exports.calculateRetryDelay = calculateRetryDelay;
+const createAbortablePromise_js_1 = __nccwpck_require__(9639);
+const random_js_1 = __nccwpck_require__(987);
+const StandardAbortMessage = "The delay was aborted.";
+/**
+ * A wrapper for setTimeout that resolves a promise after timeInMs milliseconds.
+ * @param timeInMs - The number of milliseconds to be delayed.
+ * @param options - The options for delay - currently abort options
+ * @returns Promise that is resolved after timeInMs
+ */
+function delay(timeInMs, options) {
+    let token;
+    const { abortSignal, abortErrorMsg } = options !== null && options !== void 0 ? options : {};
+    return (0, createAbortablePromise_js_1.createAbortablePromise)((resolve) => {
+        token = setTimeout(resolve, timeInMs);
+    }, {
+        cleanupBeforeAbort: () => clearTimeout(token),
+        abortSignal,
+        abortErrorMsg: abortErrorMsg !== null && abortErrorMsg !== void 0 ? abortErrorMsg : StandardAbortMessage,
+    });
+}
+/**
+ * Calculates the delay interval for retry attempts using exponential delay with jitter.
+ * @param retryAttempt - The current retry attempt number.
+ * @param config - The exponential retry configuration.
+ * @returns An object containing the calculated retry delay.
+ */
+function calculateRetryDelay(retryAttempt, config) {
+    // Exponentially increase the delay each time
+    const exponentialDelay = config.retryDelayInMs * Math.pow(2, retryAttempt);
+    // Don't let the delay exceed the maximum
+    const clampedDelay = Math.min(config.maxRetryDelayInMs, exponentialDelay);
+    // Allow the final value to have some "jitter" (within 50% of the delay size) so
+    // that retries across multiple clients don't occur simultaneously.
+    const retryAfterInMs = clampedDelay / 2 + (0, random_js_1.getRandomIntegerInclusive)(0, clampedDelay / 2);
+    return { retryAfterInMs };
+}
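+// Illustrative usage sketch (comment only, not executed): computing a jittered
+// exponential backoff for the third retry attempt. The configuration values are
+// example numbers.
+//
+//   const { retryAfterInMs } = calculateRetryDelay(3, {
+//     retryDelayInMs: 1000,
+//     maxRetryDelayInMs: 30000,
+//   });
+//   // exponential delay = 1000 * 2^3 = 8000 ms, clamped to 30000 ms, then jittered
+//   // to a value between 4000 ms and 8000 ms.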
+//# sourceMappingURL=delay.js.map
+
+/***/ }),
+
+/***/ 6024:
+/***/ ((__unused_webpack_module, exports, __nccwpck_require__) => {
+
+
+// Copyright (c) Microsoft Corporation.
+// Licensed under the MIT License.
+Object.defineProperty(exports, "__esModule", ({ value: true }));
+exports.isError = isError;
+exports.getErrorMessage = getErrorMessage;
+const object_js_1 = __nccwpck_require__(6943);
+/**
+ * Typeguard for an error object shape (has name and message)
+ * @param e - Something caught by a catch clause.
+ */
+function isError(e) {
+    if ((0, object_js_1.isObject)(e)) {
+        const hasName = typeof e.name === "string";
+        const hasMessage = typeof e.message === "string";
+        return hasName && hasMessage;
+    }
+    return false;
+}
+/**
+ * Given what is thought to be an error object, return the message if possible.
+ * If the message is missing, returns a stringified version of the input.
+ * @param e - Something thrown from a try block
+ * @returns The error message or a string of the input
+ */
+function getErrorMessage(e) {
+    if (isError(e)) {
+        return e.message;
+    }
+    else {
+        let stringified;
+        try {
+            if (typeof e === "object" && e) {
+                stringified = JSON.stringify(e);
+            }
+            else {
+                stringified = String(e);
+            }
+        }
+        catch (err) {
+            stringified = "[unable to stringify input]";
+        }
+        return `Unknown error ${stringified}`;
+    }
+}
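+// Illustrative usage sketch (comment only, not executed): normalizing whatever a
+// catch clause produced into a loggable message.
+//
+//   getErrorMessage(new RangeError("out of range")); // "out of range"
+//   getErrorMessage({ code: 42 });                   // 'Unknown error {"code":42}'
+//   getErrorMessage(undefined);                      // "Unknown error undefined"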
+//# sourceMappingURL=error.js.map
+
+/***/ }),
+
+/***/ 402:
+/***/ ((__unused_webpack_module, exports, __nccwpck_require__) => {
+
+
+// Copyright (c) Microsoft Corporation.
+// Licensed under the MIT License.
+Object.defineProperty(exports, "__esModule", ({ value: true }));
+exports.stringToUint8Array = exports.uint8ArrayToString = exports.isWebWorker = exports.isReactNative = exports.isDeno = exports.isNodeRuntime = exports.isNodeLike = exports.isNode = exports.isBun = exports.isBrowser = exports.randomUUID = exports.objectHasProperty = exports.isObjectWithProperties = exports.isDefined = exports.computeSha256Hmac = exports.computeSha256Hash = exports.getErrorMessage = exports.isError = exports.isObject = exports.getRandomIntegerInclusive = exports.createAbortablePromise = exports.cancelablePromiseRace = exports.calculateRetryDelay = exports.delay = void 0;
+var delay_js_1 = __nccwpck_require__(2389);
+Object.defineProperty(exports, "delay", ({ enumerable: true, get: function () { return delay_js_1.delay; } }));
+Object.defineProperty(exports, "calculateRetryDelay", ({ enumerable: true, get: function () { return delay_js_1.calculateRetryDelay; } }));
+var aborterUtils_js_1 = __nccwpck_require__(6826);
+Object.defineProperty(exports, "cancelablePromiseRace", ({ enumerable: true, get: function () { return aborterUtils_js_1.cancelablePromiseRace; } }));
+var createAbortablePromise_js_1 = __nccwpck_require__(9639);
+Object.defineProperty(exports, "createAbortablePromise", ({ enumerable: true, get: function () { return createAbortablePromise_js_1.createAbortablePromise; } }));
+var random_js_1 = __nccwpck_require__(987);
+Object.defineProperty(exports, "getRandomIntegerInclusive", ({ enumerable: true, get: function () { return random_js_1.getRandomIntegerInclusive; } }));
+var object_js_1 = __nccwpck_require__(6943);
+Object.defineProperty(exports, "isObject", ({ enumerable: true, get: function () { return object_js_1.isObject; } }));
+var error_js_1 = __nccwpck_require__(6024);
+Object.defineProperty(exports, "isError", ({ enumerable: true, get: function () { return error_js_1.isError; } }));
+Object.defineProperty(exports, "getErrorMessage", ({ enumerable: true, get: function () { return error_js_1.getErrorMessage; } }));
+var sha256_js_1 = __nccwpck_require__(9723);
+Object.defineProperty(exports, "computeSha256Hash", ({ enumerable: true, get: function () { return sha256_js_1.computeSha256Hash; } }));
+Object.defineProperty(exports, "computeSha256Hmac", ({ enumerable: true, get: function () { return sha256_js_1.computeSha256Hmac; } }));
+var typeGuards_js_1 = __nccwpck_require__(1218);
+Object.defineProperty(exports, "isDefined", ({ enumerable: true, get: function () { return typeGuards_js_1.isDefined; } }));
+Object.defineProperty(exports, "isObjectWithProperties", ({ enumerable: true, get: function () { return typeGuards_js_1.isObjectWithProperties; } }));
+Object.defineProperty(exports, "objectHasProperty", ({ enumerable: true, get: function () { return typeGuards_js_1.objectHasProperty; } }));
+var uuidUtils_js_1 = __nccwpck_require__(5977);
+Object.defineProperty(exports, "randomUUID", ({ enumerable: true, get: function () { return uuidUtils_js_1.randomUUID; } }));
+var checkEnvironment_js_1 = __nccwpck_require__(5869);
+Object.defineProperty(exports, "isBrowser", ({ enumerable: true, get: function () { return checkEnvironment_js_1.isBrowser; } }));
+Object.defineProperty(exports, "isBun", ({ enumerable: true, get: function () { return checkEnvironment_js_1.isBun; } }));
+Object.defineProperty(exports, "isNode", ({ enumerable: true, get: function () { return checkEnvironment_js_1.isNode; } }));
+Object.defineProperty(exports, "isNodeLike", ({ enumerable: true, get: function () { return checkEnvironment_js_1.isNodeLike; } }));
+Object.defineProperty(exports, "isNodeRuntime", ({ enumerable: true, get: function () { return checkEnvironment_js_1.isNodeRuntime; } }));
+Object.defineProperty(exports, "isDeno", ({ enumerable: true, get: function () { return checkEnvironment_js_1.isDeno; } }));
+Object.defineProperty(exports, "isReactNative", ({ enumerable: true, get: function () { return checkEnvironment_js_1.isReactNative; } }));
+Object.defineProperty(exports, "isWebWorker", ({ enumerable: true, get: function () { return checkEnvironment_js_1.isWebWorker; } }));
+var bytesEncoding_js_1 = __nccwpck_require__(6508);
+Object.defineProperty(exports, "uint8ArrayToString", ({ enumerable: true, get: function () { return bytesEncoding_js_1.uint8ArrayToString; } }));
+Object.defineProperty(exports, "stringToUint8Array", ({ enumerable: true, get: function () { return bytesEncoding_js_1.stringToUint8Array; } }));
+//# sourceMappingURL=index.js.map
+
+/***/ }),
+
+/***/ 6943:
+/***/ ((__unused_webpack_module, exports) => {
+
+
+// Copyright (c) Microsoft Corporation.
+// Licensed under the MIT License.
+Object.defineProperty(exports, "__esModule", ({ value: true }));
+exports.isObject = isObject;
+/**
+ * Helper to determine whether an input is a generic JS object.
+ * @returns true when input is an object type that is not null, Array, RegExp, or Date.
+ */
+function isObject(input) {
+    return (typeof input === "object" &&
+        input !== null &&
+        !Array.isArray(input) &&
+        !(input instanceof RegExp) &&
+        !(input instanceof Date));
+}
+//# sourceMappingURL=object.js.map
+
+/***/ }),
+
+/***/ 987:
+/***/ ((__unused_webpack_module, exports) => {
+
+
+// Copyright (c) Microsoft Corporation.
+// Licensed under the MIT License.
+Object.defineProperty(exports, "__esModule", ({ value: true }));
+exports.getRandomIntegerInclusive = getRandomIntegerInclusive;
+/**
+ * Returns a random integer value between a lower and upper bound,
+ * inclusive of both bounds.
+ * Note that this uses Math.random and isn't secure. If you need to use
+ * this for any kind of security purpose, find a better source of random.
+ * @param min - The smallest integer value allowed.
+ * @param max - The largest integer value allowed.
+ */
+function getRandomIntegerInclusive(min, max) {
+    // Make sure inputs are integers.
+    min = Math.ceil(min);
+    max = Math.floor(max);
+    // Pick a random offset from zero to the size of the range.
+    // Since Math.random() can never return 1, we have to make the range one larger
+    // in order to be inclusive of the maximum value after we take the floor.
+    const offset = Math.floor(Math.random() * (max - min + 1));
+    return offset + min;
+}
+//# sourceMappingURL=random.js.map
+
+/***/ }),
+
+/***/ 9723:
+/***/ ((__unused_webpack_module, exports, __nccwpck_require__) => {
+
+
+// Copyright (c) Microsoft Corporation.
+// Licensed under the MIT License.
+Object.defineProperty(exports, "__esModule", ({ value: true }));
+exports.computeSha256Hmac = computeSha256Hmac;
+exports.computeSha256Hash = computeSha256Hash;
+const crypto_1 = __nccwpck_require__(6982);
+/**
+ * Generates a SHA-256 HMAC signature.
+ * @param key - The HMAC key represented as a base64 string, used to generate the cryptographic HMAC hash.
+ * @param stringToSign - The data to be signed.
+ * @param encoding - The textual encoding to use for the returned HMAC digest.
+ */
+async function computeSha256Hmac(key, stringToSign, encoding) {
+    const decodedKey = Buffer.from(key, "base64");
+    return (0, crypto_1.createHmac)("sha256", decodedKey).update(stringToSign).digest(encoding);
+}
+/**
+ * Generates a SHA-256 hash.
+ * @param content - The data to be included in the hash.
+ * @param encoding - The textual encoding to use for the returned hash.
+ */
+async function computeSha256Hash(content, encoding) {
+    return (0, crypto_1.createHash)("sha256").update(content).digest(encoding);
+}
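+// Illustrative usage sketch (comment only, not executed): hashing and signing a
+// payload. The key and payload are example values.
+//
+//   await computeSha256Hash("payload", "hex");
+//   await computeSha256Hmac(Buffer.from("shared-secret").toString("base64"), "payload", "base64");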
+//# sourceMappingURL=sha256.js.map
+
+/***/ }),
+
+/***/ 1218:
+/***/ ((__unused_webpack_module, exports) => {
+
+
+// Copyright (c) Microsoft Corporation.
+// Licensed under the MIT License.
+Object.defineProperty(exports, "__esModule", ({ value: true }));
+exports.isDefined = isDefined;
+exports.isObjectWithProperties = isObjectWithProperties;
+exports.objectHasProperty = objectHasProperty;
+/**
+ * Helper TypeGuard that checks if something is defined or not.
+ * @param thing - Anything
+ */
+function isDefined(thing) {
+    return typeof thing !== "undefined" && thing !== null;
+}
+/**
+ * Helper TypeGuard that checks if the input is an object with the specified properties.
+ * @param thing - Anything.
+ * @param properties - The name of the properties that should appear in the object.
+ */
+function isObjectWithProperties(thing, properties) {
+    if (!isDefined(thing) || typeof thing !== "object") {
+        return false;
+    }
+    for (const property of properties) {
+        if (!objectHasProperty(thing, property)) {
+            return false;
+        }
+    }
+    return true;
+}
+/**
+ * Helper TypeGuard that checks if the input is an object with the specified property.
+ * @param thing - Any object.
+ * @param property - The name of the property that should appear in the object.
+ */
+function objectHasProperty(thing, property) {
+    return (isDefined(thing) && typeof thing === "object" && property in thing);
+}
+//# sourceMappingURL=typeGuards.js.map
+
+/***/ }),
+
+/***/ 5977:
+/***/ ((__unused_webpack_module, exports, __nccwpck_require__) => {
+
+
+// Copyright (c) Microsoft Corporation.
+// Licensed under the MIT License.
+var _a;
+Object.defineProperty(exports, "__esModule", ({ value: true }));
+exports.randomUUID = randomUUID;
+const crypto_1 = __nccwpck_require__(6982);
+// NOTE: This is a workaround until we can use `globalThis.crypto.randomUUID` in Node.js 19+.
+const uuidFunction = typeof ((_a = globalThis === null || globalThis === void 0 ? void 0 : globalThis.crypto) === null || _a === void 0 ? void 0 : _a.randomUUID) === "function"
+    ? globalThis.crypto.randomUUID.bind(globalThis.crypto)
+    : crypto_1.randomUUID;
+/**
+ * Generates a Universally Unique Identifier (UUID).
+ *
+ * @returns RFC4122 v4 UUID.
+ */
+function randomUUID() {
+    return uuidFunction();
+}
+//# sourceMappingURL=uuidUtils.js.map
+
+/***/ }),
+
+/***/ 9343:
+/***/ ((__unused_webpack_module, exports, __nccwpck_require__) => {
+
+
+// Copyright (c) Microsoft Corporation.
+// Licensed under the MIT License.
+Object.defineProperty(exports, "__esModule", ({ value: true }));
+exports.XML_CHARKEY = exports.XML_ATTRKEY = exports.parseXML = exports.stringifyXML = void 0;
+var xml_js_1 = __nccwpck_require__(3709);
+Object.defineProperty(exports, "stringifyXML", ({ enumerable: true, get: function () { return xml_js_1.stringifyXML; } }));
+Object.defineProperty(exports, "parseXML", ({ enumerable: true, get: function () { return xml_js_1.parseXML; } }));
+var xml_common_js_1 = __nccwpck_require__(2486);
+Object.defineProperty(exports, "XML_ATTRKEY", ({ enumerable: true, get: function () { return xml_common_js_1.XML_ATTRKEY; } }));
+Object.defineProperty(exports, "XML_CHARKEY", ({ enumerable: true, get: function () { return xml_common_js_1.XML_CHARKEY; } }));
+//# sourceMappingURL=index.js.map
+
+/***/ }),
+
+/***/ 2486:
+/***/ ((__unused_webpack_module, exports) => {
+
+
+// Copyright (c) Microsoft Corporation.
+// Licensed under the MIT License.
+Object.defineProperty(exports, "__esModule", ({ value: true }));
+exports.XML_CHARKEY = exports.XML_ATTRKEY = void 0;
+/**
+ * Default key used to access the XML attributes.
+ */
+exports.XML_ATTRKEY = "$";
+/**
+ * Default key used to access the XML value content.
+ */
+exports.XML_CHARKEY = "_";
+//# sourceMappingURL=xml.common.js.map
+
+/***/ }),
+
+/***/ 3709:
+/***/ ((__unused_webpack_module, exports, __nccwpck_require__) => {
+
+
+// Copyright (c) Microsoft Corporation.
+// Licensed under the MIT License.
+Object.defineProperty(exports, "__esModule", ({ value: true }));
+exports.stringifyXML = stringifyXML;
+exports.parseXML = parseXML;
+const fast_xml_parser_1 = __nccwpck_require__(7208);
+const xml_common_js_1 = __nccwpck_require__(2486);
+function getCommonOptions(options) {
+    var _a;
+    return {
+        attributesGroupName: xml_common_js_1.XML_ATTRKEY,
+        textNodeName: (_a = options.xmlCharKey) !== null && _a !== void 0 ? _a : xml_common_js_1.XML_CHARKEY,
+        ignoreAttributes: false,
+        suppressBooleanAttributes: false,
+    };
+}
+function getSerializerOptions(options = {}) {
+    var _a, _b;
+    return Object.assign(Object.assign({}, getCommonOptions(options)), { attributeNamePrefix: "@_", format: true, suppressEmptyNode: true, indentBy: "", rootNodeName: (_a = options.rootName) !== null && _a !== void 0 ? _a : "root", cdataPropName: (_b = options.cdataPropName) !== null && _b !== void 0 ? _b : "__cdata" });
+}
+function getParserOptions(options = {}) {
+    return Object.assign(Object.assign({}, getCommonOptions(options)), { parseAttributeValue: false, parseTagValue: false, attributeNamePrefix: "", stopNodes: options.stopNodes, processEntities: true });
+}
+/**
+ * Converts given JSON object to XML string
+ * @param obj - JSON object to be converted into XML string
+ * @param opts - Options that govern the XML building of given JSON object
+ * `rootName` indicates the name of the root element in the resulting XML
+ */
+function stringifyXML(obj, opts = {}) {
+    const parserOptions = getSerializerOptions(opts);
+    const j2x = new fast_xml_parser_1.XMLBuilder(parserOptions);
+    const node = { [parserOptions.rootNodeName]: obj };
+    const xmlData = j2x.build(node);
+    return `<?xml version="1.0" encoding="UTF-8" standalone="yes"?>${xmlData}`.replace(/\n/g, "");
+}
+/**
+ * Converts given XML string into JSON
+ * @param str - String containing the XML content to be parsed into JSON
+ * @param opts - Options that govern the parsing of given xml string
+ * `includeRoot` indicates whether the root element should be included in the output
+ */
+async function parseXML(str, opts = {}) {
+    if (!str) {
+        throw new Error("Document is empty");
+    }
+    const validation = fast_xml_parser_1.XMLValidator.validate(str);
+    if (validation !== true) {
+        throw validation;
+    }
+    const parser = new fast_xml_parser_1.XMLParser(getParserOptions(opts));
+    const parsedXml = parser.parse(str);
+    // Remove the <?xml version="..." ?> node.
+    // This is a change in behavior on fxp v4. Issue #424
+    if (parsedXml["?xml"]) {
+        delete parsedXml["?xml"];
+    }
+    if (!opts.includeRoot) {
+        for (const key of Object.keys(parsedXml)) {
+            const value = parsedXml[key];
+            return typeof value === "object" ? Object.assign({}, value) : value;
+        }
+    }
+    return parsedXml;
+}
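+// A minimal usage sketch for the two helpers above (option names are the ones
+// handled by getSerializerOptions/getParserOptions; exact whitespace in the
+// output may differ):
+//
+//   const xml = stringifyXML({ a: "1" }, { rootName: "root" });
+//   // -> '<?xml version="1.0" encoding="UTF-8" standalone="yes"?><root><a>1</a></root>'
+//
+//   const obj = await parseXML("<root><a>1</a></root>");
+//   // -> { a: "1" }  (the root element is unwrapped unless `includeRoot` is set)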
+//# sourceMappingURL=xml.js.map
+
+/***/ }),
+
+/***/ 6676:
+/***/ ((__unused_webpack_module, exports, __nccwpck_require__) => {
+
+
+// Copyright (c) Microsoft Corporation.
+// Licensed under the MIT license.
+Object.defineProperty(exports, "__esModule", ({ value: true }));
+const log_js_1 = __nccwpck_require__(1165);
+const debugEnvVariable = (typeof process !== "undefined" && process.env && process.env.DEBUG) || undefined;
+let enabledString;
+let enabledNamespaces = [];
+let skippedNamespaces = [];
+const debuggers = [];
+if (debugEnvVariable) {
+    enable(debugEnvVariable);
+}
+const debugObj = Object.assign((namespace) => {
+    return createDebugger(namespace);
+}, {
+    enable,
+    enabled,
+    disable,
+    log: log_js_1.log,
+});
+function enable(namespaces) {
+    enabledString = namespaces;
+    enabledNamespaces = [];
+    skippedNamespaces = [];
+    const wildcard = /\*/g;
+    const namespaceList = namespaces.split(",").map((ns) => ns.trim().replace(wildcard, ".*?"));
+    for (const ns of namespaceList) {
+        if (ns.startsWith("-")) {
+            skippedNamespaces.push(new RegExp(`^${ns.substr(1)}$`));
+        }
+        else {
+            enabledNamespaces.push(new RegExp(`^${ns}$`));
+        }
+    }
+    for (const instance of debuggers) {
+        instance.enabled = enabled(instance.namespace);
+    }
+}
+function enabled(namespace) {
+    if (namespace.endsWith("*")) {
+        return true;
+    }
+    for (const skipped of skippedNamespaces) {
+        if (skipped.test(namespace)) {
+            return false;
+        }
+    }
+    for (const enabledNamespace of enabledNamespaces) {
+        if (enabledNamespace.test(namespace)) {
+            return true;
+        }
+    }
+    return false;
+}
+function disable() {
+    const result = enabledString || "";
+    enable("");
+    return result;
+}
+function createDebugger(namespace) {
+    const newDebugger = Object.assign(debug, {
+        enabled: enabled(namespace),
+        destroy,
+        log: debugObj.log,
+        namespace,
+        extend,
+    });
+    function debug(...args) {
+        if (!newDebugger.enabled) {
+            return;
+        }
+        if (args.length > 0) {
+            args[0] = `${namespace} ${args[0]}`;
+        }
+        newDebugger.log(...args);
+    }
+    debuggers.push(newDebugger);
+    return newDebugger;
+}
+function destroy() {
+    const index = debuggers.indexOf(this);
+    if (index >= 0) {
+        debuggers.splice(index, 1);
+        return true;
+    }
+    return false;
+}
+function extend(namespace) {
+    const newDebugger = createDebugger(`${this.namespace}:${namespace}`);
+    newDebugger.log = this.log;
+    return newDebugger;
+}
+exports["default"] = debugObj;
+//# sourceMappingURL=debug.js.map
+
+/***/ }),
+
+/***/ 5851:
+/***/ ((__unused_webpack_module, exports, __nccwpck_require__) => {
+
+
+// Copyright (c) Microsoft Corporation.
+// Licensed under the MIT license.
+Object.defineProperty(exports, "__esModule", ({ value: true }));
+exports.AzureLogger = void 0;
+exports.setLogLevel = setLogLevel;
+exports.getLogLevel = getLogLevel;
+exports.createClientLogger = createClientLogger;
+const tslib_1 = __nccwpck_require__(1577);
+const debug_js_1 = tslib_1.__importDefault(__nccwpck_require__(6676));
+const registeredLoggers = new Set();
+const logLevelFromEnv = (typeof process !== "undefined" && process.env && process.env.AZURE_LOG_LEVEL) || undefined;
+let azureLogLevel;
+/**
+ * The AzureLogger provides a mechanism for overriding where logs are output to.
+ * By default, logs are sent to stderr.
+ * Override the `log` method to redirect logs to another location.
+ */
+exports.AzureLogger = (0, debug_js_1.default)("azure");
+exports.AzureLogger.log = (...args) => {
+    debug_js_1.default.log(...args);
+};
+const AZURE_LOG_LEVELS = ["verbose", "info", "warning", "error"];
+if (logLevelFromEnv) {
+    // avoid calling setLogLevel because we don't want a mis-set environment variable to crash the process
+    if (isAzureLogLevel(logLevelFromEnv)) {
+        setLogLevel(logLevelFromEnv);
+    }
+    else {
+        console.error(`AZURE_LOG_LEVEL set to unknown log level '${logLevelFromEnv}'; logging is not enabled. Acceptable values: ${AZURE_LOG_LEVELS.join(", ")}.`);
+    }
+}
+/**
+ * Immediately enables logging at the specified log level. If no level is specified, logging is disabled.
+ * @param level - The log level to enable for logging.
+ * Options from most verbose to least verbose are:
+ * - verbose
+ * - info
+ * - warning
+ * - error
+ */
+function setLogLevel(level) {
+    if (level && !isAzureLogLevel(level)) {
+        throw new Error(`Unknown log level '${level}'. Acceptable values: ${AZURE_LOG_LEVELS.join(",")}`);
+    }
+    azureLogLevel = level;
+    const enabledNamespaces = [];
+    for (const logger of registeredLoggers) {
+        if (shouldEnable(logger)) {
+            enabledNamespaces.push(logger.namespace);
+        }
+    }
+    debug_js_1.default.enable(enabledNamespaces.join(","));
+}
+/**
+ * Retrieves the currently specified log level.
+ */
+function getLogLevel() {
+    return azureLogLevel;
+}
+const levelMap = {
+    verbose: 400,
+    info: 300,
+    warning: 200,
+    error: 100,
+};
+/**
+ * Creates a logger for use by the Azure SDKs that inherits from `AzureLogger`.
+ * @param namespace - The name of the SDK package.
+ * @hidden
+ */
+function createClientLogger(namespace) {
+    const clientRootLogger = exports.AzureLogger.extend(namespace);
+    patchLogMethod(exports.AzureLogger, clientRootLogger);
+    return {
+        error: createLogger(clientRootLogger, "error"),
+        warning: createLogger(clientRootLogger, "warning"),
+        info: createLogger(clientRootLogger, "info"),
+        verbose: createLogger(clientRootLogger, "verbose"),
+    };
+}
+function patchLogMethod(parent, child) {
+    child.log = (...args) => {
+        parent.log(...args);
+    };
+}
+function createLogger(parent, level) {
+    const logger = Object.assign(parent.extend(level), {
+        level,
+    });
+    patchLogMethod(parent, logger);
+    if (shouldEnable(logger)) {
+        const enabledNamespaces = debug_js_1.default.disable();
+        debug_js_1.default.enable(enabledNamespaces + "," + logger.namespace);
+    }
+    registeredLoggers.add(logger);
+    return logger;
+}
+function shouldEnable(logger) {
+    return Boolean(azureLogLevel && levelMap[logger.level] <= levelMap[azureLogLevel]);
+}
+function isAzureLogLevel(logLevel) {
+    return AZURE_LOG_LEVELS.includes(logLevel);
+}
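+// A minimal usage sketch for the logger API above (the package name is an
+// assumption; the functions and levels are the ones defined in this module):
+//
+//   const { AzureLogger, createClientLogger, setLogLevel } = require("@azure/logger");
+//   setLogLevel("info");                 // enables the error, warning and info namespaces
+//   const logger = createClientLogger("my-sdk");
+//   logger.info("request sent");         // -> "azure:my-sdk:info request sent" on stderr
+//   logger.verbose("noisy details");     // suppressed: verbose (400) > info (300)
+//   AzureLogger.log = (...args) => { /* redirect all SDK logs elsewhere */ };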
+//# sourceMappingURL=index.js.map
+
+/***/ }),
+
+/***/ 1165:
+/***/ ((__unused_webpack_module, exports, __nccwpck_require__) => {
+
+
+// Copyright (c) Microsoft Corporation.
+// Licensed under the MIT license.
+Object.defineProperty(exports, "__esModule", ({ value: true }));
+exports.log = log;
+const tslib_1 = __nccwpck_require__(1577);
+const node_os_1 = __nccwpck_require__(8161);
+const node_util_1 = tslib_1.__importDefault(__nccwpck_require__(7975));
+const process = tslib_1.__importStar(__nccwpck_require__(1708));
+function log(message, ...args) {
+    process.stderr.write(`${node_util_1.default.format(message, ...args)}${node_os_1.EOL}`);
+}
+//# sourceMappingURL=log.js.map
+
+/***/ }),
+
+/***/ 6917:
+/***/ ((module, __unused_webpack_exports, __nccwpck_require__) => {
+
+
+
+const WritableStream = (__nccwpck_require__(7075).Writable)
+const inherits = (__nccwpck_require__(7975).inherits)
+
+const StreamSearch = __nccwpck_require__(439)
+
+const PartStream = __nccwpck_require__(5341)
+const HeaderParser = __nccwpck_require__(6890)
+
+const DASH = 45
+const B_ONEDASH = Buffer.from('-')
+const B_CRLF = Buffer.from('\r\n')
+const EMPTY_FN = function () {}
+
+function Dicer (cfg) {
+  if (!(this instanceof Dicer)) { return new Dicer(cfg) }
+  WritableStream.call(this, cfg)
+
+  if (!cfg || (!cfg.headerFirst && typeof cfg.boundary !== 'string')) { throw new TypeError('Boundary required') }
+
+  if (typeof cfg.boundary === 'string') { this.setBoundary(cfg.boundary) } else { this._bparser = undefined }
+
+  this._headerFirst = cfg.headerFirst
+
+  this._dashes = 0
+  this._parts = 0
+  this._finished = false
+  this._realFinish = false
+  this._isPreamble = true
+  this._justMatched = false
+  this._firstWrite = true
+  this._inHeader = true
+  this._part = undefined
+  this._cb = undefined
+  this._ignoreData = false
+  this._partOpts = { highWaterMark: cfg.partHwm }
+  this._pause = false
+
+  const self = this
+  this._hparser = new HeaderParser(cfg)
+  this._hparser.on('header', function (header) {
+    self._inHeader = false
+    self._part.emit('header', header)
+  })
+}
+inherits(Dicer, WritableStream)
+
+Dicer.prototype.emit = function (ev) {
+  if (ev === 'finish' && !this._realFinish) {
+    if (!this._finished) {
+      const self = this
+      process.nextTick(function () {
+        self.emit('error', new Error('Unexpected end of multipart data'))
+        if (self._part && !self._ignoreData) {
+          const type = (self._isPreamble ? 'Preamble' : 'Part')
+          self._part.emit('error', new Error(type + ' terminated early due to unexpected end of multipart data'))
+          self._part.push(null)
+          process.nextTick(function () {
+            self._realFinish = true
+            self.emit('finish')
+            self._realFinish = false
+          })
+          return
+        }
+        self._realFinish = true
+        self.emit('finish')
+        self._realFinish = false
+      })
+    }
+  } else { WritableStream.prototype.emit.apply(this, arguments) }
+}
+
+Dicer.prototype._write = function (data, encoding, cb) {
+  // ignore unexpected data (e.g. extra trailer data after finished)
+  if (!this._hparser && !this._bparser) { return cb() }
+
+  if (this._headerFirst && this._isPreamble) {
+    if (!this._part) {
+      this._part = new PartStream(this._partOpts)
+      if (this.listenerCount('preamble') !== 0) { this.emit('preamble', this._part) } else { this._ignore() }
+    }
+    const r = this._hparser.push(data)
+    if (!this._inHeader && r !== undefined && r < data.length) { data = data.slice(r) } else { return cb() }
+  }
+
+  // allows for "easier" testing
+  if (this._firstWrite) {
+    this._bparser.push(B_CRLF)
+    this._firstWrite = false
+  }
+
+  this._bparser.push(data)
+
+  if (this._pause) { this._cb = cb } else { cb() }
+}
+
+Dicer.prototype.reset = function () {
+  this._part = undefined
+  this._bparser = undefined
+  this._hparser = undefined
+}
+
+Dicer.prototype.setBoundary = function (boundary) {
+  const self = this
+  this._bparser = new StreamSearch('\r\n--' + boundary)
+  this._bparser.on('info', function (isMatch, data, start, end) {
+    self._oninfo(isMatch, data, start, end)
+  })
+}
+
+Dicer.prototype._ignore = function () {
+  if (this._part && !this._ignoreData) {
+    this._ignoreData = true
+    this._part.on('error', EMPTY_FN)
+    // we must perform some kind of read on the stream even though we are
+    // ignoring the data, otherwise node's Readable stream will not emit 'end'
+    // after pushing null to the stream
+    this._part.resume()
+  }
+}
+
+Dicer.prototype._oninfo = function (isMatch, data, start, end) {
+  let buf; const self = this; let i = 0; let r; let shouldWriteMore = true
+
+  if (!this._part && this._justMatched && data) {
+    while (this._dashes < 2 && (start + i) < end) {
+      if (data[start + i] === DASH) {
+        ++i
+        ++this._dashes
+      } else {
+        if (this._dashes) { buf = B_ONEDASH }
+        this._dashes = 0
+        break
+      }
+    }
+    if (this._dashes === 2) {
+      if ((start + i) < end && this.listenerCount('trailer') !== 0) { this.emit('trailer', data.slice(start + i, end)) }
+      this.reset()
+      this._finished = true
+      // no more parts will be added
+      if (self._parts === 0) {
+        self._realFinish = true
+        self.emit('finish')
+        self._realFinish = false
+      }
+    }
+    if (this._dashes) { return }
+  }
+  if (this._justMatched) { this._justMatched = false }
+  if (!this._part) {
+    this._part = new PartStream(this._partOpts)
+    this._part._read = function (n) {
+      self._unpause()
+    }
+    if (this._isPreamble && this.listenerCount('preamble') !== 0) {
+      this.emit('preamble', this._part)
+    } else if (this._isPreamble !== true && this.listenerCount('part') !== 0) {
+      this.emit('part', this._part)
+    } else {
+      this._ignore()
+    }
+    if (!this._isPreamble) { this._inHeader = true }
+  }
+  if (data && start < end && !this._ignoreData) {
+    if (this._isPreamble || !this._inHeader) {
+      if (buf) { shouldWriteMore = this._part.push(buf) }
+      shouldWriteMore = this._part.push(data.slice(start, end))
+      if (!shouldWriteMore) { this._pause = true }
+    } else if (!this._isPreamble && this._inHeader) {
+      if (buf) { this._hparser.push(buf) }
+      r = this._hparser.push(data.slice(start, end))
+      if (!this._inHeader && r !== undefined && r < end) { this._oninfo(false, data, start + r, end) }
+    }
+  }
+  if (isMatch) {
+    this._hparser.reset()
+    if (this._isPreamble) { this._isPreamble = false } else {
+      if (start !== end) {
+        ++this._parts
+        this._part.on('end', function () {
+          if (--self._parts === 0) {
+            if (self._finished) {
+              self._realFinish = true
+              self.emit('finish')
+              self._realFinish = false
+            } else {
+              self._unpause()
+            }
+          }
+        })
+      }
+    }
+    this._part.push(null)
+    this._part = undefined
+    this._ignoreData = false
+    this._justMatched = true
+    this._dashes = 0
+  }
+}
+
+Dicer.prototype._unpause = function () {
+  if (!this._pause) { return }
+
+  this._pause = false
+  if (this._cb) {
+    const cb = this._cb
+    this._cb = undefined
+    cb()
+  }
+}
+
+module.exports = Dicer
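+// A minimal usage sketch for Dicer, the raw multipart framing layer used by
+// Busboy below (the boundary and payload here are made up):
+//
+//   const d = new Dicer({ boundary: 'xyz' })
+//   d.on('part', function (part) {
+//     part.on('header', function (header) { /* e.g. header['content-type'] */ })
+//     part.on('data', function (chunk) { /* raw body bytes of this part */ })
+//     part.on('end', function () {})
+//   })
+//   d.on('finish', function () {})
+//   d.end(Buffer.from('--xyz\r\ncontent-type: text/plain\r\n\r\nhello\r\n--xyz--'))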
+
+
+/***/ }),
+
+/***/ 6890:
+/***/ ((module, __unused_webpack_exports, __nccwpck_require__) => {
+
+
+
+const EventEmitter = (__nccwpck_require__(8474).EventEmitter)
+const inherits = (__nccwpck_require__(7975).inherits)
+const getLimit = __nccwpck_require__(1954)
+
+const StreamSearch = __nccwpck_require__(439)
+
+const B_DCRLF = Buffer.from('\r\n\r\n')
+const RE_CRLF = /\r\n/g
+const RE_HDR = /^([^:]+):[ \t]?([\x00-\xFF]+)?$/ // eslint-disable-line no-control-regex
+
+function HeaderParser (cfg) {
+  EventEmitter.call(this)
+
+  cfg = cfg || {}
+  const self = this
+  this.nread = 0
+  this.maxed = false
+  this.npairs = 0
+  this.maxHeaderPairs = getLimit(cfg, 'maxHeaderPairs', 2000)
+  this.maxHeaderSize = getLimit(cfg, 'maxHeaderSize', 80 * 1024)
+  this.buffer = ''
+  this.header = {}
+  this.finished = false
+  this.ss = new StreamSearch(B_DCRLF)
+  this.ss.on('info', function (isMatch, data, start, end) {
+    if (data && !self.maxed) {
+      if (self.nread + end - start >= self.maxHeaderSize) {
+        end = self.maxHeaderSize - self.nread + start
+        self.nread = self.maxHeaderSize
+        self.maxed = true
+      } else { self.nread += (end - start) }
+
+      self.buffer += data.toString('binary', start, end)
+    }
+    if (isMatch) { self._finish() }
+  })
+}
+inherits(HeaderParser, EventEmitter)
+
+HeaderParser.prototype.push = function (data) {
+  const r = this.ss.push(data)
+  if (this.finished) { return r }
+}
+
+HeaderParser.prototype.reset = function () {
+  this.finished = false
+  this.buffer = ''
+  this.header = {}
+  this.ss.reset()
+}
+
+HeaderParser.prototype._finish = function () {
+  if (this.buffer) { this._parseHeader() }
+  this.ss.matches = this.ss.maxMatches
+  const header = this.header
+  this.header = {}
+  this.buffer = ''
+  this.finished = true
+  this.nread = this.npairs = 0
+  this.maxed = false
+  this.emit('header', header)
+}
+
+HeaderParser.prototype._parseHeader = function () {
+  if (this.npairs === this.maxHeaderPairs) { return }
+
+  const lines = this.buffer.split(RE_CRLF)
+  const len = lines.length
+  let m, h
+
+  for (var i = 0; i < len; ++i) { // eslint-disable-line no-var
+    if (lines[i].length === 0) { continue }
+    if (lines[i][0] === '\t' || lines[i][0] === ' ') {
+      // folded header content
+      // RFC2822 says to just remove the CRLF and not the whitespace following
+      // it, so we follow the RFC and include the leading whitespace ...
+      if (h) {
+        this.header[h][this.header[h].length - 1] += lines[i]
+        continue
+      }
+    }
+
+    const posColon = lines[i].indexOf(':')
+    if (
+      posColon === -1 ||
+      posColon === 0
+    ) {
+      return
+    }
+    m = RE_HDR.exec(lines[i])
+    h = m[1].toLowerCase()
+    this.header[h] = this.header[h] || []
+    this.header[h].push((m[2] || ''))
+    if (++this.npairs === this.maxHeaderPairs) { break }
+  }
+}
+
+module.exports = HeaderParser
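+// A minimal sketch: HeaderParser buffers bytes until the blank line ('\r\n\r\n')
+// and then emits a single 'header' event mapping lower-cased names to arrays of
+// values:
+//
+//   const hp = new HeaderParser({})
+//   hp.on('header', function (header) {
+//     // -> { 'content-type': ['text/plain'] }
+//   })
+//   hp.push(Buffer.from('Content-Type: text/plain\r\n\r\n'))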
+
+
+/***/ }),
+
+/***/ 5341:
+/***/ ((module, __unused_webpack_exports, __nccwpck_require__) => {
+
+
+
+const inherits = (__nccwpck_require__(7975).inherits)
+const ReadableStream = (__nccwpck_require__(7075).Readable)
+
+function PartStream (opts) {
+  ReadableStream.call(this, opts)
+}
+inherits(PartStream, ReadableStream)
+
+PartStream.prototype._read = function (n) {}
+
+module.exports = PartStream
+
+
+/***/ }),
+
+/***/ 439:
+/***/ ((module, __unused_webpack_exports, __nccwpck_require__) => {
+
+
+
+/**
+ * Copyright Brian White. All rights reserved.
+ *
+ * @see https://github.com/mscdex/streamsearch
+ *
+ * Permission is hereby granted, free of charge, to any person obtaining a copy
+ * of this software and associated documentation files (the "Software"), to
+ * deal in the Software without restriction, including without limitation the
+ * rights to use, copy, modify, merge, publish, distribute, sublicense, and/or
+ * sell copies of the Software, and to permit persons to whom the Software is
+ * furnished to do so, subject to the following conditions:
+ *
+ * The above copyright notice and this permission notice shall be included in
+ * all copies or substantial portions of the Software.
+ *
+ * THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
+ * IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
+ * FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
+ * AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
+ * LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING
+ * FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS
+ * IN THE SOFTWARE.
+ *
+ * Based heavily on the Streaming Boyer-Moore-Horspool C++ implementation
+ * by Hongli Lai at: https://github.com/FooBarWidget/boyer-moore-horspool
+ */
+const EventEmitter = (__nccwpck_require__(8474).EventEmitter)
+const inherits = (__nccwpck_require__(7975).inherits)
+
+function SBMH (needle) {
+  if (typeof needle === 'string') {
+    needle = Buffer.from(needle)
+  }
+
+  if (!Buffer.isBuffer(needle)) {
+    throw new TypeError('The needle has to be a String or a Buffer.')
+  }
+
+  const needleLength = needle.length
+
+  if (needleLength === 0) {
+    throw new Error('The needle cannot be an empty String/Buffer.')
+  }
+
+  if (needleLength > 256) {
+    throw new Error('The needle cannot have a length bigger than 256.')
+  }
+
+  this.maxMatches = Infinity
+  this.matches = 0
+
+  this._occ = new Array(256)
+    .fill(needleLength) // Initialize occurrence table.
+  this._lookbehind_size = 0
+  this._needle = needle
+  this._bufpos = 0
+
+  this._lookbehind = Buffer.alloc(needleLength)
+
+  // Populate occurrence table with analysis of the needle,
+  // ignoring last letter.
+  for (var i = 0; i < needleLength - 1; ++i) { // eslint-disable-line no-var
+    this._occ[needle[i]] = needleLength - 1 - i
+  }
+}
+inherits(SBMH, EventEmitter)
+
+SBMH.prototype.reset = function () {
+  this._lookbehind_size = 0
+  this.matches = 0
+  this._bufpos = 0
+}
+
+SBMH.prototype.push = function (chunk, pos) {
+  if (!Buffer.isBuffer(chunk)) {
+    chunk = Buffer.from(chunk, 'binary')
+  }
+  const chlen = chunk.length
+  this._bufpos = pos || 0
+  let r
+  while (r !== chlen && this.matches < this.maxMatches) { r = this._sbmh_feed(chunk) }
+  return r
+}
+
+SBMH.prototype._sbmh_feed = function (data) {
+  const len = data.length
+  const needle = this._needle
+  const needleLength = needle.length
+  const lastNeedleChar = needle[needleLength - 1]
+
+  // Positive: points to a position in `data`
+  //           pos == 3 points to data[3]
+  // Negative: points to a position in the lookbehind buffer
+  //           pos == -2 points to lookbehind[lookbehind_size - 2]
+  let pos = -this._lookbehind_size
+  let ch
+
+  if (pos < 0) {
+    // Lookbehind buffer is not empty. Perform Boyer-Moore-Horspool
+    // search with character lookup code that considers both the
+    // lookbehind buffer and the current round's haystack data.
+    //
+    // Loop until
+    //   there is a match.
+    // or until
+    //   we've moved past the position that requires the
+    //   lookbehind buffer. In this case we switch to the
+    //   optimized loop.
+    // or until
+    //   the character to look at lies outside the haystack.
+    while (pos < 0 && pos <= len - needleLength) {
+      ch = this._sbmh_lookup_char(data, pos + needleLength - 1)
+
+      if (
+        ch === lastNeedleChar &&
+        this._sbmh_memcmp(data, pos, needleLength - 1)
+      ) {
+        this._lookbehind_size = 0
+        ++this.matches
+        this.emit('info', true)
+
+        return (this._bufpos = pos + needleLength)
+      }
+      pos += this._occ[ch]
+    }
+
+    // No match.
+
+    if (pos < 0) {
+      // There's too little data for Boyer-Moore-Horspool to run,
+      // so let's use a different algorithm to skip as much as
+      // we can.
+      // Forward pos until
+      //   the trailing part of lookbehind + data
+      //   looks like the beginning of the needle
+      // or until
+      //   pos == 0
+      while (pos < 0 && !this._sbmh_memcmp(data, pos, len - pos)) { ++pos }
+    }
+
+    if (pos >= 0) {
+      // Discard lookbehind buffer.
+      this.emit('info', false, this._lookbehind, 0, this._lookbehind_size)
+      this._lookbehind_size = 0
+    } else {
+      // Cut off part of the lookbehind buffer that has
+      // been processed and append the entire haystack
+      // into it.
+      const bytesToCutOff = this._lookbehind_size + pos
+      if (bytesToCutOff > 0) {
+        // The cut off data is guaranteed not to contain the needle.
+        this.emit('info', false, this._lookbehind, 0, bytesToCutOff)
+      }
+
+      this._lookbehind.copy(this._lookbehind, 0, bytesToCutOff,
+        this._lookbehind_size - bytesToCutOff)
+      this._lookbehind_size -= bytesToCutOff
+
+      data.copy(this._lookbehind, this._lookbehind_size)
+      this._lookbehind_size += len
+
+      this._bufpos = len
+      return len
+    }
+  }
+
+  pos += (pos >= 0) * this._bufpos
+
+  // Lookbehind buffer is now empty. We only need to check if the
+  // needle is in the haystack.
+  if (data.indexOf(needle, pos) !== -1) {
+    pos = data.indexOf(needle, pos)
+    ++this.matches
+    if (pos > 0) { this.emit('info', true, data, this._bufpos, pos) } else { this.emit('info', true) }
+
+    return (this._bufpos = pos + needleLength)
+  } else {
+    pos = len - needleLength
+  }
+
+  // There was no match. If there's trailing haystack data that we cannot
+  // match yet using the Boyer-Moore-Horspool algorithm (because the trailing
+  // data is less than the needle size) then match using a modified
+  // algorithm that starts matching from the beginning instead of the end.
+  // Whatever trailing data is left after running this algorithm is added to
+  // the lookbehind buffer.
+  while (
+    pos < len &&
+    (
+      data[pos] !== needle[0] ||
+      (
+        (Buffer.compare(
+          data.subarray(pos, pos + len - pos),
+          needle.subarray(0, len - pos)
+        ) !== 0)
+      )
+    )
+  ) {
+    ++pos
+  }
+  if (pos < len) {
+    data.copy(this._lookbehind, 0, pos, pos + (len - pos))
+    this._lookbehind_size = len - pos
+  }
+
+  // Everything until pos is guaranteed not to contain needle data.
+  if (pos > 0) { this.emit('info', false, data, this._bufpos, pos < len ? pos : len) }
+
+  this._bufpos = len
+  return len
+}
+
+SBMH.prototype._sbmh_lookup_char = function (data, pos) {
+  return (pos < 0)
+    ? this._lookbehind[this._lookbehind_size + pos]
+    : data[pos]
+}
+
+SBMH.prototype._sbmh_memcmp = function (data, pos, len) {
+  for (var i = 0; i < len; ++i) { // eslint-disable-line no-var
+    if (this._sbmh_lookup_char(data, pos + i) !== this._needle[i]) { return false }
+  }
+  return true
+}
+
+module.exports = SBMH
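+// A minimal sketch of the streaming search above: 'info' events report matches and
+// the needle-free byte ranges of each pushed chunk:
+//
+//   const ss = new SBMH('\r\n')
+//   ss.on('info', function (isMatch, data, start, end) {
+//     if (data) { process.stdout.write(data.toString('latin1', start, end)) }
+//     if (isMatch) { process.stdout.write('<CRLF>') }
+//   })
+//   ss.push(Buffer.from('foo\r\nbar'))   // prints "foo<CRLF>bar"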
+
+
+/***/ }),
+
+/***/ 9766:
+/***/ ((module, __unused_webpack_exports, __nccwpck_require__) => {
+
+
+
+const WritableStream = (__nccwpck_require__(7075).Writable)
+const { inherits } = __nccwpck_require__(7975)
+const Dicer = __nccwpck_require__(6917)
+
+const MultipartParser = __nccwpck_require__(4945)
+const UrlencodedParser = __nccwpck_require__(1724)
+const parseParams = __nccwpck_require__(6812)
+
+function Busboy (opts) {
+  if (!(this instanceof Busboy)) { return new Busboy(opts) }
+
+  if (typeof opts !== 'object') {
+    throw new TypeError('Busboy expected an options-Object.')
+  }
+  if (typeof opts.headers !== 'object') {
+    throw new TypeError('Busboy expected an options-Object with headers-attribute.')
+  }
+  if (typeof opts.headers['content-type'] !== 'string') {
+    throw new TypeError('Missing Content-Type-header.')
+  }
+
+  const {
+    headers,
+    ...streamOptions
+  } = opts
+
+  this.opts = {
+    autoDestroy: false,
+    ...streamOptions
+  }
+  WritableStream.call(this, this.opts)
+
+  this._done = false
+  this._parser = this.getParserByHeaders(headers)
+  this._finished = false
+}
+inherits(Busboy, WritableStream)
+
+Busboy.prototype.emit = function (ev) {
+  if (ev === 'finish') {
+    if (!this._done) {
+      this._parser?.end()
+      return
+    } else if (this._finished) {
+      return
+    }
+    this._finished = true
+  }
+  WritableStream.prototype.emit.apply(this, arguments)
+}
+
+Busboy.prototype.getParserByHeaders = function (headers) {
+  const parsed = parseParams(headers['content-type'])
+
+  const cfg = {
+    defCharset: this.opts.defCharset,
+    fileHwm: this.opts.fileHwm,
+    headers,
+    highWaterMark: this.opts.highWaterMark,
+    isPartAFile: this.opts.isPartAFile,
+    limits: this.opts.limits,
+    parsedConType: parsed,
+    preservePath: this.opts.preservePath
+  }
+
+  if (MultipartParser.detect.test(parsed[0])) {
+    return new MultipartParser(this, cfg)
+  }
+  if (UrlencodedParser.detect.test(parsed[0])) {
+    return new UrlencodedParser(this, cfg)
+  }
+  throw new Error('Unsupported Content-Type.')
+}
+
+Busboy.prototype._write = function (chunk, encoding, cb) {
+  this._parser.write(chunk, cb)
+}
+
+module.exports = Busboy
+module.exports["default"] = Busboy
+module.exports.Busboy = Busboy
+
+module.exports.Dicer = Dicer
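+// A minimal usage sketch, assuming this bundle's consumer drives the module through
+// the usual busboy API (`req` stands for an incoming HTTP request; the events and
+// options are the ones handled in this file):
+//
+//   const bb = new Busboy({ headers: req.headers, limits: { fileSize: 1024 * 1024 } })
+//   bb.on('file', function (name, file, filename, encoding, mimetype) {
+//     file.on('data', function (chunk) { /* stream the upload somewhere */ })
+//     file.on('limit', function () { /* fileSize hit; file.truncated === true */ })
+//   })
+//   bb.on('field', function (name, value) { /* plain form fields */ })
+//   bb.on('finish', function () { /* all parts consumed */ })
+//   req.pipe(bb)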
+
+
+/***/ }),
+
+/***/ 4945:
+/***/ ((module, __unused_webpack_exports, __nccwpck_require__) => {
+
+
+
+// TODO:
+//  * support 1 nested multipart level
+//    (see second multipart example here:
+//     http://www.w3.org/TR/html401/interact/forms.html#didx-multipartform-data)
+//  * support limits.fieldNameSize
+//     -- this will require modifications to utils.parseParams
+
+const { Readable } = __nccwpck_require__(7075)
+const { inherits } = __nccwpck_require__(7975)
+
+const Dicer = __nccwpck_require__(6917)
+
+const parseParams = __nccwpck_require__(6812)
+const decodeText = __nccwpck_require__(3772)
+const basename = __nccwpck_require__(4935)
+const getLimit = __nccwpck_require__(1954)
+
+const RE_BOUNDARY = /^boundary$/i
+const RE_FIELD = /^form-data$/i
+const RE_CHARSET = /^charset$/i
+const RE_FILENAME = /^filename$/i
+const RE_NAME = /^name$/i
+
+Multipart.detect = /^multipart\/form-data/i
+function Multipart (boy, cfg) {
+  let i
+  let len
+  const self = this
+  let boundary
+  const limits = cfg.limits
+  const isPartAFile = cfg.isPartAFile || ((fieldName, contentType, fileName) => (contentType === 'application/octet-stream' || fileName !== undefined))
+  const parsedConType = cfg.parsedConType || []
+  const defCharset = cfg.defCharset || 'utf8'
+  const preservePath = cfg.preservePath
+  const fileOpts = { highWaterMark: cfg.fileHwm }
+
+  for (i = 0, len = parsedConType.length; i < len; ++i) {
+    if (Array.isArray(parsedConType[i]) &&
+      RE_BOUNDARY.test(parsedConType[i][0])) {
+      boundary = parsedConType[i][1]
+      break
+    }
+  }
+
+  function checkFinished () {
+    if (nends === 0 && finished && !boy._done) {
+      finished = false
+      self.end()
+    }
+  }
+
+  if (typeof boundary !== 'string') { throw new Error('Multipart: Boundary not found') }
+
+  const fieldSizeLimit = getLimit(limits, 'fieldSize', 1 * 1024 * 1024)
+  const fileSizeLimit = getLimit(limits, 'fileSize', Infinity)
+  const filesLimit = getLimit(limits, 'files', Infinity)
+  const fieldsLimit = getLimit(limits, 'fields', Infinity)
+  const partsLimit = getLimit(limits, 'parts', Infinity)
+  const headerPairsLimit = getLimit(limits, 'headerPairs', 2000)
+  const headerSizeLimit = getLimit(limits, 'headerSize', 80 * 1024)
+
+  let nfiles = 0
+  let nfields = 0
+  let nends = 0
+  let curFile
+  let curField
+  let finished = false
+
+  this._needDrain = false
+  this._pause = false
+  this._cb = undefined
+  this._nparts = 0
+  this._boy = boy
+
+  const parserCfg = {
+    boundary,
+    maxHeaderPairs: headerPairsLimit,
+    maxHeaderSize: headerSizeLimit,
+    partHwm: fileOpts.highWaterMark,
+    highWaterMark: cfg.highWaterMark
+  }
+
+  this.parser = new Dicer(parserCfg)
+  this.parser.on('drain', function () {
+    self._needDrain = false
+    if (self._cb && !self._pause) {
+      const cb = self._cb
+      self._cb = undefined
+      cb()
+    }
+  }).on('part', function onPart (part) {
+    if (++self._nparts > partsLimit) {
+      self.parser.removeListener('part', onPart)
+      self.parser.on('part', skipPart)
+      boy.hitPartsLimit = true
+      boy.emit('partsLimit')
+      return skipPart(part)
+    }
+
+    // hack: streams2 never emits 'end' until nextTick, so emit 'end' early
+    // ourselves since we know the part has ended once we are already seeing
+    // the next part
+    if (curField) {
+      const field = curField
+      field.emit('end')
+      field.removeAllListeners('end')
+    }
+
+    part.on('header', function (header) {
+      let contype
+      let fieldname
+      let parsed
+      let charset
+      let encoding
+      let filename
+      let nsize = 0
+
+      if (header['content-type']) {
+        parsed = parseParams(header['content-type'][0])
+        if (parsed[0]) {
+          contype = parsed[0].toLowerCase()
+          for (i = 0, len = parsed.length; i < len; ++i) {
+            if (RE_CHARSET.test(parsed[i][0])) {
+              charset = parsed[i][1].toLowerCase()
+              break
+            }
+          }
+        }
+      }
+
+      if (contype === undefined) { contype = 'text/plain' }
+      if (charset === undefined) { charset = defCharset }
+
+      if (header['content-disposition']) {
+        parsed = parseParams(header['content-disposition'][0])
+        if (!RE_FIELD.test(parsed[0])) { return skipPart(part) }
+        for (i = 0, len = parsed.length; i < len; ++i) {
+          if (RE_NAME.test(parsed[i][0])) {
+            fieldname = parsed[i][1]
+          } else if (RE_FILENAME.test(parsed[i][0])) {
+            filename = parsed[i][1]
+            if (!preservePath) { filename = basename(filename) }
+          }
+        }
+      } else { return skipPart(part) }
+
+      if (header['content-transfer-encoding']) { encoding = header['content-transfer-encoding'][0].toLowerCase() } else { encoding = '7bit' }
+
+      let onData,
+        onEnd
+
+      if (isPartAFile(fieldname, contype, filename)) {
+        // file/binary field
+        if (nfiles === filesLimit) {
+          if (!boy.hitFilesLimit) {
+            boy.hitFilesLimit = true
+            boy.emit('filesLimit')
+          }
+          return skipPart(part)
+        }
+
+        ++nfiles
+
+        if (boy.listenerCount('file') === 0) {
+          self.parser._ignore()
+          return
+        }
+
+        ++nends
+        const file = new FileStream(fileOpts)
+        curFile = file
+        file.on('end', function () {
+          --nends
+          self._pause = false
+          checkFinished()
+          if (self._cb && !self._needDrain) {
+            const cb = self._cb
+            self._cb = undefined
+            cb()
+          }
+        })
+        file._read = function (n) {
+          if (!self._pause) { return }
+          self._pause = false
+          if (self._cb && !self._needDrain) {
+            const cb = self._cb
+            self._cb = undefined
+            cb()
+          }
+        }
+        boy.emit('file', fieldname, file, filename, encoding, contype)
+
+        onData = function (data) {
+          if ((nsize += data.length) > fileSizeLimit) {
+            const extralen = fileSizeLimit - nsize + data.length
+            if (extralen > 0) { file.push(data.slice(0, extralen)) }
+            file.truncated = true
+            file.bytesRead = fileSizeLimit
+            part.removeAllListeners('data')
+            file.emit('limit')
+            return
+          } else if (!file.push(data)) { self._pause = true }
+
+          file.bytesRead = nsize
+        }
+
+        onEnd = function () {
+          curFile = undefined
+          file.push(null)
+        }
+      } else {
+        // non-file field
+        if (nfields === fieldsLimit) {
+          if (!boy.hitFieldsLimit) {
+            boy.hitFieldsLimit = true
+            boy.emit('fieldsLimit')
+          }
+          return skipPart(part)
+        }
+
+        ++nfields
+        ++nends
+        let buffer = ''
+        let truncated = false
+        curField = part
+
+        onData = function (data) {
+          if ((nsize += data.length) > fieldSizeLimit) {
+            const extralen = (fieldSizeLimit - (nsize - data.length))
+            buffer += data.toString('binary', 0, extralen)
+            truncated = true
+            part.removeAllListeners('data')
+          } else { buffer += data.toString('binary') }
+        }
+
+        onEnd = function () {
+          curField = undefined
+          if (buffer.length) { buffer = decodeText(buffer, 'binary', charset) }
+          boy.emit('field', fieldname, buffer, false, truncated, encoding, contype)
+          --nends
+          checkFinished()
+        }
+      }
+
+      /* As of node@2efe4ab761666 (v0.10.29+/v0.11.14+), busboy had become
+         broken. Streams2/streams3 is a huge black box of confusion, but
+         somehow overriding the sync state seems to fix things again (and still
+         seems to work for previous node versions).
+      */
+      part._readableState.sync = false
+
+      part.on('data', onData)
+      part.on('end', onEnd)
+    }).on('error', function (err) {
+      if (curFile) { curFile.emit('error', err) }
+    })
+  }).on('error', function (err) {
+    boy.emit('error', err)
+  }).on('finish', function () {
+    finished = true
+    checkFinished()
+  })
+}
+
+Multipart.prototype.write = function (chunk, cb) {
+  const r = this.parser.write(chunk)
+  if (r && !this._pause) {
+    cb()
+  } else {
+    this._needDrain = !r
+    this._cb = cb
+  }
+}
+
+Multipart.prototype.end = function () {
+  const self = this
+
+  if (self.parser.writable) {
+    self.parser.end()
+  } else if (!self._boy._done) {
+    process.nextTick(function () {
+      self._boy._done = true
+      self._boy.emit('finish')
+    })
+  }
+}
+
+function skipPart (part) {
+  part.resume()
+}
+
+function FileStream (opts) {
+  Readable.call(this, opts)
+
+  this.bytesRead = 0
+
+  this.truncated = false
+}
+
+inherits(FileStream, Readable)
+
+FileStream.prototype._read = function (n) {}
+
+module.exports = Multipart
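+// A minimal sketch of the limit handling above, seen from the Busboy consumer side
+// (limit names come from the getLimit() calls in this module; `req` is the same
+// placeholder as in the Busboy sketch):
+//
+//   const bb = new Busboy({
+//     headers: req.headers,
+//     limits: { files: 1, fields: 10, parts: 20, fieldSize: 64 * 1024 }
+//   })
+//   bb.on('filesLimit', function () { /* additional file parts are skipped */ })
+//   bb.on('fieldsLimit', function () { /* additional non-file fields are skipped */ })
+//   bb.on('partsLimit', function () { /* every further part is skipped */ })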
+
+
+/***/ }),
+
+/***/ 1724:
+/***/ ((module, __unused_webpack_exports, __nccwpck_require__) => {
+
+
+
+const Decoder = __nccwpck_require__(393)
+const decodeText = __nccwpck_require__(3772)
+const getLimit = __nccwpck_require__(1954)
+
+const RE_CHARSET = /^charset$/i
+
+UrlEncoded.detect = /^application\/x-www-form-urlencoded/i
+function UrlEncoded (boy, cfg) {
+  const limits = cfg.limits
+  const parsedConType = cfg.parsedConType
+  this.boy = boy
+
+  this.fieldSizeLimit = getLimit(limits, 'fieldSize', 1 * 1024 * 1024)
+  this.fieldNameSizeLimit = getLimit(limits, 'fieldNameSize', 100)
+  this.fieldsLimit = getLimit(limits, 'fields', Infinity)
+
+  let charset
+  for (var i = 0, len = parsedConType.length; i < len; ++i) { // eslint-disable-line no-var
+    if (Array.isArray(parsedConType[i]) &&
+        RE_CHARSET.test(parsedConType[i][0])) {
+      charset = parsedConType[i][1].toLowerCase()
+      break
+    }
+  }
+
+  if (charset === undefined) { charset = cfg.defCharset || 'utf8' }
+
+  this.decoder = new Decoder()
+  this.charset = charset
+  this._fields = 0
+  this._state = 'key'
+  this._checkingBytes = true
+  this._bytesKey = 0
+  this._bytesVal = 0
+  this._key = ''
+  this._val = ''
+  this._keyTrunc = false
+  this._valTrunc = false
+  this._hitLimit = false
+}
+
+UrlEncoded.prototype.write = function (data, cb) {
+  if (this._fields === this.fieldsLimit) {
+    if (!this.boy.hitFieldsLimit) {
+      this.boy.hitFieldsLimit = true
+      this.boy.emit('fieldsLimit')
+    }
+    return cb()
+  }
+
+  let idxeq; let idxamp; let i; let p = 0; const len = data.length
+
+  while (p < len) {
+    if (this._state === 'key') {
+      idxeq = idxamp = undefined
+      for (i = p; i < len; ++i) {
+        if (!this._checkingBytes) { ++p }
+        if (data[i] === 0x3D/* = */) {
+          idxeq = i
+          break
+        } else if (data[i] === 0x26/* & */) {
+          idxamp = i
+          break
+        }
+        if (this._checkingBytes && this._bytesKey === this.fieldNameSizeLimit) {
+          this._hitLimit = true
+          break
+        } else if (this._checkingBytes) { ++this._bytesKey }
+      }
+
+      if (idxeq !== undefined) {
+        // key with assignment
+        if (idxeq > p) { this._key += this.decoder.write(data.toString('binary', p, idxeq)) }
+        this._state = 'val'
+
+        this._hitLimit = false
+        this._checkingBytes = true
+        this._val = ''
+        this._bytesVal = 0
+        this._valTrunc = false
+        this.decoder.reset()
+
+        p = idxeq + 1
+      } else if (idxamp !== undefined) {
+        // key with no assignment
+        ++this._fields
+        let key; const keyTrunc = this._keyTrunc
+        if (idxamp > p) { key = (this._key += this.decoder.write(data.toString('binary', p, idxamp))) } else { key = this._key }
+
+        this._hitLimit = false
+        this._checkingBytes = true
+        this._key = ''
+        this._bytesKey = 0
+        this._keyTrunc = false
+        this.decoder.reset()
+
+        if (key.length) {
+          this.boy.emit('field', decodeText(key, 'binary', this.charset),
+            '',
+            keyTrunc,
+            false)
+        }
+
+        p = idxamp + 1
+        if (this._fields === this.fieldsLimit) { return cb() }
+      } else if (this._hitLimit) {
+        // we may not have hit the actual limit if there are encoded bytes...
+        if (i > p) { this._key += this.decoder.write(data.toString('binary', p, i)) }
+        p = i
+        if ((this._bytesKey = this._key.length) === this.fieldNameSizeLimit) {
+          // yep, we actually did hit the limit
+          this._checkingBytes = false
+          this._keyTrunc = true
+        }
+      } else {
+        if (p < len) { this._key += this.decoder.write(data.toString('binary', p)) }
+        p = len
+      }
+    } else {
+      idxamp = undefined
+      for (i = p; i < len; ++i) {
+        if (!this._checkingBytes) { ++p }
+        if (data[i] === 0x26/* & */) {
+          idxamp = i
+          break
+        }
+        if (this._checkingBytes && this._bytesVal === this.fieldSizeLimit) {
+          this._hitLimit = true
+          break
+        } else if (this._checkingBytes) { ++this._bytesVal }
+      }
+
+      if (idxamp !== undefined) {
+        ++this._fields
+        if (idxamp > p) { this._val += this.decoder.write(data.toString('binary', p, idxamp)) }
+        this.boy.emit('field', decodeText(this._key, 'binary', this.charset),
+          decodeText(this._val, 'binary', this.charset),
+          this._keyTrunc,
+          this._valTrunc)
+        this._state = 'key'
+
+        this._hitLimit = false
+        this._checkingBytes = true
+        this._key = ''
+        this._bytesKey = 0
+        this._keyTrunc = false
+        this.decoder.reset()
+
+        p = idxamp + 1
+        if (this._fields === this.fieldsLimit) { return cb() }
+      } else if (this._hitLimit) {
+        // we may not have hit the actual limit if there are encoded bytes...
+        if (i > p) { this._val += this.decoder.write(data.toString('binary', p, i)) }
+        p = i
+        if ((this._val === '' && this.fieldSizeLimit === 0) ||
+            (this._bytesVal = this._val.length) === this.fieldSizeLimit) {
+          // yep, we actually did hit the limit
+          this._checkingBytes = false
+          this._valTrunc = true
+        }
+      } else {
+        if (p < len) { this._val += this.decoder.write(data.toString('binary', p)) }
+        p = len
+      }
+    }
+  }
+  cb()
+}
+
+UrlEncoded.prototype.end = function () {
+  if (this.boy._done) { return }
+
+  if (this._state === 'key' && this._key.length > 0) {
+    this.boy.emit('field', decodeText(this._key, 'binary', this.charset),
+      '',
+      this._keyTrunc,
+      false)
+  } else if (this._state === 'val') {
+    this.boy.emit('field', decodeText(this._key, 'binary', this.charset),
+      decodeText(this._val, 'binary', this.charset),
+      this._keyTrunc,
+      this._valTrunc)
+  }
+  this.boy._done = true
+  this.boy.emit('finish')
+}
+
+module.exports = UrlEncoded
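+// A minimal sketch: with an application/x-www-form-urlencoded body the Busboy front
+// end routes data to this parser and only 'field' events are emitted:
+//
+//   const bb = new Busboy({ headers: { 'content-type': 'application/x-www-form-urlencoded' } })
+//   bb.on('field', function (name, value, nameTruncated, valueTruncated) {
+//     // 'a' -> 'b c'; a key without '=' (like 'flag') arrives with an empty value
+//   })
+//   bb.on('finish', function () {})
+//   bb.end(Buffer.from('a=b%20c&flag'))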
+
+
+/***/ }),
+
+/***/ 393:
+/***/ ((module) => {
+
+
+
+const RE_PLUS = /\+/g
+
+const HEX = [
+  0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0,
+  0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0,
+  0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0,
+  1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 0, 0, 0, 0, 0, 0,
+  0, 1, 1, 1, 1, 1, 1, 0, 0, 0, 0, 0, 0, 0, 0, 0,
+  0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0,
+  0, 1, 1, 1, 1, 1, 1, 0, 0, 0, 0, 0, 0, 0, 0, 0,
+  0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0
+]
+
+function Decoder () {
+  this.buffer = undefined
+}
+Decoder.prototype.write = function (str) {
+  // Replace '+' with ' ' before decoding
+  str = str.replace(RE_PLUS, ' ')
+  let res = ''
+  let i = 0; let p = 0; const len = str.length
+  for (; i < len; ++i) {
+    if (this.buffer !== undefined) {
+      if (!HEX[str.charCodeAt(i)]) {
+        res += '%' + this.buffer
+        this.buffer = undefined
+        --i // retry character
+      } else {
+        this.buffer += str[i]
+        ++p
+        if (this.buffer.length === 2) {
+          res += String.fromCharCode(parseInt(this.buffer, 16))
+          this.buffer = undefined
+        }
+      }
+    } else if (str[i] === '%') {
+      if (i > p) {
+        res += str.substring(p, i)
+        p = i
+      }
+      this.buffer = ''
+      ++p
+    }
+  }
+  if (p < len && this.buffer === undefined) { res += str.substring(p) }
+  return res
+}
+Decoder.prototype.reset = function () {
+  this.buffer = undefined
+}
+
+module.exports = Decoder
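+// A minimal sketch: the Decoder above percent-decodes incrementally, keeping an
+// incomplete escape in `this.buffer` between write() calls:
+//
+//   const dec = new Decoder()
+//   dec.write('a+b%20c')   // -> 'a b c'  ('+' becomes a space, '%20' is decoded)
+//   dec.write('caf%E')     // -> 'caf'    (the partial escape '%E' is held back)
+//   dec.write('9')         // -> '\xe9'   (the escape completes across chunks)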
+
+
+/***/ }),
+
+/***/ 4935:
+/***/ ((module) => {
+
+
+
+module.exports = function basename (path) {
+  if (typeof path !== 'string') { return '' }
+  for (var i = path.length - 1; i >= 0; --i) { // eslint-disable-line no-var
+    switch (path.charCodeAt(i)) {
+      case 0x2F: // '/'
+      case 0x5C: // '\'
+        path = path.slice(i + 1)
+        return (path === '..' || path === '.' ? '' : path)
+    }
+  }
+  return (path === '..' || path === '.' ? '' : path)
+}
+
+
+/***/ }),
+
+/***/ 3772:
+/***/ (function(module) {
+
+
+
+// Node always supports utf-8
+const utf8Decoder = new TextDecoder('utf-8')
+const textDecoders = new Map([
+  ['utf-8', utf8Decoder],
+  ['utf8', utf8Decoder]
+])
+
+function getDecoder (charset) {
+  let lc
+  while (true) {
+    switch (charset) {
+      case 'utf-8':
+      case 'utf8':
+        return decoders.utf8
+      case 'latin1':
+      case 'ascii': // TODO: Make these a separate, strict decoder?
+      case 'us-ascii':
+      case 'iso-8859-1':
+      case 'iso8859-1':
+      case 'iso88591':
+      case 'iso_8859-1':
+      case 'windows-1252':
+      case 'iso_8859-1:1987':
+      case 'cp1252':
+      case 'x-cp1252':
+        return decoders.latin1
+      case 'utf16le':
+      case 'utf-16le':
+      case 'ucs2':
+      case 'ucs-2':
+        return decoders.utf16le
+      case 'base64':
+        return decoders.base64
+      default:
+        if (lc === undefined) {
+          lc = true
+          charset = charset.toLowerCase()
+          continue
+        }
+        return decoders.other.bind(charset)
+    }
+  }
+}
+
+const decoders = {
+  utf8: (data, sourceEncoding) => {
+    if (data.length === 0) {
+      return ''
+    }
+    if (typeof data === 'string') {
+      data = Buffer.from(data, sourceEncoding)
+    }
+    return data.utf8Slice(0, data.length)
+  },
+
+  latin1: (data, sourceEncoding) => {
+    if (data.length === 0) {
+      return ''
+    }
+    if (typeof data === 'string') {
+      return data
+    }
+    return data.latin1Slice(0, data.length)
+  },
+
+  utf16le: (data, sourceEncoding) => {
+    if (data.length === 0) {
+      return ''
+    }
+    if (typeof data === 'string') {
+      data = Buffer.from(data, sourceEncoding)
+    }
+    return data.ucs2Slice(0, data.length)
+  },
+
+  base64: (data, sourceEncoding) => {
+    if (data.length === 0) {
+      return ''
+    }
+    if (typeof data === 'string') {
+      data = Buffer.from(data, sourceEncoding)
+    }
+    return data.base64Slice(0, data.length)
+  },
+
+  other: (data, sourceEncoding) => {
+    if (data.length === 0) {
+      return ''
+    }
+    if (typeof data === 'string') {
+      data = Buffer.from(data, sourceEncoding)
+    }
+
+    if (textDecoders.has(this.toString())) {
+      try {
+        return textDecoders.get(this).decode(data)
+      } catch {}
+    }
+    return typeof data === 'string'
+      ? data
+      : data.toString()
+  }
+}
+
+function decodeText (text, sourceEncoding, destEncoding) {
+  if (text) {
+    return getDecoder(destEncoding)(text, sourceEncoding)
+  }
+  return text
+}
+
+module.exports = decodeText
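+// A minimal sketch: decodeText() re-interprets text collected as 'binary' (latin1)
+// in the charset announced by the request:
+//
+//   decodeText('caf\xc3\xa9', 'binary', 'utf-8')   // -> 'café'
+//   decodeText('caf\xe9', 'binary', 'latin1')      // -> 'café'
+//   decodeText('', 'binary', 'utf-8')              // -> ''  (falsy input is returned as-is)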
+
+
+/***/ }),
+
+/***/ 1954:
+/***/ ((module) => {
+
+
+
+module.exports = function getLimit (limits, name, defaultLimit) {
+  if (
+    !limits ||
+    limits[name] === undefined ||
+    limits[name] === null
+  ) { return defaultLimit }
+
+  if (
+    typeof limits[name] !== 'number' ||
+    isNaN(limits[name])
+  ) { throw new TypeError('Limit ' + name + ' is not a valid number') }
+
+  return limits[name]
+}
+
+
+/***/ }),
+
+/***/ 6812:
+/***/ ((module, __unused_webpack_exports, __nccwpck_require__) => {
+
+/* eslint-disable object-property-newline */
+
+
+const decodeText = __nccwpck_require__(3772)
+
+const RE_ENCODED = /%[a-fA-F0-9][a-fA-F0-9]/g
+
+const EncodedLookup = {
+  '%00': '\x00', '%01': '\x01', '%02': '\x02', '%03': '\x03', '%04': '\x04',
+  '%05': '\x05', '%06': '\x06', '%07': '\x07', '%08': '\x08', '%09': '\x09',
+  '%0a': '\x0a', '%0A': '\x0a', '%0b': '\x0b', '%0B': '\x0b', '%0c': '\x0c',
+  '%0C': '\x0c', '%0d': '\x0d', '%0D': '\x0d', '%0e': '\x0e', '%0E': '\x0e',
+  '%0f': '\x0f', '%0F': '\x0f', '%10': '\x10', '%11': '\x11', '%12': '\x12',
+  '%13': '\x13', '%14': '\x14', '%15': '\x15', '%16': '\x16', '%17': '\x17',
+  '%18': '\x18', '%19': '\x19', '%1a': '\x1a', '%1A': '\x1a', '%1b': '\x1b',
+  '%1B': '\x1b', '%1c': '\x1c', '%1C': '\x1c', '%1d': '\x1d', '%1D': '\x1d',
+  '%1e': '\x1e', '%1E': '\x1e', '%1f': '\x1f', '%1F': '\x1f', '%20': '\x20',
+  '%21': '\x21', '%22': '\x22', '%23': '\x23', '%24': '\x24', '%25': '\x25',
+  '%26': '\x26', '%27': '\x27', '%28': '\x28', '%29': '\x29', '%2a': '\x2a',
+  '%2A': '\x2a', '%2b': '\x2b', '%2B': '\x2b', '%2c': '\x2c', '%2C': '\x2c',
+  '%2d': '\x2d', '%2D': '\x2d', '%2e': '\x2e', '%2E': '\x2e', '%2f': '\x2f',
+  '%2F': '\x2f', '%30': '\x30', '%31': '\x31', '%32': '\x32', '%33': '\x33',
+  '%34': '\x34', '%35': '\x35', '%36': '\x36', '%37': '\x37', '%38': '\x38',
+  '%39': '\x39', '%3a': '\x3a', '%3A': '\x3a', '%3b': '\x3b', '%3B': '\x3b',
+  '%3c': '\x3c', '%3C': '\x3c', '%3d': '\x3d', '%3D': '\x3d', '%3e': '\x3e',
+  '%3E': '\x3e', '%3f': '\x3f', '%3F': '\x3f', '%40': '\x40', '%41': '\x41',
+  '%42': '\x42', '%43': '\x43', '%44': '\x44', '%45': '\x45', '%46': '\x46',
+  '%47': '\x47', '%48': '\x48', '%49': '\x49', '%4a': '\x4a', '%4A': '\x4a',
+  '%4b': '\x4b', '%4B': '\x4b', '%4c': '\x4c', '%4C': '\x4c', '%4d': '\x4d',
+  '%4D': '\x4d', '%4e': '\x4e', '%4E': '\x4e', '%4f': '\x4f', '%4F': '\x4f',
+  '%50': '\x50', '%51': '\x51', '%52': '\x52', '%53': '\x53', '%54': '\x54',
+  '%55': '\x55', '%56': '\x56', '%57': '\x57', '%58': '\x58', '%59': '\x59',
+  '%5a': '\x5a', '%5A': '\x5a', '%5b': '\x5b', '%5B': '\x5b', '%5c': '\x5c',
+  '%5C': '\x5c', '%5d': '\x5d', '%5D': '\x5d', '%5e': '\x5e', '%5E': '\x5e',
+  '%5f': '\x5f', '%5F': '\x5f', '%60': '\x60', '%61': '\x61', '%62': '\x62',
+  '%63': '\x63', '%64': '\x64', '%65': '\x65', '%66': '\x66', '%67': '\x67',
+  '%68': '\x68', '%69': '\x69', '%6a': '\x6a', '%6A': '\x6a', '%6b': '\x6b',
+  '%6B': '\x6b', '%6c': '\x6c', '%6C': '\x6c', '%6d': '\x6d', '%6D': '\x6d',
+  '%6e': '\x6e', '%6E': '\x6e', '%6f': '\x6f', '%6F': '\x6f', '%70': '\x70',
+  '%71': '\x71', '%72': '\x72', '%73': '\x73', '%74': '\x74', '%75': '\x75',
+  '%76': '\x76', '%77': '\x77', '%78': '\x78', '%79': '\x79', '%7a': '\x7a',
+  '%7A': '\x7a', '%7b': '\x7b', '%7B': '\x7b', '%7c': '\x7c', '%7C': '\x7c',
+  '%7d': '\x7d', '%7D': '\x7d', '%7e': '\x7e', '%7E': '\x7e', '%7f': '\x7f',
+  '%7F': '\x7f', '%80': '\x80', '%81': '\x81', '%82': '\x82', '%83': '\x83',
+  '%84': '\x84', '%85': '\x85', '%86': '\x86', '%87': '\x87', '%88': '\x88',
+  '%89': '\x89', '%8a': '\x8a', '%8A': '\x8a', '%8b': '\x8b', '%8B': '\x8b',
+  '%8c': '\x8c', '%8C': '\x8c', '%8d': '\x8d', '%8D': '\x8d', '%8e': '\x8e',
+  '%8E': '\x8e', '%8f': '\x8f', '%8F': '\x8f', '%90': '\x90', '%91': '\x91',
+  '%92': '\x92', '%93': '\x93', '%94': '\x94', '%95': '\x95', '%96': '\x96',
+  '%97': '\x97', '%98': '\x98', '%99': '\x99', '%9a': '\x9a', '%9A': '\x9a',
+  '%9b': '\x9b', '%9B': '\x9b', '%9c': '\x9c', '%9C': '\x9c', '%9d': '\x9d',
+  '%9D': '\x9d', '%9e': '\x9e', '%9E': '\x9e', '%9f': '\x9f', '%9F': '\x9f',
+  '%a0': '\xa0', '%A0': '\xa0', '%a1': '\xa1', '%A1': '\xa1', '%a2': '\xa2',
+  '%A2': '\xa2', '%a3': '\xa3', '%A3': '\xa3', '%a4': '\xa4', '%A4': '\xa4',
+  '%a5': '\xa5', '%A5': '\xa5', '%a6': '\xa6', '%A6': '\xa6', '%a7': '\xa7',
+  '%A7': '\xa7', '%a8': '\xa8', '%A8': '\xa8', '%a9': '\xa9', '%A9': '\xa9',
+  '%aa': '\xaa', '%Aa': '\xaa', '%aA': '\xaa', '%AA': '\xaa', '%ab': '\xab',
+  '%Ab': '\xab', '%aB': '\xab', '%AB': '\xab', '%ac': '\xac', '%Ac': '\xac',
+  '%aC': '\xac', '%AC': '\xac', '%ad': '\xad', '%Ad': '\xad', '%aD': '\xad',
+  '%AD': '\xad', '%ae': '\xae', '%Ae': '\xae', '%aE': '\xae', '%AE': '\xae',
+  '%af': '\xaf', '%Af': '\xaf', '%aF': '\xaf', '%AF': '\xaf', '%b0': '\xb0',
+  '%B0': '\xb0', '%b1': '\xb1', '%B1': '\xb1', '%b2': '\xb2', '%B2': '\xb2',
+  '%b3': '\xb3', '%B3': '\xb3', '%b4': '\xb4', '%B4': '\xb4', '%b5': '\xb5',
+  '%B5': '\xb5', '%b6': '\xb6', '%B6': '\xb6', '%b7': '\xb7', '%B7': '\xb7',
+  '%b8': '\xb8', '%B8': '\xb8', '%b9': '\xb9', '%B9': '\xb9', '%ba': '\xba',
+  '%Ba': '\xba', '%bA': '\xba', '%BA': '\xba', '%bb': '\xbb', '%Bb': '\xbb',
+  '%bB': '\xbb', '%BB': '\xbb', '%bc': '\xbc', '%Bc': '\xbc', '%bC': '\xbc',
+  '%BC': '\xbc', '%bd': '\xbd', '%Bd': '\xbd', '%bD': '\xbd', '%BD': '\xbd',
+  '%be': '\xbe', '%Be': '\xbe', '%bE': '\xbe', '%BE': '\xbe', '%bf': '\xbf',
+  '%Bf': '\xbf', '%bF': '\xbf', '%BF': '\xbf', '%c0': '\xc0', '%C0': '\xc0',
+  '%c1': '\xc1', '%C1': '\xc1', '%c2': '\xc2', '%C2': '\xc2', '%c3': '\xc3',
+  '%C3': '\xc3', '%c4': '\xc4', '%C4': '\xc4', '%c5': '\xc5', '%C5': '\xc5',
+  '%c6': '\xc6', '%C6': '\xc6', '%c7': '\xc7', '%C7': '\xc7', '%c8': '\xc8',
+  '%C8': '\xc8', '%c9': '\xc9', '%C9': '\xc9', '%ca': '\xca', '%Ca': '\xca',
+  '%cA': '\xca', '%CA': '\xca', '%cb': '\xcb', '%Cb': '\xcb', '%cB': '\xcb',
+  '%CB': '\xcb', '%cc': '\xcc', '%Cc': '\xcc', '%cC': '\xcc', '%CC': '\xcc',
+  '%cd': '\xcd', '%Cd': '\xcd', '%cD': '\xcd', '%CD': '\xcd', '%ce': '\xce',
+  '%Ce': '\xce', '%cE': '\xce', '%CE': '\xce', '%cf': '\xcf', '%Cf': '\xcf',
+  '%cF': '\xcf', '%CF': '\xcf', '%d0': '\xd0', '%D0': '\xd0', '%d1': '\xd1',
+  '%D1': '\xd1', '%d2': '\xd2', '%D2': '\xd2', '%d3': '\xd3', '%D3': '\xd3',
+  '%d4': '\xd4', '%D4': '\xd4', '%d5': '\xd5', '%D5': '\xd5', '%d6': '\xd6',
+  '%D6': '\xd6', '%d7': '\xd7', '%D7': '\xd7', '%d8': '\xd8', '%D8': '\xd8',
+  '%d9': '\xd9', '%D9': '\xd9', '%da': '\xda', '%Da': '\xda', '%dA': '\xda',
+  '%DA': '\xda', '%db': '\xdb', '%Db': '\xdb', '%dB': '\xdb', '%DB': '\xdb',
+  '%dc': '\xdc', '%Dc': '\xdc', '%dC': '\xdc', '%DC': '\xdc', '%dd': '\xdd',
+  '%Dd': '\xdd', '%dD': '\xdd', '%DD': '\xdd', '%de': '\xde', '%De': '\xde',
+  '%dE': '\xde', '%DE': '\xde', '%df': '\xdf', '%Df': '\xdf', '%dF': '\xdf',
+  '%DF': '\xdf', '%e0': '\xe0', '%E0': '\xe0', '%e1': '\xe1', '%E1': '\xe1',
+  '%e2': '\xe2', '%E2': '\xe2', '%e3': '\xe3', '%E3': '\xe3', '%e4': '\xe4',
+  '%E4': '\xe4', '%e5': '\xe5', '%E5': '\xe5', '%e6': '\xe6', '%E6': '\xe6',
+  '%e7': '\xe7', '%E7': '\xe7', '%e8': '\xe8', '%E8': '\xe8', '%e9': '\xe9',
+  '%E9': '\xe9', '%ea': '\xea', '%Ea': '\xea', '%eA': '\xea', '%EA': '\xea',
+  '%eb': '\xeb', '%Eb': '\xeb', '%eB': '\xeb', '%EB': '\xeb', '%ec': '\xec',
+  '%Ec': '\xec', '%eC': '\xec', '%EC': '\xec', '%ed': '\xed', '%Ed': '\xed',
+  '%eD': '\xed', '%ED': '\xed', '%ee': '\xee', '%Ee': '\xee', '%eE': '\xee',
+  '%EE': '\xee', '%ef': '\xef', '%Ef': '\xef', '%eF': '\xef', '%EF': '\xef',
+  '%f0': '\xf0', '%F0': '\xf0', '%f1': '\xf1', '%F1': '\xf1', '%f2': '\xf2',
+  '%F2': '\xf2', '%f3': '\xf3', '%F3': '\xf3', '%f4': '\xf4', '%F4': '\xf4',
+  '%f5': '\xf5', '%F5': '\xf5', '%f6': '\xf6', '%F6': '\xf6', '%f7': '\xf7',
+  '%F7': '\xf7', '%f8': '\xf8', '%F8': '\xf8', '%f9': '\xf9', '%F9': '\xf9',
+  '%fa': '\xfa', '%Fa': '\xfa', '%fA': '\xfa', '%FA': '\xfa', '%fb': '\xfb',
+  '%Fb': '\xfb', '%fB': '\xfb', '%FB': '\xfb', '%fc': '\xfc', '%Fc': '\xfc',
+  '%fC': '\xfc', '%FC': '\xfc', '%fd': '\xfd', '%Fd': '\xfd', '%fD': '\xfd',
+  '%FD': '\xfd', '%fe': '\xfe', '%Fe': '\xfe', '%fE': '\xfe', '%FE': '\xfe',
+  '%ff': '\xff', '%Ff': '\xff', '%fF': '\xff', '%FF': '\xff'
+}
+
+function encodedReplacer (match) {
+  return EncodedLookup[match]
+}
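+
+// Editor's note (illustrative, not part of the upstream bundle): EncodedLookup maps every case
+// variant of a two-hex-digit percent escape to its Latin-1 character, and encodedReplacer is the
+// String.prototype.replace callback used with RE_ENCODED (defined earlier in this module, assumed
+// to match such escapes). For example:
+//   'caf%E9'.replace(RE_ENCODED, encodedReplacer)  // => 'caf\xe9' ('café' as a binary string)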
+
+const STATE_KEY = 0
+const STATE_VALUE = 1
+const STATE_CHARSET = 2
+const STATE_LANG = 3
+
+function parseParams (str) {
+  const res = []
+  let state = STATE_KEY
+  let charset = ''
+  let inquote = false
+  let escaping = false
+  let p = 0
+  let tmp = ''
+  const len = str.length
+
+  for (var i = 0; i < len; ++i) { // eslint-disable-line no-var
+    const char = str[i]
+    if (char === '\\' && inquote) {
+      if (escaping) { escaping = false } else {
+        escaping = true
+        continue
+      }
+    } else if (char === '"') {
+      if (!escaping) {
+        if (inquote) {
+          inquote = false
+          state = STATE_KEY
+        } else { inquote = true }
+        continue
+      } else { escaping = false }
+    } else {
+      if (escaping && inquote) { tmp += '\\' }
+      escaping = false
+      if ((state === STATE_CHARSET || state === STATE_LANG) && char === "'") {
+        if (state === STATE_CHARSET) {
+          state = STATE_LANG
+          charset = tmp.substring(1)
+        } else { state = STATE_VALUE }
+        tmp = ''
+        continue
+      } else if (state === STATE_KEY &&
+        (char === '*' || char === '=') &&
+        res.length) {
+        state = char === '*'
+          ? STATE_CHARSET
+          : STATE_VALUE
+        res[p] = [tmp, undefined]
+        tmp = ''
+        continue
+      } else if (!inquote && char === ';') {
+        state = STATE_KEY
+        if (charset) {
+          if (tmp.length) {
+            tmp = decodeText(tmp.replace(RE_ENCODED, encodedReplacer),
+              'binary',
+              charset)
+          }
+          charset = ''
+        } else if (tmp.length) {
+          tmp = decodeText(tmp, 'binary', 'utf8')
+        }
+        if (res[p] === undefined) { res[p] = tmp } else { res[p][1] = tmp }
+        tmp = ''
+        ++p
+        continue
+      } else if (!inquote && (char === ' ' || char === '\t')) { continue }
+    }
+    tmp += char
+  }
+  if (charset && tmp.length) {
+    tmp = decodeText(tmp.replace(RE_ENCODED, encodedReplacer),
+      'binary',
+      charset)
+  } else if (tmp) {
+    tmp = decodeText(tmp, 'binary', 'utf8')
+  }
+
+  if (res[p] === undefined) {
+    if (tmp) { res[p] = tmp }
+  } else { res[p][1] = tmp }
+
+  return res
+}
+
+module.exports = parseParams
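+
+// Editor's note (illustrative, not part of the upstream bundle): parseParams splits a header value
+// such as a Content-Disposition into the leading token plus [key, value] pairs, decoding RFC 2231
+// extended parameters via decodeText (defined earlier in this module). For example:
+//   parseParams('form-data; name="file"; filename="a.txt"')
+//   // => ['form-data', ['name', 'file'], ['filename', 'a.txt']]
+//   parseParams("attachment; filename*=utf-8''t%C3%A9st.txt")
+//   // => ['attachment', ['filename', 'tést.txt']]  (value decoded with the declared charset)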
+
+
+/***/ })
+
+/******/ });
+/************************************************************************/
+/******/ // The module cache
+/******/ var __webpack_module_cache__ = {};
+/******/ 
+/******/ // The require function
+/******/ function __nccwpck_require__(moduleId) {
+/******/ 	// Check if module is in cache
+/******/ 	var cachedModule = __webpack_module_cache__[moduleId];
+/******/ 	if (cachedModule !== undefined) {
+/******/ 		return cachedModule.exports;
+/******/ 	}
+/******/ 	// Create a new module (and put it into the cache)
+/******/ 	var module = __webpack_module_cache__[moduleId] = {
+/******/ 		// no module.id needed
+/******/ 		// no module.loaded needed
+/******/ 		exports: {}
+/******/ 	};
+/******/ 
+/******/ 	// Execute the module function
+/******/ 	var threw = true;
+/******/ 	try {
+/******/ 		__webpack_modules__[moduleId].call(module.exports, module, module.exports, __nccwpck_require__);
+/******/ 		threw = false;
+/******/ 	} finally {
+/******/ 		if(threw) delete __webpack_module_cache__[moduleId];
+/******/ 	}
+/******/ 
+/******/ 	// Return the exports of the module
+/******/ 	return module.exports;
+/******/ }
+/******/ 
+/************************************************************************/
+/******/ /* webpack/runtime/compat */
+/******/ 
+/******/ if (typeof __nccwpck_require__ !== 'undefined') __nccwpck_require__.ab = new URL('.', import.meta.url).pathname.slice(import.meta.url.match(/^file:\/\/\/\w:/) ? 1 : 0, -1) + "/";
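+/******/ // Editor's note (illustrative): __nccwpck_require__.ab is the absolute directory of this
+/******/ // bundle with a trailing slash (the leading drive-letter slash is stripped on Windows),
+/******/ // presumably used elsewhere in the bundle to resolve assets copied next to it.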
+/******/ 
+/************************************************************************/
+var __webpack_exports__ = {};
+
+// EXTERNAL MODULE: ./node_modules/.pnpm/@actions+core@1.11.1/node_modules/@actions/core/lib/core.js
+var core = __nccwpck_require__(9999);
+// EXTERNAL MODULE: ./node_modules/.pnpm/@actions+exec@1.1.1/node_modules/@actions/exec/lib/exec.js
+var exec = __nccwpck_require__(8872);
+;// CONCATENATED MODULE: external "node:fs"
+const external_node_fs_namespaceObject = __WEBPACK_EXTERNAL_createRequire(import.meta.url)("node:fs");
+// EXTERNAL MODULE: external "node:os"
+var external_node_os_ = __nccwpck_require__(8161);
+// EXTERNAL MODULE: external "node:util"
+var external_node_util_ = __nccwpck_require__(7975);
+// EXTERNAL MODULE: external "os"
+var external_os_ = __nccwpck_require__(857);
+;// CONCATENATED MODULE: external "node:fs/promises"
+const promises_namespaceObject = __WEBPACK_EXTERNAL_createRequire(import.meta.url)("node:fs/promises");
+// EXTERNAL MODULE: external "node:zlib"
+var external_node_zlib_ = __nccwpck_require__(8522);
+;// CONCATENATED MODULE: external "node:crypto"
+const external_node_crypto_namespaceObject = __WEBPACK_EXTERNAL_createRequire(import.meta.url)("node:crypto");
+;// CONCATENATED MODULE: external "node:timers/promises"
+const external_node_timers_promises_namespaceObject = __WEBPACK_EXTERNAL_createRequire(import.meta.url)("node:timers/promises");
+;// CONCATENATED MODULE: ./node_modules/.pnpm/@sindresorhus+is@7.0.1/node_modules/@sindresorhus/is/distribution/index.js
+const typedArrayTypeNames = [
+    'Int8Array',
+    'Uint8Array',
+    'Uint8ClampedArray',
+    'Int16Array',
+    'Uint16Array',
+    'Int32Array',
+    'Uint32Array',
+    'Float32Array',
+    'Float64Array',
+    'BigInt64Array',
+    'BigUint64Array',
+];
+function isTypedArrayName(name) {
+    return typedArrayTypeNames.includes(name);
+}
+const objectTypeNames = [
+    'Function',
+    'Generator',
+    'AsyncGenerator',
+    'GeneratorFunction',
+    'AsyncGeneratorFunction',
+    'AsyncFunction',
+    'Observable',
+    'Array',
+    'Buffer',
+    'Blob',
+    'Object',
+    'RegExp',
+    'Date',
+    'Error',
+    'Map',
+    'Set',
+    'WeakMap',
+    'WeakSet',
+    'WeakRef',
+    'ArrayBuffer',
+    'SharedArrayBuffer',
+    'DataView',
+    'Promise',
+    'URL',
+    'FormData',
+    'URLSearchParams',
+    'HTMLElement',
+    'NaN',
+    ...typedArrayTypeNames,
+];
+function isObjectTypeName(name) {
+    return objectTypeNames.includes(name);
+}
+const primitiveTypeNames = [
+    'null',
+    'undefined',
+    'string',
+    'number',
+    'bigint',
+    'boolean',
+    'symbol',
+];
+function isPrimitiveTypeName(name) {
+    return primitiveTypeNames.includes(name);
+}
+const assertionTypeDescriptions = [
+    'positive number',
+    'negative number',
+    'Class',
+    'string with a number',
+    'null or undefined',
+    'Iterable',
+    'AsyncIterable',
+    'native Promise',
+    'EnumCase',
+    'string with a URL',
+    'truthy',
+    'falsy',
+    'primitive',
+    'integer',
+    'plain object',
+    'TypedArray',
+    'array-like',
+    'tuple-like',
+    'Node.js Stream',
+    'infinite number',
+    'empty array',
+    'non-empty array',
+    'empty string',
+    'empty string or whitespace',
+    'non-empty string',
+    'non-empty string and not whitespace',
+    'empty object',
+    'non-empty object',
+    'empty set',
+    'non-empty set',
+    'empty map',
+    'non-empty map',
+    'PropertyKey',
+    'even integer',
+    'odd integer',
+    'T',
+    'in range',
+    'predicate returns truthy for any value',
+    'predicate returns truthy for all values',
+    'valid Date',
+    'valid length',
+    'whitespace string',
+    ...objectTypeNames,
+    ...primitiveTypeNames,
+];
+const getObjectType = (value) => {
+    const objectTypeName = Object.prototype.toString.call(value).slice(8, -1);
+    if (/HTML\w+Element/.test(objectTypeName) && isHtmlElement(value)) {
+        return 'HTMLElement';
+    }
+    if (isObjectTypeName(objectTypeName)) {
+        return objectTypeName;
+    }
+    return undefined;
+};
+function detect(value) {
+    if (value === null) {
+        return 'null';
+    }
+    switch (typeof value) {
+        case 'undefined': {
+            return 'undefined';
+        }
+        case 'string': {
+            return 'string';
+        }
+        case 'number': {
+            return Number.isNaN(value) ? 'NaN' : 'number';
+        }
+        case 'boolean': {
+            return 'boolean';
+        }
+        case 'function': {
+            return 'Function';
+        }
+        case 'bigint': {
+            return 'bigint';
+        }
+        case 'symbol': {
+            return 'symbol';
+        }
+        default:
+    }
+    if (isObservable(value)) {
+        return 'Observable';
+    }
+    if (isArray(value)) {
+        return 'Array';
+    }
+    if (isBuffer(value)) {
+        return 'Buffer';
+    }
+    const tagType = getObjectType(value);
+    if (tagType) {
+        return tagType;
+    }
+    if (value instanceof String || value instanceof Boolean || value instanceof Number) {
+        throw new TypeError('Please don\'t use object wrappers for primitive types');
+    }
+    return 'Object';
+}
+function hasPromiseApi(value) {
+    return isFunction(value?.then) && isFunction(value?.catch);
+}
+const is = Object.assign(detect, {
+    all: isAll,
+    any: isAny,
+    array: isArray,
+    arrayBuffer: isArrayBuffer,
+    arrayLike: isArrayLike,
+    asyncFunction: isAsyncFunction,
+    asyncGenerator: isAsyncGenerator,
+    asyncGeneratorFunction: isAsyncGeneratorFunction,
+    asyncIterable: isAsyncIterable,
+    bigint: isBigint,
+    bigInt64Array: isBigInt64Array,
+    bigUint64Array: isBigUint64Array,
+    blob: isBlob,
+    boolean: isBoolean,
+    boundFunction: isBoundFunction,
+    buffer: isBuffer,
+    class: isClass,
+    dataView: isDataView,
+    date: isDate,
+    detect,
+    directInstanceOf: isDirectInstanceOf,
+    emptyArray: isEmptyArray,
+    emptyMap: isEmptyMap,
+    emptyObject: isEmptyObject,
+    emptySet: isEmptySet,
+    emptyString: isEmptyString,
+    emptyStringOrWhitespace: isEmptyStringOrWhitespace,
+    enumCase: isEnumCase,
+    error: isError,
+    evenInteger: isEvenInteger,
+    falsy: isFalsy,
+    float32Array: isFloat32Array,
+    float64Array: isFloat64Array,
+    formData: isFormData,
+    function: isFunction,
+    generator: isGenerator,
+    generatorFunction: isGeneratorFunction,
+    htmlElement: isHtmlElement,
+    infinite: isInfinite,
+    inRange: isInRange,
+    int16Array: isInt16Array,
+    int32Array: isInt32Array,
+    int8Array: isInt8Array,
+    integer: isInteger,
+    iterable: isIterable,
+    map: isMap,
+    nan: isNan,
+    nativePromise: isNativePromise,
+    negativeNumber: isNegativeNumber,
+    nodeStream: isNodeStream,
+    nonEmptyArray: isNonEmptyArray,
+    nonEmptyMap: isNonEmptyMap,
+    nonEmptyObject: isNonEmptyObject,
+    nonEmptySet: isNonEmptySet,
+    nonEmptyString: isNonEmptyString,
+    nonEmptyStringAndNotWhitespace: isNonEmptyStringAndNotWhitespace,
+    null: isNull,
+    nullOrUndefined: isNullOrUndefined,
+    number: isNumber,
+    numericString: isNumericString,
+    object: isObject,
+    observable: isObservable,
+    oddInteger: isOddInteger,
+    plainObject: isPlainObject,
+    positiveNumber: isPositiveNumber,
+    primitive: isPrimitive,
+    promise: isPromise,
+    propertyKey: isPropertyKey,
+    regExp: isRegExp,
+    safeInteger: isSafeInteger,
+    set: isSet,
+    sharedArrayBuffer: isSharedArrayBuffer,
+    string: isString,
+    symbol: isSymbol,
+    truthy: isTruthy,
+    tupleLike: isTupleLike,
+    typedArray: isTypedArray,
+    uint16Array: isUint16Array,
+    uint32Array: isUint32Array,
+    uint8Array: isUint8Array,
+    uint8ClampedArray: isUint8ClampedArray,
+    undefined: isUndefined,
+    urlInstance: isUrlInstance,
+    urlSearchParams: isUrlSearchParams,
+    urlString: isUrlString,
+    validDate: isValidDate,
+    validLength: isValidLength,
+    weakMap: isWeakMap,
+    weakRef: isWeakRef,
+    weakSet: isWeakSet,
+    whitespaceString: isWhitespaceString,
+});
+function isAbsoluteModule2(remainder) {
+    return (value) => isInteger(value) && Math.abs(value % 2) === remainder;
+}
+function isAll(predicate, ...values) {
+    return predicateOnArray(Array.prototype.every, predicate, values);
+}
+function isAny(predicate, ...values) {
+    const predicates = isArray(predicate) ? predicate : [predicate];
+    return predicates.some(singlePredicate => predicateOnArray(Array.prototype.some, singlePredicate, values));
+}
+function isArray(value, assertion) {
+    if (!Array.isArray(value)) {
+        return false;
+    }
+    if (!isFunction(assertion)) {
+        return true;
+    }
+    // eslint-disable-next-line @typescript-eslint/no-unsafe-argument
+    return value.every(element => assertion(element));
+}
+function isArrayBuffer(value) {
+    return getObjectType(value) === 'ArrayBuffer';
+}
+function isArrayLike(value) {
+    return !isNullOrUndefined(value) && !isFunction(value) && isValidLength(value.length);
+}
+function isAsyncFunction(value) {
+    return getObjectType(value) === 'AsyncFunction';
+}
+function isAsyncGenerator(value) {
+    return isAsyncIterable(value) && isFunction(value.next) && isFunction(value.throw);
+}
+function isAsyncGeneratorFunction(value) {
+    return getObjectType(value) === 'AsyncGeneratorFunction';
+}
+function isAsyncIterable(value) {
+    return isFunction(value?.[Symbol.asyncIterator]);
+}
+function isBigint(value) {
+    return typeof value === 'bigint';
+}
+function isBigInt64Array(value) {
+    return getObjectType(value) === 'BigInt64Array';
+}
+function isBigUint64Array(value) {
+    return getObjectType(value) === 'BigUint64Array';
+}
+function isBlob(value) {
+    return getObjectType(value) === 'Blob';
+}
+function isBoolean(value) {
+    return value === true || value === false;
+}
+// eslint-disable-next-line @typescript-eslint/ban-types
+function isBoundFunction(value) {
+    return isFunction(value) && !Object.hasOwn(value, 'prototype');
+}
+/**
+Note: [Prefer using `Uint8Array` instead of `Buffer`.](https://sindresorhus.com/blog/goodbye-nodejs-buffer)
+*/
+function isBuffer(value) {
+    // eslint-disable-next-line @typescript-eslint/no-unsafe-return, @typescript-eslint/no-unsafe-call
+    return value?.constructor?.isBuffer?.(value) ?? false;
+}
+function isClass(value) {
+    return isFunction(value) && value.toString().startsWith('class ');
+}
+function isDataView(value) {
+    return getObjectType(value) === 'DataView';
+}
+function isDate(value) {
+    return getObjectType(value) === 'Date';
+}
+function isDirectInstanceOf(instance, class_) {
+    if (instance === undefined || instance === null) {
+        return false;
+    }
+    return Object.getPrototypeOf(instance) === class_.prototype;
+}
+function isEmptyArray(value) {
+    return isArray(value) && value.length === 0;
+}
+function isEmptyMap(value) {
+    return isMap(value) && value.size === 0;
+}
+function isEmptyObject(value) {
+    return isObject(value) && !isMap(value) && !isSet(value) && Object.keys(value).length === 0;
+}
+function isEmptySet(value) {
+    return isSet(value) && value.size === 0;
+}
+function isEmptyString(value) {
+    return isString(value) && value.length === 0;
+}
+function isEmptyStringOrWhitespace(value) {
+    return isEmptyString(value) || isWhitespaceString(value);
+}
+function isEnumCase(value, targetEnum) {
+    // eslint-disable-next-line @typescript-eslint/no-unsafe-argument
+    return Object.values(targetEnum).includes(value);
+}
+function isError(value) {
+    return getObjectType(value) === 'Error';
+}
+function isEvenInteger(value) {
+    return isAbsoluteModule2(0)(value);
+}
+// Example: `is.falsy = (value: unknown): value is (not true | 0 | '' | undefined | null) => Boolean(value);`
+function isFalsy(value) {
+    return !value;
+}
+function isFloat32Array(value) {
+    return getObjectType(value) === 'Float32Array';
+}
+function isFloat64Array(value) {
+    return getObjectType(value) === 'Float64Array';
+}
+function isFormData(value) {
+    return getObjectType(value) === 'FormData';
+}
+// eslint-disable-next-line @typescript-eslint/ban-types
+function isFunction(value) {
+    return typeof value === 'function';
+}
+function isGenerator(value) {
+    return isIterable(value) && isFunction(value?.next) && isFunction(value?.throw);
+}
+function isGeneratorFunction(value) {
+    return getObjectType(value) === 'GeneratorFunction';
+}
+// eslint-disable-next-line @typescript-eslint/naming-convention
+const NODE_TYPE_ELEMENT = 1;
+// eslint-disable-next-line @typescript-eslint/naming-convention
+const DOM_PROPERTIES_TO_CHECK = [
+    'innerHTML',
+    'ownerDocument',
+    'style',
+    'attributes',
+    'nodeValue',
+];
+function isHtmlElement(value) {
+    return isObject(value)
+        && value.nodeType === NODE_TYPE_ELEMENT
+        && isString(value.nodeName)
+        && !isPlainObject(value)
+        && DOM_PROPERTIES_TO_CHECK.every(property => property in value);
+}
+function isInfinite(value) {
+    return value === Number.POSITIVE_INFINITY || value === Number.NEGATIVE_INFINITY;
+}
+function isInRange(value, range) {
+    if (isNumber(range)) {
+        return value >= Math.min(0, range) && value <= Math.max(range, 0);
+    }
+    if (isArray(range) && range.length === 2) {
+        return value >= Math.min(...range) && value <= Math.max(...range);
+    }
+    throw new TypeError(`Invalid range: ${JSON.stringify(range)}`);
+}
+function isInt16Array(value) {
+    return getObjectType(value) === 'Int16Array';
+}
+function isInt32Array(value) {
+    return getObjectType(value) === 'Int32Array';
+}
+function isInt8Array(value) {
+    return getObjectType(value) === 'Int8Array';
+}
+function isInteger(value) {
+    return Number.isInteger(value);
+}
+function isIterable(value) {
+    return isFunction(value?.[Symbol.iterator]);
+}
+function isMap(value) {
+    return getObjectType(value) === 'Map';
+}
+function isNan(value) {
+    return Number.isNaN(value);
+}
+function isNativePromise(value) {
+    return getObjectType(value) === 'Promise';
+}
+function isNegativeNumber(value) {
+    return isNumber(value) && value < 0;
+}
+function isNodeStream(value) {
+    return isObject(value) && isFunction(value.pipe) && !isObservable(value);
+}
+function isNonEmptyArray(value) {
+    return isArray(value) && value.length > 0;
+}
+function isNonEmptyMap(value) {
+    return isMap(value) && value.size > 0;
+}
+// TODO: Use `not` operator here to remove `Map` and `Set` from type guard:
+// - https://github.com/Microsoft/TypeScript/pull/29317
+function isNonEmptyObject(value) {
+    return isObject(value) && !isMap(value) && !isSet(value) && Object.keys(value).length > 0;
+}
+function isNonEmptySet(value) {
+    return isSet(value) && value.size > 0;
+}
+// TODO: Use `not ''` when the `not` operator is available.
+function isNonEmptyString(value) {
+    return isString(value) && value.length > 0;
+}
+// TODO: Use `not ''` when the `not` operator is available.
+function isNonEmptyStringAndNotWhitespace(value) {
+    return isString(value) && !isEmptyStringOrWhitespace(value);
+}
+// eslint-disable-next-line @typescript-eslint/ban-types
+function isNull(value) {
+    return value === null;
+}
+// eslint-disable-next-line @typescript-eslint/ban-types
+function isNullOrUndefined(value) {
+    return isNull(value) || isUndefined(value);
+}
+function isNumber(value) {
+    return typeof value === 'number' && !Number.isNaN(value);
+}
+function isNumericString(value) {
+    return isString(value) && !isEmptyStringOrWhitespace(value) && !Number.isNaN(Number(value));
+}
+// eslint-disable-next-line @typescript-eslint/ban-types
+function isObject(value) {
+    return !isNull(value) && (typeof value === 'object' || isFunction(value));
+}
+function isObservable(value) {
+    if (!value) {
+        return false;
+    }
+    // eslint-disable-next-line no-use-extend-native/no-use-extend-native, @typescript-eslint/no-unsafe-call
+    if (value === value[Symbol.observable]?.()) {
+        return true;
+    }
+    // eslint-disable-next-line @typescript-eslint/no-unsafe-call
+    if (value === value['@@observable']?.()) {
+        return true;
+    }
+    return false;
+}
+function isOddInteger(value) {
+    return isAbsoluteModule2(1)(value);
+}
+function isPlainObject(value) {
+    // From: https://github.com/sindresorhus/is-plain-obj/blob/main/index.js
+    if (typeof value !== 'object' || value === null) {
+        return false;
+    }
+    // eslint-disable-next-line @typescript-eslint/no-unsafe-assignment
+    const prototype = Object.getPrototypeOf(value);
+    return (prototype === null || prototype === Object.prototype || Object.getPrototypeOf(prototype) === null) && !(Symbol.toStringTag in value) && !(Symbol.iterator in value);
+}
+function isPositiveNumber(value) {
+    return isNumber(value) && value > 0;
+}
+function isPrimitive(value) {
+    return isNull(value) || isPrimitiveTypeName(typeof value);
+}
+function isPromise(value) {
+    return isNativePromise(value) || hasPromiseApi(value);
+}
+// `PropertyKey` is any value that can be used as an object key (string, number, or symbol)
+function isPropertyKey(value) {
+    return isAny([isString, isNumber, isSymbol], value);
+}
+function isRegExp(value) {
+    return getObjectType(value) === 'RegExp';
+}
+function isSafeInteger(value) {
+    return Number.isSafeInteger(value);
+}
+function isSet(value) {
+    return getObjectType(value) === 'Set';
+}
+function isSharedArrayBuffer(value) {
+    return getObjectType(value) === 'SharedArrayBuffer';
+}
+function isString(value) {
+    return typeof value === 'string';
+}
+function isSymbol(value) {
+    return typeof value === 'symbol';
+}
+// Example: `is.truthy = (value: unknown): value is (not false | not 0 | not '' | not undefined | not null) => Boolean(value);`
+// eslint-disable-next-line unicorn/prefer-native-coercion-functions
+function isTruthy(value) {
+    return Boolean(value);
+}
+function isTupleLike(value, guards) {
+    if (isArray(guards) && isArray(value) && guards.length === value.length) {
+        return guards.every((guard, index) => guard(value[index]));
+    }
+    return false;
+}
+function isTypedArray(value) {
+    return isTypedArrayName(getObjectType(value));
+}
+function isUint16Array(value) {
+    return getObjectType(value) === 'Uint16Array';
+}
+function isUint32Array(value) {
+    return getObjectType(value) === 'Uint32Array';
+}
+function isUint8Array(value) {
+    return getObjectType(value) === 'Uint8Array';
+}
+function isUint8ClampedArray(value) {
+    return getObjectType(value) === 'Uint8ClampedArray';
+}
+function isUndefined(value) {
+    return value === undefined;
+}
+function isUrlInstance(value) {
+    return getObjectType(value) === 'URL';
+}
+// eslint-disable-next-line unicorn/prevent-abbreviations
+function isUrlSearchParams(value) {
+    return getObjectType(value) === 'URLSearchParams';
+}
+function isUrlString(value) {
+    if (!isString(value)) {
+        return false;
+    }
+    try {
+        new URL(value); // eslint-disable-line no-new
+        return true;
+    }
+    catch {
+        return false;
+    }
+}
+function isValidDate(value) {
+    return isDate(value) && !isNan(Number(value));
+}
+function isValidLength(value) {
+    return isSafeInteger(value) && value >= 0;
+}
+// eslint-disable-next-line @typescript-eslint/ban-types
+function isWeakMap(value) {
+    return getObjectType(value) === 'WeakMap';
+}
+// eslint-disable-next-line @typescript-eslint/ban-types, unicorn/prevent-abbreviations
+function isWeakRef(value) {
+    return getObjectType(value) === 'WeakRef';
+}
+// eslint-disable-next-line @typescript-eslint/ban-types
+function isWeakSet(value) {
+    return getObjectType(value) === 'WeakSet';
+}
+function isWhitespaceString(value) {
+    return isString(value) && /^\s+$/.test(value);
+}
+function predicateOnArray(method, predicate, values) {
+    if (!isFunction(predicate)) {
+        throw new TypeError(`Invalid predicate: ${JSON.stringify(predicate)}`);
+    }
+    if (values.length === 0) {
+        throw new TypeError('Invalid number of values');
+    }
+    return method.call(values, predicate);
+}
+function typeErrorMessage(description, value) {
+    return `Expected value which is \`${description}\`, received value of type \`${is(value)}\`.`;
+}
+function unique(values) {
+    // eslint-disable-next-line unicorn/prefer-spread
+    return Array.from(new Set(values));
+}
+const andFormatter = new Intl.ListFormat('en', { style: 'long', type: 'conjunction' });
+const orFormatter = new Intl.ListFormat('en', { style: 'long', type: 'disjunction' });
+function typeErrorMessageMultipleValues(expectedType, values) {
+    const uniqueExpectedTypes = unique((isArray(expectedType) ? expectedType : [expectedType]).map(value => `\`${value}\``));
+    const uniqueValueTypes = unique(values.map(value => `\`${is(value)}\``));
+    return `Expected values which are ${orFormatter.format(uniqueExpectedTypes)}. Received values of type${uniqueValueTypes.length > 1 ? 's' : ''} ${andFormatter.format(uniqueValueTypes)}.`;
+}
+const assert = {
+    all: assertAll,
+    any: assertAny,
+    array: assertArray,
+    arrayBuffer: assertArrayBuffer,
+    arrayLike: assertArrayLike,
+    asyncFunction: assertAsyncFunction,
+    asyncGenerator: assertAsyncGenerator,
+    asyncGeneratorFunction: assertAsyncGeneratorFunction,
+    asyncIterable: assertAsyncIterable,
+    bigint: assertBigint,
+    bigInt64Array: assertBigInt64Array,
+    bigUint64Array: assertBigUint64Array,
+    blob: assertBlob,
+    boolean: assertBoolean,
+    boundFunction: assertBoundFunction,
+    buffer: assertBuffer,
+    class: assertClass,
+    dataView: assertDataView,
+    date: assertDate,
+    directInstanceOf: assertDirectInstanceOf,
+    emptyArray: assertEmptyArray,
+    emptyMap: assertEmptyMap,
+    emptyObject: assertEmptyObject,
+    emptySet: assertEmptySet,
+    emptyString: assertEmptyString,
+    emptyStringOrWhitespace: assertEmptyStringOrWhitespace,
+    enumCase: assertEnumCase,
+    error: assertError,
+    evenInteger: assertEvenInteger,
+    falsy: assertFalsy,
+    float32Array: assertFloat32Array,
+    float64Array: assertFloat64Array,
+    formData: assertFormData,
+    function: assertFunction,
+    generator: assertGenerator,
+    generatorFunction: assertGeneratorFunction,
+    htmlElement: assertHtmlElement,
+    infinite: assertInfinite,
+    inRange: assertInRange,
+    int16Array: assertInt16Array,
+    int32Array: assertInt32Array,
+    int8Array: assertInt8Array,
+    integer: assertInteger,
+    iterable: assertIterable,
+    map: assertMap,
+    nan: assertNan,
+    nativePromise: assertNativePromise,
+    negativeNumber: assertNegativeNumber,
+    nodeStream: assertNodeStream,
+    nonEmptyArray: assertNonEmptyArray,
+    nonEmptyMap: assertNonEmptyMap,
+    nonEmptyObject: assertNonEmptyObject,
+    nonEmptySet: assertNonEmptySet,
+    nonEmptyString: assertNonEmptyString,
+    nonEmptyStringAndNotWhitespace: assertNonEmptyStringAndNotWhitespace,
+    null: assertNull,
+    nullOrUndefined: assertNullOrUndefined,
+    number: assertNumber,
+    numericString: assertNumericString,
+    object: assertObject,
+    observable: assertObservable,
+    oddInteger: assertOddInteger,
+    plainObject: assertPlainObject,
+    positiveNumber: assertPositiveNumber,
+    primitive: assertPrimitive,
+    promise: assertPromise,
+    propertyKey: assertPropertyKey,
+    regExp: assertRegExp,
+    safeInteger: assertSafeInteger,
+    set: assertSet,
+    sharedArrayBuffer: assertSharedArrayBuffer,
+    string: assertString,
+    symbol: assertSymbol,
+    truthy: assertTruthy,
+    tupleLike: assertTupleLike,
+    typedArray: assertTypedArray,
+    uint16Array: assertUint16Array,
+    uint32Array: assertUint32Array,
+    uint8Array: assertUint8Array,
+    uint8ClampedArray: assertUint8ClampedArray,
+    undefined: assertUndefined,
+    urlInstance: assertUrlInstance,
+    urlSearchParams: assertUrlSearchParams,
+    urlString: assertUrlString,
+    validDate: assertValidDate,
+    validLength: assertValidLength,
+    weakMap: assertWeakMap,
+    weakRef: assertWeakRef,
+    weakSet: assertWeakSet,
+    whitespaceString: assertWhitespaceString,
+};
+const methodTypeMap = {
+    isArray: 'Array',
+    isArrayBuffer: 'ArrayBuffer',
+    isArrayLike: 'array-like',
+    isAsyncFunction: 'AsyncFunction',
+    isAsyncGenerator: 'AsyncGenerator',
+    isAsyncGeneratorFunction: 'AsyncGeneratorFunction',
+    isAsyncIterable: 'AsyncIterable',
+    isBigint: 'bigint',
+    isBigInt64Array: 'BigInt64Array',
+    isBigUint64Array: 'BigUint64Array',
+    isBlob: 'Blob',
+    isBoolean: 'boolean',
+    isBoundFunction: 'Function',
+    isBuffer: 'Buffer',
+    isClass: 'Class',
+    isDataView: 'DataView',
+    isDate: 'Date',
+    isDirectInstanceOf: 'T',
+    isEmptyArray: 'empty array',
+    isEmptyMap: 'empty map',
+    isEmptyObject: 'empty object',
+    isEmptySet: 'empty set',
+    isEmptyString: 'empty string',
+    isEmptyStringOrWhitespace: 'empty string or whitespace',
+    isEnumCase: 'EnumCase',
+    isError: 'Error',
+    isEvenInteger: 'even integer',
+    isFalsy: 'falsy',
+    isFloat32Array: 'Float32Array',
+    isFloat64Array: 'Float64Array',
+    isFormData: 'FormData',
+    isFunction: 'Function',
+    isGenerator: 'Generator',
+    isGeneratorFunction: 'GeneratorFunction',
+    isHtmlElement: 'HTMLElement',
+    isInfinite: 'infinite number',
+    isInRange: 'in range',
+    isInt16Array: 'Int16Array',
+    isInt32Array: 'Int32Array',
+    isInt8Array: 'Int8Array',
+    isInteger: 'integer',
+    isIterable: 'Iterable',
+    isMap: 'Map',
+    isNan: 'NaN',
+    isNativePromise: 'native Promise',
+    isNegativeNumber: 'negative number',
+    isNodeStream: 'Node.js Stream',
+    isNonEmptyArray: 'non-empty array',
+    isNonEmptyMap: 'non-empty map',
+    isNonEmptyObject: 'non-empty object',
+    isNonEmptySet: 'non-empty set',
+    isNonEmptyString: 'non-empty string',
+    isNonEmptyStringAndNotWhitespace: 'non-empty string and not whitespace',
+    isNull: 'null',
+    isNullOrUndefined: 'null or undefined',
+    isNumber: 'number',
+    isNumericString: 'string with a number',
+    isObject: 'Object',
+    isObservable: 'Observable',
+    isOddInteger: 'odd integer',
+    isPlainObject: 'plain object',
+    isPositiveNumber: 'positive number',
+    isPrimitive: 'primitive',
+    isPromise: 'Promise',
+    isPropertyKey: 'PropertyKey',
+    isRegExp: 'RegExp',
+    isSafeInteger: 'integer',
+    isSet: 'Set',
+    isSharedArrayBuffer: 'SharedArrayBuffer',
+    isString: 'string',
+    isSymbol: 'symbol',
+    isTruthy: 'truthy',
+    isTupleLike: 'tuple-like',
+    isTypedArray: 'TypedArray',
+    isUint16Array: 'Uint16Array',
+    isUint32Array: 'Uint32Array',
+    isUint8Array: 'Uint8Array',
+    isUint8ClampedArray: 'Uint8ClampedArray',
+    isUndefined: 'undefined',
+    isUrlInstance: 'URL',
+    isUrlSearchParams: 'URLSearchParams',
+    isUrlString: 'string with a URL',
+    isValidDate: 'valid Date',
+    isValidLength: 'valid length',
+    isWeakMap: 'WeakMap',
+    isWeakRef: 'WeakRef',
+    isWeakSet: 'WeakSet',
+    isWhitespaceString: 'whitespace string',
+};
+function keysOf(value) {
+    return Object.keys(value);
+}
+const isMethodNames = keysOf(methodTypeMap);
+function isIsMethodName(value) {
+    return isMethodNames.includes(value);
+}
+function assertAll(predicate, ...values) {
+    if (!isAll(predicate, ...values)) {
+        const expectedType = isIsMethodName(predicate.name) ? methodTypeMap[predicate.name] : 'predicate returns truthy for all values';
+        throw new TypeError(typeErrorMessageMultipleValues(expectedType, values));
+    }
+}
+function assertAny(predicate, ...values) {
+    if (!isAny(predicate, ...values)) {
+        const predicates = isArray(predicate) ? predicate : [predicate];
+        const expectedTypes = predicates.map(predicate => isIsMethodName(predicate.name) ? methodTypeMap[predicate.name] : 'predicate returns truthy for any value');
+        throw new TypeError(typeErrorMessageMultipleValues(expectedTypes, values));
+    }
+}
+function assertArray(value, assertion, message) {
+    if (!isArray(value)) {
+        throw new TypeError(message ?? typeErrorMessage('Array', value));
+    }
+    if (assertion) {
+        for (const element of value) {
+            // @ts-expect-error: "Assertions require every name in the call target to be declared with an explicit type annotation."
+            assertion(element, message);
+        }
+    }
+}
+function assertArrayBuffer(value, message) {
+    if (!isArrayBuffer(value)) {
+        throw new TypeError(message ?? typeErrorMessage('ArrayBuffer', value));
+    }
+}
+function assertArrayLike(value, message) {
+    if (!isArrayLike(value)) {
+        throw new TypeError(message ?? typeErrorMessage('array-like', value));
+    }
+}
+// eslint-disable-next-line @typescript-eslint/ban-types
+function assertAsyncFunction(value, message) {
+    if (!isAsyncFunction(value)) {
+        throw new TypeError(message ?? typeErrorMessage('AsyncFunction', value));
+    }
+}
+function assertAsyncGenerator(value, message) {
+    if (!isAsyncGenerator(value)) {
+        throw new TypeError(message ?? typeErrorMessage('AsyncGenerator', value));
+    }
+}
+function assertAsyncGeneratorFunction(value, message) {
+    if (!isAsyncGeneratorFunction(value)) {
+        throw new TypeError(message ?? typeErrorMessage('AsyncGeneratorFunction', value));
+    }
+}
+function assertAsyncIterable(value, message) {
+    if (!isAsyncIterable(value)) {
+        throw new TypeError(message ?? typeErrorMessage('AsyncIterable', value));
+    }
+}
+function assertBigint(value, message) {
+    if (!isBigint(value)) {
+        throw new TypeError(message ?? typeErrorMessage('bigint', value));
+    }
+}
+function assertBigInt64Array(value, message) {
+    if (!isBigInt64Array(value)) {
+        throw new TypeError(message ?? typeErrorMessage('BigInt64Array', value));
+    }
+}
+function assertBigUint64Array(value, message) {
+    if (!isBigUint64Array(value)) {
+        throw new TypeError(message ?? typeErrorMessage('BigUint64Array', value));
+    }
+}
+function assertBlob(value, message) {
+    if (!isBlob(value)) {
+        throw new TypeError(message ?? typeErrorMessage('Blob', value));
+    }
+}
+function assertBoolean(value, message) {
+    if (!isBoolean(value)) {
+        throw new TypeError(message ?? typeErrorMessage('boolean', value));
+    }
+}
+// eslint-disable-next-line @typescript-eslint/ban-types
+function assertBoundFunction(value, message) {
+    if (!isBoundFunction(value)) {
+        throw new TypeError(message ?? typeErrorMessage('Function', value));
+    }
+}
+/**
+Note: [Prefer using `Uint8Array` instead of `Buffer`.](https://sindresorhus.com/blog/goodbye-nodejs-buffer)
+*/
+function assertBuffer(value, message) {
+    if (!isBuffer(value)) {
+        throw new TypeError(message ?? typeErrorMessage('Buffer', value));
+    }
+}
+function assertClass(value, message) {
+    if (!isClass(value)) {
+        throw new TypeError(message ?? typeErrorMessage('Class', value));
+    }
+}
+function assertDataView(value, message) {
+    if (!isDataView(value)) {
+        throw new TypeError(message ?? typeErrorMessage('DataView', value));
+    }
+}
+function assertDate(value, message) {
+    if (!isDate(value)) {
+        throw new TypeError(message ?? typeErrorMessage('Date', value));
+    }
+}
+function assertDirectInstanceOf(instance, class_, message) {
+    if (!isDirectInstanceOf(instance, class_)) {
+        throw new TypeError(message ?? typeErrorMessage('T', instance));
+    }
+}
+function assertEmptyArray(value, message) {
+    if (!isEmptyArray(value)) {
+        throw new TypeError(message ?? typeErrorMessage('empty array', value));
+    }
+}
+function assertEmptyMap(value, message) {
+    if (!isEmptyMap(value)) {
+        throw new TypeError(message ?? typeErrorMessage('empty map', value));
+    }
+}
+function assertEmptyObject(value, message) {
+    if (!isEmptyObject(value)) {
+        throw new TypeError(message ?? typeErrorMessage('empty object', value));
+    }
+}
+function assertEmptySet(value, message) {
+    if (!isEmptySet(value)) {
+        throw new TypeError(message ?? typeErrorMessage('empty set', value));
+    }
+}
+function assertEmptyString(value, message) {
+    if (!isEmptyString(value)) {
+        throw new TypeError(message ?? typeErrorMessage('empty string', value));
+    }
+}
+function assertEmptyStringOrWhitespace(value, message) {
+    if (!isEmptyStringOrWhitespace(value)) {
+        throw new TypeError(message ?? typeErrorMessage('empty string or whitespace', value));
+    }
+}
+function assertEnumCase(value, targetEnum, message) {
+    if (!isEnumCase(value, targetEnum)) {
+        throw new TypeError(message ?? typeErrorMessage('EnumCase', value));
+    }
+}
+function assertError(value, message) {
+    if (!isError(value)) {
+        throw new TypeError(message ?? typeErrorMessage('Error', value));
+    }
+}
+function assertEvenInteger(value, message) {
+    if (!isEvenInteger(value)) {
+        throw new TypeError(message ?? typeErrorMessage('even integer', value));
+    }
+}
+function assertFalsy(value, message) {
+    if (!isFalsy(value)) {
+        throw new TypeError(message ?? typeErrorMessage('falsy', value));
+    }
+}
+function assertFloat32Array(value, message) {
+    if (!isFloat32Array(value)) {
+        throw new TypeError(message ?? typeErrorMessage('Float32Array', value));
+    }
+}
+function assertFloat64Array(value, message) {
+    if (!isFloat64Array(value)) {
+        throw new TypeError(message ?? typeErrorMessage('Float64Array', value));
+    }
+}
+function assertFormData(value, message) {
+    if (!isFormData(value)) {
+        throw new TypeError(message ?? typeErrorMessage('FormData', value));
+    }
+}
+// eslint-disable-next-line @typescript-eslint/ban-types
+function assertFunction(value, message) {
+    if (!isFunction(value)) {
+        throw new TypeError(message ?? typeErrorMessage('Function', value));
+    }
+}
+function assertGenerator(value, message) {
+    if (!isGenerator(value)) {
+        throw new TypeError(message ?? typeErrorMessage('Generator', value));
+    }
+}
+function assertGeneratorFunction(value, message) {
+    if (!isGeneratorFunction(value)) {
+        throw new TypeError(message ?? typeErrorMessage('GeneratorFunction', value));
+    }
+}
+function assertHtmlElement(value, message) {
+    if (!isHtmlElement(value)) {
+        throw new TypeError(message ?? typeErrorMessage('HTMLElement', value));
+    }
+}
+function assertInfinite(value, message) {
+    if (!isInfinite(value)) {
+        throw new TypeError(message ?? typeErrorMessage('infinite number', value));
+    }
+}
+function assertInRange(value, range, message) {
+    if (!isInRange(value, range)) {
+        throw new TypeError(message ?? typeErrorMessage('in range', value));
+    }
+}
+function assertInt16Array(value, message) {
+    if (!isInt16Array(value)) {
+        throw new TypeError(message ?? typeErrorMessage('Int16Array', value));
+    }
+}
+function assertInt32Array(value, message) {
+    if (!isInt32Array(value)) {
+        throw new TypeError(message ?? typeErrorMessage('Int32Array', value));
+    }
+}
+function assertInt8Array(value, message) {
+    if (!isInt8Array(value)) {
+        throw new TypeError(message ?? typeErrorMessage('Int8Array', value));
+    }
+}
+function assertInteger(value, message) {
+    if (!isInteger(value)) {
+        throw new TypeError(message ?? typeErrorMessage('integer', value));
+    }
+}
+function assertIterable(value, message) {
+    if (!isIterable(value)) {
+        throw new TypeError(message ?? typeErrorMessage('Iterable', value));
+    }
+}
+function assertMap(value, message) {
+    if (!isMap(value)) {
+        throw new TypeError(message ?? typeErrorMessage('Map', value));
+    }
+}
+function assertNan(value, message) {
+    if (!isNan(value)) {
+        throw new TypeError(message ?? typeErrorMessage('NaN', value));
+    }
+}
+function assertNativePromise(value, message) {
+    if (!isNativePromise(value)) {
+        throw new TypeError(message ?? typeErrorMessage('native Promise', value));
+    }
+}
+function assertNegativeNumber(value, message) {
+    if (!isNegativeNumber(value)) {
+        throw new TypeError(message ?? typeErrorMessage('negative number', value));
+    }
+}
+function assertNodeStream(value, message) {
+    if (!isNodeStream(value)) {
+        throw new TypeError(message ?? typeErrorMessage('Node.js Stream', value));
+    }
+}
+function assertNonEmptyArray(value, message) {
+    if (!isNonEmptyArray(value)) {
+        throw new TypeError(message ?? typeErrorMessage('non-empty array', value));
+    }
+}
+function assertNonEmptyMap(value, message) {
+    if (!isNonEmptyMap(value)) {
+        throw new TypeError(message ?? typeErrorMessage('non-empty map', value));
+    }
+}
+function assertNonEmptyObject(value, message) {
+    if (!isNonEmptyObject(value)) {
+        throw new TypeError(message ?? typeErrorMessage('non-empty object', value));
+    }
+}
+function assertNonEmptySet(value, message) {
+    if (!isNonEmptySet(value)) {
+        throw new TypeError(message ?? typeErrorMessage('non-empty set', value));
+    }
+}
+function assertNonEmptyString(value, message) {
+    if (!isNonEmptyString(value)) {
+        throw new TypeError(message ?? typeErrorMessage('non-empty string', value));
+    }
+}
+function assertNonEmptyStringAndNotWhitespace(value, message) {
+    if (!isNonEmptyStringAndNotWhitespace(value)) {
+        throw new TypeError(message ?? typeErrorMessage('non-empty string and not whitespace', value));
+    }
+}
+// eslint-disable-next-line @typescript-eslint/ban-types
+function assertNull(value, message) {
+    if (!isNull(value)) {
+        throw new TypeError(message ?? typeErrorMessage('null', value));
+    }
+}
+// eslint-disable-next-line @typescript-eslint/ban-types
+function assertNullOrUndefined(value, message) {
+    if (!isNullOrUndefined(value)) {
+        throw new TypeError(message ?? typeErrorMessage('null or undefined', value));
+    }
+}
+function assertNumber(value, message) {
+    if (!isNumber(value)) {
+        throw new TypeError(message ?? typeErrorMessage('number', value));
+    }
+}
+function assertNumericString(value, message) {
+    if (!isNumericString(value)) {
+        throw new TypeError(message ?? typeErrorMessage('string with a number', value));
+    }
+}
+// eslint-disable-next-line @typescript-eslint/ban-types
+function assertObject(value, message) {
+    if (!isObject(value)) {
+        throw new TypeError(message ?? typeErrorMessage('Object', value));
+    }
+}
+function assertObservable(value, message) {
+    if (!isObservable(value)) {
+        throw new TypeError(message ?? typeErrorMessage('Observable', value));
+    }
+}
+function assertOddInteger(value, message) {
+    if (!isOddInteger(value)) {
+        throw new TypeError(message ?? typeErrorMessage('odd integer', value));
+    }
+}
+function assertPlainObject(value, message) {
+    if (!isPlainObject(value)) {
+        throw new TypeError(message ?? typeErrorMessage('plain object', value));
+    }
+}
+function assertPositiveNumber(value, message) {
+    if (!isPositiveNumber(value)) {
+        throw new TypeError(message ?? typeErrorMessage('positive number', value));
+    }
+}
+function assertPrimitive(value, message) {
+    if (!isPrimitive(value)) {
+        throw new TypeError(message ?? typeErrorMessage('primitive', value));
+    }
+}
+function assertPromise(value, message) {
+    if (!isPromise(value)) {
+        throw new TypeError(message ?? typeErrorMessage('Promise', value));
+    }
+}
+function assertPropertyKey(value, message) {
+    if (!isPropertyKey(value)) {
+        throw new TypeError(message ?? typeErrorMessage('PropertyKey', value));
+    }
+}
+function assertRegExp(value, message) {
+    if (!isRegExp(value)) {
+        throw new TypeError(message ?? typeErrorMessage('RegExp', value));
+    }
+}
+function assertSafeInteger(value, message) {
+    if (!isSafeInteger(value)) {
+        throw new TypeError(message ?? typeErrorMessage('integer', value));
+    }
+}
+function assertSet(value, message) {
+    if (!isSet(value)) {
+        throw new TypeError(message ?? typeErrorMessage('Set', value));
+    }
+}
+function assertSharedArrayBuffer(value, message) {
+    if (!isSharedArrayBuffer(value)) {
+        throw new TypeError(message ?? typeErrorMessage('SharedArrayBuffer', value));
+    }
+}
+function assertString(value, message) {
+    if (!isString(value)) {
+        throw new TypeError(message ?? typeErrorMessage('string', value));
+    }
+}
+function assertSymbol(value, message) {
+    if (!isSymbol(value)) {
+        throw new TypeError(message ?? typeErrorMessage('symbol', value));
+    }
+}
+function assertTruthy(value, message) {
+    if (!isTruthy(value)) {
+        throw new TypeError(message ?? typeErrorMessage('truthy', value));
+    }
+}
+function assertTupleLike(value, guards, message) {
+    if (!isTupleLike(value, guards)) {
+        throw new TypeError(message ?? typeErrorMessage('tuple-like', value));
+    }
+}
+function assertTypedArray(value, message) {
+    if (!isTypedArray(value)) {
+        throw new TypeError(message ?? typeErrorMessage('TypedArray', value));
+    }
+}
+function assertUint16Array(value, message) {
+    if (!isUint16Array(value)) {
+        throw new TypeError(message ?? typeErrorMessage('Uint16Array', value));
+    }
+}
+function assertUint32Array(value, message) {
+    if (!isUint32Array(value)) {
+        throw new TypeError(message ?? typeErrorMessage('Uint32Array', value));
+    }
+}
+function assertUint8Array(value, message) {
+    if (!isUint8Array(value)) {
+        throw new TypeError(message ?? typeErrorMessage('Uint8Array', value));
+    }
+}
+function assertUint8ClampedArray(value, message) {
+    if (!isUint8ClampedArray(value)) {
+        throw new TypeError(message ?? typeErrorMessage('Uint8ClampedArray', value));
+    }
+}
+function assertUndefined(value, message) {
+    if (!isUndefined(value)) {
+        throw new TypeError(message ?? typeErrorMessage('undefined', value));
+    }
+}
+function assertUrlInstance(value, message) {
+    if (!isUrlInstance(value)) {
+        throw new TypeError(message ?? typeErrorMessage('URL', value));
+    }
+}
+// eslint-disable-next-line unicorn/prevent-abbreviations
+function assertUrlSearchParams(value, message) {
+    if (!isUrlSearchParams(value)) {
+        throw new TypeError(message ?? typeErrorMessage('URLSearchParams', value));
+    }
+}
+function assertUrlString(value, message) {
+    if (!isUrlString(value)) {
+        throw new TypeError(message ?? typeErrorMessage('string with a URL', value));
+    }
+}
+function assertValidDate(value, message) {
+    if (!isValidDate(value)) {
+        throw new TypeError(message ?? typeErrorMessage('valid Date', value));
+    }
+}
+function assertValidLength(value, message) {
+    if (!isValidLength(value)) {
+        throw new TypeError(message ?? typeErrorMessage('valid length', value));
+    }
+}
+// eslint-disable-next-line @typescript-eslint/ban-types
+function assertWeakMap(value, message) {
+    if (!isWeakMap(value)) {
+        throw new TypeError(message ?? typeErrorMessage('WeakMap', value));
+    }
+}
+// eslint-disable-next-line @typescript-eslint/ban-types, unicorn/prevent-abbreviations
+function assertWeakRef(value, message) {
+    if (!isWeakRef(value)) {
+        throw new TypeError(message ?? typeErrorMessage('WeakRef', value));
+    }
+}
+// eslint-disable-next-line @typescript-eslint/ban-types
+function assertWeakSet(value, message) {
+    if (!isWeakSet(value)) {
+        throw new TypeError(message ?? typeErrorMessage('WeakSet', value));
+    }
+}
+function assertWhitespaceString(value, message) {
+    if (!isWhitespaceString(value)) {
+        throw new TypeError(message ?? typeErrorMessage('whitespace string', value));
+    }
+}
+/* harmony default export */ const distribution = (is);
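+
+// Editor's note (illustrative, not part of the upstream bundle): `distribution` is the default
+// export of @sindresorhus/is — a type detector that doubles as a namespace of guards, with a
+// parallel `assert` object that throws a TypeError on mismatch. For example:
+//   distribution('x')                 // => 'string'
+//   distribution.nonEmptyString('x')  // => true
+//   distribution.inRange(5, [0, 10])  // => true
+//   assert.plainObject({})            // passes; assert.plainObject([]) throws TypeError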
+
+// EXTERNAL MODULE: external "node:events"
+var external_node_events_ = __nccwpck_require__(8474);
+;// CONCATENATED MODULE: ./node_modules/.pnpm/p-cancelable@4.0.1/node_modules/p-cancelable/index.js
+class CancelError extends Error {
+	constructor(reason) {
+		super(reason || 'Promise was canceled');
+		this.name = 'CancelError';
+	}
+
+	get isCanceled() {
+		return true;
+	}
+}
+
+const promiseState = Object.freeze({
+	pending: Symbol('pending'),
+	canceled: Symbol('canceled'),
+	resolved: Symbol('resolved'),
+	rejected: Symbol('rejected'),
+});
+
+class PCancelable {
+	static fn(userFunction) {
+		return (...arguments_) => new PCancelable((resolve, reject, onCancel) => {
+			arguments_.push(onCancel);
+			userFunction(...arguments_).then(resolve, reject);
+		});
+	}
+
+	#cancelHandlers = [];
+	#rejectOnCancel = true;
+	#state = promiseState.pending;
+	#promise;
+	#reject;
+
+	constructor(executor) {
+		this.#promise = new Promise((resolve, reject) => {
+			this.#reject = reject;
+
+			const onResolve = value => {
+				if (this.#state !== promiseState.canceled || !onCancel.shouldReject) {
+					resolve(value);
+					this.#setState(promiseState.resolved);
+				}
+			};
+
+			const onReject = error => {
+				if (this.#state !== promiseState.canceled || !onCancel.shouldReject) {
+					reject(error);
+					this.#setState(promiseState.rejected);
+				}
+			};
+
+			const onCancel = handler => {
+				if (this.#state !== promiseState.pending) {
+					throw new Error(`The \`onCancel\` handler was attached after the promise ${this.#state.description}.`);
+				}
+
+				this.#cancelHandlers.push(handler);
+			};
+
+			Object.defineProperties(onCancel, {
+				shouldReject: {
+					get: () => this.#rejectOnCancel,
+					set: boolean => {
+						this.#rejectOnCancel = boolean;
+					},
+				},
+			});
+
+			executor(onResolve, onReject, onCancel);
+		});
+	}
+
+	// eslint-disable-next-line unicorn/no-thenable
+	then(onFulfilled, onRejected) {
+		return this.#promise.then(onFulfilled, onRejected);
+	}
+
+	catch(onRejected) {
+		return this.#promise.catch(onRejected);
+	}
+
+	finally(onFinally) {
+		return this.#promise.finally(onFinally);
+	}
+
+	cancel(reason) {
+		if (this.#state !== promiseState.pending) {
+			return;
+		}
+
+		this.#setState(promiseState.canceled);
+
+		if (this.#cancelHandlers.length > 0) {
+			try {
+				for (const handler of this.#cancelHandlers) {
+					handler();
+				}
+			} catch (error) {
+				this.#reject(error);
+				return;
+			}
+		}
+
+		if (this.#rejectOnCancel) {
+			this.#reject(new CancelError(reason));
+		}
+	}
+
+	get isCanceled() {
+		return this.#state === promiseState.canceled;
+	}
+
+	#setState(state) {
+		if (this.#state === promiseState.pending) {
+			this.#state = state;
+		}
+	}
+}
+
+Object.setPrototypeOf(PCancelable.prototype, Promise.prototype);
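+
+// Editor's note (illustrative, not part of the upstream bundle): a PCancelable behaves like a
+// Promise with a cancel() method; cancelling runs the registered onCancel handlers and, unless
+// onCancel.shouldReject was set to false, rejects with CancelError. For example:
+//   const p = new PCancelable((resolve, reject, onCancel) => {
+//     const t = setTimeout(() => resolve('done'), 1000);
+//     onCancel(() => clearTimeout(t));
+//   });
+//   p.cancel('not needed');
+//   p.catch(error => console.log(error instanceof CancelError, p.isCanceled)); // true true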
+
+;// CONCATENATED MODULE: ./node_modules/.pnpm/got@14.4.4/node_modules/got/dist/source/core/errors.js
+
+// A hacky check to prevent circular references.
+function isRequest(x) {
+    return distribution.object(x) && '_onResponse' in x;
+}
+/**
+An error to be thrown when a request fails.
+Contains a `code` property with error class code, like `ECONNREFUSED`.
+*/
+class RequestError extends Error {
+    input;
+    code;
+    stack;
+    response;
+    request;
+    timings;
+    constructor(message, error, self) {
+        super(message, { cause: error });
+        Error.captureStackTrace(this, this.constructor);
+        this.name = 'RequestError';
+        this.code = error.code ?? 'ERR_GOT_REQUEST_ERROR';
+        this.input = error.input;
+        if (isRequest(self)) {
+            Object.defineProperty(this, 'request', {
+                enumerable: false,
+                value: self,
+            });
+            Object.defineProperty(this, 'response', {
+                enumerable: false,
+                value: self.response,
+            });
+            this.options = self.options;
+        }
+        else {
+            this.options = self;
+        }
+        this.timings = this.request?.timings;
+        // Recover the original stacktrace
+        if (distribution.string(error.stack) && distribution.string(this.stack)) {
+            const indexOfMessage = this.stack.indexOf(this.message) + this.message.length;
+            const thisStackTrace = this.stack.slice(indexOfMessage).split('\n').reverse();
+            const errorStackTrace = error.stack.slice(error.stack.indexOf(error.message) + error.message.length).split('\n').reverse();
+            // Remove duplicated traces
+            while (errorStackTrace.length > 0 && errorStackTrace[0] === thisStackTrace[0]) {
+                thisStackTrace.shift();
+            }
+            this.stack = `${this.stack.slice(0, indexOfMessage)}${thisStackTrace.reverse().join('\n')}${errorStackTrace.reverse().join('\n')}`;
+        }
+    }
+}
+/**
+An error to be thrown when the server redirects you more than ten times.
+Includes a `response` property.
+*/
+class MaxRedirectsError extends RequestError {
+    constructor(request) {
+        super(`Redirected ${request.options.maxRedirects} times. Aborting.`, {}, request);
+        this.name = 'MaxRedirectsError';
+        this.code = 'ERR_TOO_MANY_REDIRECTS';
+    }
+}
+/**
+An error to be thrown when the server response code is not 2xx nor 3xx if `options.followRedirect` is `true`, but always except for 304.
+Includes a `response` property.
+*/
+// TODO: Change `HTTPError<T = any>` to `HTTPError<T = unknown>` in the next major version to enforce type usage.
+// eslint-disable-next-line @typescript-eslint/naming-convention
+class HTTPError extends RequestError {
+    constructor(response) {
+        super(`Response code ${response.statusCode} (${response.statusMessage})`, {}, response.request);
+        this.name = 'HTTPError';
+        this.code = 'ERR_NON_2XX_3XX_RESPONSE';
+    }
+}
+/**
+An error to be thrown when a cache method fails.
+For example, if the database goes down or there's a filesystem error.
+*/
+class CacheError extends RequestError {
+    constructor(error, request) {
+        super(error.message, error, request);
+        this.name = 'CacheError';
+        this.code = this.code === 'ERR_GOT_REQUEST_ERROR' ? 'ERR_CACHE_ACCESS' : this.code;
+    }
+}
+/**
+An error to be thrown when the request body is a stream and an error occurs while reading from that stream.
+*/
+class UploadError extends RequestError {
+    constructor(error, request) {
+        super(error.message, error, request);
+        this.name = 'UploadError';
+        this.code = this.code === 'ERR_GOT_REQUEST_ERROR' ? 'ERR_UPLOAD' : this.code;
+    }
+}
+/**
+An error to be thrown when the request is aborted due to a timeout.
+Includes an `event` and `timings` property.
+*/
+class TimeoutError extends RequestError {
+    timings;
+    event;
+    constructor(error, timings, request) {
+        super(error.message, error, request);
+        this.name = 'TimeoutError';
+        this.event = error.event;
+        this.timings = timings;
+    }
+}
+/**
+An error to be thrown when reading from response stream fails.
+*/
+class ReadError extends RequestError {
+    constructor(error, request) {
+        super(error.message, error, request);
+        this.name = 'ReadError';
+        this.code = this.code === 'ERR_GOT_REQUEST_ERROR' ? 'ERR_READING_RESPONSE_STREAM' : this.code;
+    }
+}
+/**
+An error which always triggers a new retry when thrown.
+*/
+class RetryError extends RequestError {
+    constructor(request) {
+        super('Retrying', {}, request);
+        this.name = 'RetryError';
+        this.code = 'ERR_RETRYING';
+    }
+}
+/**
+An error to be thrown when the request is aborted by AbortController.
+*/
+class AbortError extends RequestError {
+    constructor(request) {
+        super('This operation was aborted.', {}, request);
+        this.code = 'ERR_ABORTED';
+        this.name = 'AbortError';
+    }
+}
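+
+// Editor's note (illustrative, not part of the upstream bundle): each subclass above narrows the
+// RequestError name/code pair, so callers can branch on error.code. For example:
+//   try { /* a got request */ } catch (error) {
+//     if (error instanceof HTTPError) {
+//       console.log(error.code); // 'ERR_NON_2XX_3XX_RESPONSE'
+//     }
+//   }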
+
+// EXTERNAL MODULE: external "node:process"
+var external_node_process_ = __nccwpck_require__(1708);
+;// CONCATENATED MODULE: external "node:buffer"
+const external_node_buffer_namespaceObject = __WEBPACK_EXTERNAL_createRequire(import.meta.url)("node:buffer");
+// EXTERNAL MODULE: external "node:stream"
+var external_node_stream_ = __nccwpck_require__(7075);
+// EXTERNAL MODULE: external "node:http"
+var external_node_http_ = __nccwpck_require__(7067);
+// EXTERNAL MODULE: external "events"
+var external_events_ = __nccwpck_require__(4434);
+// EXTERNAL MODULE: external "util"
+var external_util_ = __nccwpck_require__(9023);
+// EXTERNAL MODULE: ./node_modules/.pnpm/defer-to-connect@2.0.1/node_modules/defer-to-connect/dist/source/index.js
+var source = __nccwpck_require__(7596);
+;// CONCATENATED MODULE: ./node_modules/.pnpm/@szmarczak+http-timer@5.0.1/node_modules/@szmarczak/http-timer/dist/source/index.js
+
+
+
+const timer = (request) => {
+    if (request.timings) {
+        return request.timings;
+    }
+    const timings = {
+        start: Date.now(),
+        socket: undefined,
+        lookup: undefined,
+        connect: undefined,
+        secureConnect: undefined,
+        upload: undefined,
+        response: undefined,
+        end: undefined,
+        error: undefined,
+        abort: undefined,
+        phases: {
+            wait: undefined,
+            dns: undefined,
+            tcp: undefined,
+            tls: undefined,
+            request: undefined,
+            firstByte: undefined,
+            download: undefined,
+            total: undefined,
+        },
+    };
+    request.timings = timings;
+    const handleError = (origin) => {
+        origin.once(external_events_.errorMonitor, () => {
+            timings.error = Date.now();
+            timings.phases.total = timings.error - timings.start;
+        });
+    };
+    handleError(request);
+    const onAbort = () => {
+        timings.abort = Date.now();
+        timings.phases.total = timings.abort - timings.start;
+    };
+    request.prependOnceListener('abort', onAbort);
+    const onSocket = (socket) => {
+        timings.socket = Date.now();
+        timings.phases.wait = timings.socket - timings.start;
+        if (external_util_.types.isProxy(socket)) {
+            return;
+        }
+        const lookupListener = () => {
+            timings.lookup = Date.now();
+            timings.phases.dns = timings.lookup - timings.socket;
+        };
+        socket.prependOnceListener('lookup', lookupListener);
+        source(socket, {
+            connect: () => {
+                timings.connect = Date.now();
+                if (timings.lookup === undefined) {
+                    socket.removeListener('lookup', lookupListener);
+                    timings.lookup = timings.connect;
+                    timings.phases.dns = timings.lookup - timings.socket;
+                }
+                timings.phases.tcp = timings.connect - timings.lookup;
+            },
+            secureConnect: () => {
+                timings.secureConnect = Date.now();
+                timings.phases.tls = timings.secureConnect - timings.connect;
+            },
+        });
+    };
+    if (request.socket) {
+        onSocket(request.socket);
+    }
+    else {
+        request.prependOnceListener('socket', onSocket);
+    }
+    const onUpload = () => {
+        timings.upload = Date.now();
+        timings.phases.request = timings.upload - (timings.secureConnect ?? timings.connect);
+    };
+    if (request.writableFinished) {
+        onUpload();
+    }
+    else {
+        request.prependOnceListener('finish', onUpload);
+    }
+    request.prependOnceListener('response', (response) => {
+        timings.response = Date.now();
+        timings.phases.firstByte = timings.response - timings.upload;
+        response.timings = timings;
+        handleError(response);
+        response.prependOnceListener('end', () => {
+            request.off('abort', onAbort);
+            response.off('aborted', onAbort);
+            if (timings.phases.total) {
+                // Aborted or errored
+                return;
+            }
+            timings.end = Date.now();
+            timings.phases.download = timings.end - timings.response;
+            timings.phases.total = timings.end - timings.start;
+        });
+        response.prependOnceListener('aborted', onAbort);
+    });
+    return timings;
+};
+/* harmony default export */ const dist_source = (timer);
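+/*
+A minimal usage sketch for the timer above, assuming `http` stands for `node:http`
+and an illustrative URL. `timer()` returns the object it stores on `request.timings`;
+its `phases` are the deltas computed above (wait, dns, tcp, tls, request, firstByte,
+download, total):
+
+    const request = http.get('http://example.com');
+    const timings = timer(request);
+    request.once('response', response => {
+        response.resume();
+        response.once('end', () => console.log(timings.phases));
+    });
+*/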
+
+;// CONCATENATED MODULE: external "node:url"
+const external_node_url_namespaceObject = __WEBPACK_EXTERNAL_createRequire(import.meta.url)("node:url");
+;// CONCATENATED MODULE: ./node_modules/.pnpm/normalize-url@8.0.1/node_modules/normalize-url/index.js
+// https://developer.mozilla.org/en-US/docs/Web/HTTP/Basics_of_HTTP/Data_URIs
+const DATA_URL_DEFAULT_MIME_TYPE = 'text/plain';
+const DATA_URL_DEFAULT_CHARSET = 'us-ascii';
+
+const testParameter = (name, filters) => filters.some(filter => filter instanceof RegExp ? filter.test(name) : filter === name);
+
+const supportedProtocols = new Set([
+	'https:',
+	'http:',
+	'file:',
+]);
+
+const hasCustomProtocol = urlString => {
+	try {
+		const {protocol} = new URL(urlString);
+
+		return protocol.endsWith(':')
+			&& !protocol.includes('.')
+			&& !supportedProtocols.has(protocol);
+	} catch {
+		return false;
+	}
+};
+
+const normalizeDataURL = (urlString, {stripHash}) => {
+	const match = /^data:(?<type>[^,]*?),(?<data>[^#]*?)(?:#(?<hash>.*))?$/.exec(urlString);
+
+	if (!match) {
+		throw new Error(`Invalid URL: ${urlString}`);
+	}
+
+	let {type, data, hash} = match.groups;
+	const mediaType = type.split(';');
+	hash = stripHash ? '' : hash;
+
+	let isBase64 = false;
+	if (mediaType[mediaType.length - 1] === 'base64') {
+		mediaType.pop();
+		isBase64 = true;
+	}
+
+	// Lowercase MIME type
+	const mimeType = mediaType.shift()?.toLowerCase() ?? '';
+	const attributes = mediaType
+		.map(attribute => {
+			let [key, value = ''] = attribute.split('=').map(string => string.trim());
+
+			// Lowercase `charset`
+			if (key === 'charset') {
+				value = value.toLowerCase();
+
+				if (value === DATA_URL_DEFAULT_CHARSET) {
+					return '';
+				}
+			}
+
+			return `${key}${value ? `=${value}` : ''}`;
+		})
+		.filter(Boolean);
+
+	const normalizedMediaType = [
+		...attributes,
+	];
+
+	if (isBase64) {
+		normalizedMediaType.push('base64');
+	}
+
+	if (normalizedMediaType.length > 0 || (mimeType && mimeType !== DATA_URL_DEFAULT_MIME_TYPE)) {
+		normalizedMediaType.unshift(mimeType);
+	}
+
+	return `data:${normalizedMediaType.join(';')},${isBase64 ? data.trim() : data}${hash ? `#${hash}` : ''}`;
+};
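+/*
+Illustrative traces of normalizeDataURL with made-up inputs: the default charset and
+default MIME type are dropped, while a base64 marker is kept, e.g.
+    normalizeDataURL('data:text/plain;charset=US-ASCII,hello', {stripHash: false}) === 'data:,hello'
+    normalizeDataURL('data:;base64,aGk=', {stripHash: false}) === 'data:;base64,aGk='
+*/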
+
+function normalizeUrl(urlString, options) {
+	options = {
+		defaultProtocol: 'http',
+		normalizeProtocol: true,
+		forceHttp: false,
+		forceHttps: false,
+		stripAuthentication: true,
+		stripHash: false,
+		stripTextFragment: true,
+		stripWWW: true,
+		removeQueryParameters: [/^utm_\w+/i],
+		removeTrailingSlash: true,
+		removeSingleSlash: true,
+		removeDirectoryIndex: false,
+		removeExplicitPort: false,
+		sortQueryParameters: true,
+		...options,
+	};
+
+	// Legacy: Append `:` to the protocol if missing.
+	if (typeof options.defaultProtocol === 'string' && !options.defaultProtocol.endsWith(':')) {
+		options.defaultProtocol = `${options.defaultProtocol}:`;
+	}
+
+	urlString = urlString.trim();
+
+	// Data URL
+	if (/^data:/i.test(urlString)) {
+		return normalizeDataURL(urlString, options);
+	}
+
+	if (hasCustomProtocol(urlString)) {
+		return urlString;
+	}
+
+	const hasRelativeProtocol = urlString.startsWith('//');
+	const isRelativeUrl = !hasRelativeProtocol && /^\.*\//.test(urlString);
+
+	// Prepend protocol
+	if (!isRelativeUrl) {
+		urlString = urlString.replace(/^(?!(?:\w+:)?\/\/)|^\/\//, options.defaultProtocol);
+	}
+
+	const urlObject = new URL(urlString);
+
+	if (options.forceHttp && options.forceHttps) {
+		throw new Error('The `forceHttp` and `forceHttps` options cannot be used together');
+	}
+
+	if (options.forceHttp && urlObject.protocol === 'https:') {
+		urlObject.protocol = 'http:';
+	}
+
+	if (options.forceHttps && urlObject.protocol === 'http:') {
+		urlObject.protocol = 'https:';
+	}
+
+	// Remove auth
+	if (options.stripAuthentication) {
+		urlObject.username = '';
+		urlObject.password = '';
+	}
+
+	// Remove hash
+	if (options.stripHash) {
+		urlObject.hash = '';
+	} else if (options.stripTextFragment) {
+		urlObject.hash = urlObject.hash.replace(/#?:~:text.*?$/i, '');
+	}
+
+	// Remove duplicate slashes if not preceded by a protocol
+	// NOTE: This could be implemented using a single negative lookbehind
+	// regex, but we avoid that to maintain compatibility with older js engines
+	// which do not have support for that feature.
+	if (urlObject.pathname) {
+		// TODO: Replace everything below with `urlObject.pathname = urlObject.pathname.replace(/(?<!\b[a-z][a-z\d+\-.]{1,50}:)\/{2,}/g, '/');` when Safari supports negative lookbehind.
+
+		// Split the string by occurrences of this protocol regex, and perform
+		// duplicate-slash replacement on the strings between those occurrences
+		// (if any).
+		const protocolRegex = /\b[a-z][a-z\d+\-.]{1,50}:\/\//g;
+
+		let lastIndex = 0;
+		let result = '';
+		for (;;) {
+			const match = protocolRegex.exec(urlObject.pathname);
+			if (!match) {
+				break;
+			}
+
+			const protocol = match[0];
+			const protocolAtIndex = match.index;
+			const intermediate = urlObject.pathname.slice(lastIndex, protocolAtIndex);
+
+			result += intermediate.replace(/\/{2,}/g, '/');
+			result += protocol;
+			lastIndex = protocolAtIndex + protocol.length;
+		}
+
+		const remnant = urlObject.pathname.slice(lastIndex, urlObject.pathname.length);
+		result += remnant.replace(/\/{2,}/g, '/');
+
+		urlObject.pathname = result;
+	}
+
+	// Decode URI octets
+	if (urlObject.pathname) {
+		try {
+			urlObject.pathname = decodeURI(urlObject.pathname);
+		} catch {}
+	}
+
+	// Remove directory index
+	if (options.removeDirectoryIndex === true) {
+		options.removeDirectoryIndex = [/^index\.[a-z]+$/];
+	}
+
+	if (Array.isArray(options.removeDirectoryIndex) && options.removeDirectoryIndex.length > 0) {
+		let pathComponents = urlObject.pathname.split('/');
+		const lastComponent = pathComponents[pathComponents.length - 1];
+
+		if (testParameter(lastComponent, options.removeDirectoryIndex)) {
+			pathComponents = pathComponents.slice(0, -1);
+			urlObject.pathname = pathComponents.slice(1).join('/') + '/';
+		}
+	}
+
+	if (urlObject.hostname) {
+		// Remove trailing dot
+		urlObject.hostname = urlObject.hostname.replace(/\.$/, '');
+
+		// Remove `www.`
+		if (options.stripWWW && /^www\.(?!www\.)[a-z\-\d]{1,63}\.[a-z.\-\d]{2,63}$/.test(urlObject.hostname)) {
+			// Each label should be max 63 at length (min: 1).
+			// Source: https://en.wikipedia.org/wiki/Hostname#Restrictions_on_valid_host_names
+			// Each TLD should be up to 63 characters long (min: 2).
+			// It is technically possible to have a single character TLD, but none currently exist.
+			urlObject.hostname = urlObject.hostname.replace(/^www\./, '');
+		}
+	}
+
+	// Remove unwanted query parameters
+	if (Array.isArray(options.removeQueryParameters)) {
+		// eslint-disable-next-line unicorn/no-useless-spread -- We are intentionally spreading to get a copy.
+		for (const key of [...urlObject.searchParams.keys()]) {
+			if (testParameter(key, options.removeQueryParameters)) {
+				urlObject.searchParams.delete(key);
+			}
+		}
+	}
+
+	if (!Array.isArray(options.keepQueryParameters) && options.removeQueryParameters === true) {
+		urlObject.search = '';
+	}
+
+	// Keep wanted query parameters
+	if (Array.isArray(options.keepQueryParameters) && options.keepQueryParameters.length > 0) {
+		// eslint-disable-next-line unicorn/no-useless-spread -- We are intentionally spreading to get a copy.
+		for (const key of [...urlObject.searchParams.keys()]) {
+			if (!testParameter(key, options.keepQueryParameters)) {
+				urlObject.searchParams.delete(key);
+			}
+		}
+	}
+
+	// Sort query parameters
+	if (options.sortQueryParameters) {
+		urlObject.searchParams.sort();
+
+		// Calling `.sort()` encodes the search parameters, so we need to decode them again.
+		try {
+			urlObject.search = decodeURIComponent(urlObject.search);
+		} catch {}
+	}
+
+	if (options.removeTrailingSlash) {
+		urlObject.pathname = urlObject.pathname.replace(/\/$/, '');
+	}
+
+	// Remove an explicit port number, excluding a default port number, if applicable
+	if (options.removeExplicitPort && urlObject.port) {
+		urlObject.port = '';
+	}
+
+	const oldUrlString = urlString;
+
+	// Take advantage of many of the Node `url` normalizations
+	urlString = urlObject.toString();
+
+	if (!options.removeSingleSlash && urlObject.pathname === '/' && !oldUrlString.endsWith('/') && urlObject.hash === '') {
+		urlString = urlString.replace(/\/$/, '');
+	}
+
+	// Remove ending `/` unless removeSingleSlash is false
+	if ((options.removeTrailingSlash || urlObject.pathname === '/') && urlObject.hash === '' && options.removeSingleSlash) {
+		urlString = urlString.replace(/\/$/, '');
+	}
+
+	// Restore relative protocol, if applicable
+	if (hasRelativeProtocol && !options.normalizeProtocol) {
+		urlString = urlString.replace(/^http:\/\//, '//');
+	}
+
+	// Remove http/https
+	if (options.stripProtocol) {
+		urlString = urlString.replace(/^(?:https?:)?\/\//, '');
+	}
+
+	return urlString;
+}
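+/*
+A minimal usage sketch with the default options above; the URL is an illustrative
+assumption. The default protocol is prepended, `www.` is stripped, the path is
+resolved, and query parameters are sorted:
+
+    normalizeUrl('//www.example.com/../baz?b=2&a=1');
+    //=> 'http://example.com/baz?a=1&b=2'
+*/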
+
+;// CONCATENATED MODULE: ./node_modules/.pnpm/is-stream@4.0.1/node_modules/is-stream/index.js
+function isStream(stream, {checkOpen = true} = {}) {
+	return stream !== null
+		&& typeof stream === 'object'
+		&& (stream.writable || stream.readable || !checkOpen || (stream.writable === undefined && stream.readable === undefined))
+		&& typeof stream.pipe === 'function';
+}
+
+function isWritableStream(stream, {checkOpen = true} = {}) {
+	return isStream(stream, {checkOpen})
+		&& (stream.writable || !checkOpen)
+		&& typeof stream.write === 'function'
+		&& typeof stream.end === 'function'
+		&& typeof stream.writable === 'boolean'
+		&& typeof stream.writableObjectMode === 'boolean'
+		&& typeof stream.destroy === 'function'
+		&& typeof stream.destroyed === 'boolean';
+}
+
+function isReadableStream(stream, {checkOpen = true} = {}) {
+	return isStream(stream, {checkOpen})
+		&& (stream.readable || !checkOpen)
+		&& typeof stream.read === 'function'
+		&& typeof stream.readable === 'boolean'
+		&& typeof stream.readableObjectMode === 'boolean'
+		&& typeof stream.destroy === 'function'
+		&& typeof stream.destroyed === 'boolean';
+}
+
+function isDuplexStream(stream, options) {
+	return isWritableStream(stream, options)
+		&& isReadableStream(stream, options);
+}
+
+function isTransformStream(stream, options) {
+	return isDuplexStream(stream, options)
+		&& typeof stream._transform === 'function';
+}
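+/*
+Illustrative checks against Node.js built-ins, assuming `fs` and `PassThrough` stand
+for `node:fs` and `node:stream` in the caller's scope:
+
+    isReadableStream(fs.createReadStream('file.txt')); //=> true
+    isWritableStream(fs.createReadStream('file.txt')); //=> false
+    isDuplexStream(new PassThrough());                 //=> true
+*/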
+
+;// CONCATENATED MODULE: ./node_modules/.pnpm/@sec-ant+readable-stream@0.4.1/node_modules/@sec-ant/readable-stream/dist/ponyfill/asyncIterator.js
+const a = Object.getPrototypeOf(
+  Object.getPrototypeOf(
+    /* istanbul ignore next */
+    async function* () {
+    }
+  ).prototype
+);
+class c {
+  #t;
+  #n;
+  #r = !1;
+  #e = void 0;
+  constructor(e, t) {
+    this.#t = e, this.#n = t;
+  }
+  next() {
+    const e = () => this.#s();
+    return this.#e = this.#e ? this.#e.then(e, e) : e(), this.#e;
+  }
+  return(e) {
+    const t = () => this.#i(e);
+    return this.#e ? this.#e.then(t, t) : t();
+  }
+  async #s() {
+    if (this.#r)
+      return {
+        done: !0,
+        value: void 0
+      };
+    let e;
+    try {
+      e = await this.#t.read();
+    } catch (t) {
+      throw this.#e = void 0, this.#r = !0, this.#t.releaseLock(), t;
+    }
+    return e.done && (this.#e = void 0, this.#r = !0, this.#t.releaseLock()), e;
+  }
+  async #i(e) {
+    if (this.#r)
+      return {
+        done: !0,
+        value: e
+      };
+    if (this.#r = !0, !this.#n) {
+      const t = this.#t.cancel(e);
+      return this.#t.releaseLock(), await t, {
+        done: !0,
+        value: e
+      };
+    }
+    return this.#t.releaseLock(), {
+      done: !0,
+      value: e
+    };
+  }
+}
+const n = Symbol();
+function i() {
+  return this[n].next();
+}
+Object.defineProperty(i, "name", { value: "next" });
+function o(r) {
+  return this[n].return(r);
+}
+Object.defineProperty(o, "name", { value: "return" });
+const u = Object.create(a, {
+  next: {
+    enumerable: !0,
+    configurable: !0,
+    writable: !0,
+    value: i
+  },
+  return: {
+    enumerable: !0,
+    configurable: !0,
+    writable: !0,
+    value: o
+  }
+});
+function h({ preventCancel: r = !1 } = {}) {
+  const e = this.getReader(), t = new c(
+    e,
+    r
+  ), s = Object.create(u);
+  return s[n] = t, s;
+}
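+/*
+For orientation in this minified ponyfill: `c` appears to wrap a ReadableStream reader
+as an async iterator, and `h` is the `Symbol.asyncIterator`/`values()` implementation
+that the get-stream module below invokes as `h.call(stream)` for WHATWG ReadableStreams
+lacking native async iteration.
+*/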
+
+
+;// CONCATENATED MODULE: ./node_modules/.pnpm/@sec-ant+readable-stream@0.4.1/node_modules/@sec-ant/readable-stream/dist/ponyfill/index.js
+
+
+
+
+;// CONCATENATED MODULE: ./node_modules/.pnpm/get-stream@9.0.1/node_modules/get-stream/source/stream.js
+
+
+
+const getAsyncIterable = stream => {
+	if (isReadableStream(stream, {checkOpen: false}) && nodeImports.on !== undefined) {
+		return getStreamIterable(stream);
+	}
+
+	if (typeof stream?.[Symbol.asyncIterator] === 'function') {
+		return stream;
+	}
+
+	// `ReadableStream[Symbol.asyncIterator]` support is missing in multiple browsers, so we ponyfill it
+	if (stream_toString.call(stream) === '[object ReadableStream]') {
+		return h.call(stream);
+	}
+
+	throw new TypeError('The first argument must be a Readable, a ReadableStream, or an async iterable.');
+};
+
+const {toString: stream_toString} = Object.prototype;
+
+// The default iterable for Node.js streams does not allow for multiple readers at once, so we re-implement it
+const getStreamIterable = async function * (stream) {
+	const controller = new AbortController();
+	const state = {};
+	handleStreamEnd(stream, controller, state);
+
+	try {
+		for await (const [chunk] of nodeImports.on(stream, 'data', {signal: controller.signal})) {
+			yield chunk;
+		}
+	} catch (error) {
+		// Stream failure, for example due to `stream.destroy(error)`
+		if (state.error !== undefined) {
+			throw state.error;
+		// `error` event directly emitted on stream
+		} else if (!controller.signal.aborted) {
+			throw error;
+		// Otherwise, stream completed successfully
+		}
+		// The `finally` block also runs when the caller throws, for example due to the `maxBuffer` option
+	} finally {
+		stream.destroy();
+	}
+};
+
+const handleStreamEnd = async (stream, controller, state) => {
+	try {
+		await nodeImports.finished(stream, {
+			cleanup: true,
+			readable: true,
+			writable: false,
+			error: false,
+		});
+	} catch (error) {
+		state.error = error;
+	} finally {
+		controller.abort();
+	}
+};
+
+// Populated by the Node.js entrypoint, but not by the browser one.
+// This avoids having to use dynamic imports.
+const nodeImports = {};
+
+;// CONCATENATED MODULE: ./node_modules/.pnpm/get-stream@9.0.1/node_modules/get-stream/source/contents.js
+
+
+const getStreamContents = async (stream, {init, convertChunk, getSize, truncateChunk, addChunk, getFinalChunk, finalize}, {maxBuffer = Number.POSITIVE_INFINITY} = {}) => {
+	const asyncIterable = getAsyncIterable(stream);
+
+	const state = init();
+	state.length = 0;
+
+	try {
+		for await (const chunk of asyncIterable) {
+			const chunkType = getChunkType(chunk);
+			const convertedChunk = convertChunk[chunkType](chunk, state);
+			appendChunk({
+				convertedChunk,
+				state,
+				getSize,
+				truncateChunk,
+				addChunk,
+				maxBuffer,
+			});
+		}
+
+		appendFinalChunk({
+			state,
+			convertChunk,
+			getSize,
+			truncateChunk,
+			addChunk,
+			getFinalChunk,
+			maxBuffer,
+		});
+		return finalize(state);
+	} catch (error) {
+		const normalizedError = typeof error === 'object' && error !== null ? error : new Error(error);
+		normalizedError.bufferedData = finalize(state);
+		throw normalizedError;
+	}
+};
+
+const appendFinalChunk = ({state, getSize, truncateChunk, addChunk, getFinalChunk, maxBuffer}) => {
+	const convertedChunk = getFinalChunk(state);
+	if (convertedChunk !== undefined) {
+		appendChunk({
+			convertedChunk,
+			state,
+			getSize,
+			truncateChunk,
+			addChunk,
+			maxBuffer,
+		});
+	}
+};
+
+const appendChunk = ({convertedChunk, state, getSize, truncateChunk, addChunk, maxBuffer}) => {
+	const chunkSize = getSize(convertedChunk);
+	const newLength = state.length + chunkSize;
+
+	if (newLength <= maxBuffer) {
+		addNewChunk(convertedChunk, state, addChunk, newLength);
+		return;
+	}
+
+	const truncatedChunk = truncateChunk(convertedChunk, maxBuffer - state.length);
+
+	if (truncatedChunk !== undefined) {
+		addNewChunk(truncatedChunk, state, addChunk, maxBuffer);
+	}
+
+	throw new MaxBufferError();
+};
+
+const addNewChunk = (convertedChunk, state, addChunk, newLength) => {
+	state.contents = addChunk(convertedChunk, state, newLength);
+	state.length = newLength;
+};
+
+const getChunkType = chunk => {
+	const typeOfChunk = typeof chunk;
+
+	if (typeOfChunk === 'string') {
+		return 'string';
+	}
+
+	if (typeOfChunk !== 'object' || chunk === null) {
+		return 'others';
+	}
+
+	if (globalThis.Buffer?.isBuffer(chunk)) {
+		return 'buffer';
+	}
+
+	const prototypeName = objectToString.call(chunk);
+
+	if (prototypeName === '[object ArrayBuffer]') {
+		return 'arrayBuffer';
+	}
+
+	if (prototypeName === '[object DataView]') {
+		return 'dataView';
+	}
+
+	if (
+		Number.isInteger(chunk.byteLength)
+		&& Number.isInteger(chunk.byteOffset)
+		&& objectToString.call(chunk.buffer) === '[object ArrayBuffer]'
+	) {
+		return 'typedArray';
+	}
+
+	return 'others';
+};
+
+const {toString: objectToString} = Object.prototype;
+
+class MaxBufferError extends Error {
+	name = 'MaxBufferError';
+
+	constructor() {
+		super('maxBuffer exceeded');
+	}
+}
+
+;// CONCATENATED MODULE: ./node_modules/.pnpm/get-stream@9.0.1/node_modules/get-stream/source/utils.js
+const identity = value => value;
+
+const noop = () => undefined;
+
+const getContentsProperty = ({contents}) => contents;
+
+const throwObjectStream = chunk => {
+	throw new Error(`Streams in object mode are not supported: ${String(chunk)}`);
+};
+
+const getLengthProperty = convertedChunk => convertedChunk.length;
+
+;// CONCATENATED MODULE: ./node_modules/.pnpm/get-stream@9.0.1/node_modules/get-stream/source/array-buffer.js
+
+
+
+async function getStreamAsArrayBuffer(stream, options) {
+	return getStreamContents(stream, arrayBufferMethods, options);
+}
+
+const initArrayBuffer = () => ({contents: new ArrayBuffer(0)});
+
+const useTextEncoder = chunk => textEncoder.encode(chunk);
+const textEncoder = new TextEncoder();
+
+const useUint8Array = chunk => new Uint8Array(chunk);
+
+const useUint8ArrayWithOffset = chunk => new Uint8Array(chunk.buffer, chunk.byteOffset, chunk.byteLength);
+
+const truncateArrayBufferChunk = (convertedChunk, chunkSize) => convertedChunk.slice(0, chunkSize);
+
+// `contents` is an `ArrayBuffer` that grows as chunks are appended.
+const addArrayBufferChunk = (convertedChunk, {contents, length: previousLength}, length) => {
+	const newContents = hasArrayBufferResize() ? resizeArrayBuffer(contents, length) : resizeArrayBufferSlow(contents, length);
+	new Uint8Array(newContents).set(convertedChunk, previousLength);
+	return newContents;
+};
+
+// Without `ArrayBuffer.resize()`, `contents` size is always a power of 2.
+// This means its last bytes are zeroes (not stream data), which need to be
+// trimmed at the end with `ArrayBuffer.slice()`.
+const resizeArrayBufferSlow = (contents, length) => {
+	if (length <= contents.byteLength) {
+		return contents;
+	}
+
+	const arrayBuffer = new ArrayBuffer(getNewContentsLength(length));
+	new Uint8Array(arrayBuffer).set(new Uint8Array(contents), 0);
+	return arrayBuffer;
+};
+
+// With `ArrayBuffer.resize()`, `contents` size matches exactly the size of
+// the stream data. It does not include extraneous zeroes to trim at the end.
+// The underlying `ArrayBuffer` does allocate a number of bytes that is a power
+// of 2, but those bytes are only visible after calling `ArrayBuffer.resize()`.
+const resizeArrayBuffer = (contents, length) => {
+	if (length <= contents.maxByteLength) {
+		contents.resize(length);
+		return contents;
+	}
+
+	const arrayBuffer = new ArrayBuffer(length, {maxByteLength: getNewContentsLength(length)});
+	new Uint8Array(arrayBuffer).set(new Uint8Array(contents), 0);
+	return arrayBuffer;
+};
+
+// Retrieve the smallest power of 2 that is >= `length`
+const getNewContentsLength = length => SCALE_FACTOR ** Math.ceil(Math.log(length) / Math.log(SCALE_FACTOR));
+
+const SCALE_FACTOR = 2;
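+// Illustrative values: getNewContentsLength(5) === 8 and getNewContentsLength(100) === 128,
+// i.e. the buffer capacity grows in powers of 2.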
+
+const finalizeArrayBuffer = ({contents, length}) => hasArrayBufferResize() ? contents : contents.slice(0, length);
+
+// `ArrayBuffer.slice()` is slow. When `ArrayBuffer.resize()` is available
+// (Node >=20.0.0, Safari >=16.4 and Chrome), we can use it instead.
+// eslint-disable-next-line no-warning-comments
+// TODO: remove after dropping support for Node 20.
+// eslint-disable-next-line no-warning-comments
+// TODO: use `ArrayBuffer.transferToFixedLength()` instead once it is available
+const hasArrayBufferResize = () => 'resize' in ArrayBuffer.prototype;
+
+const arrayBufferMethods = {
+	init: initArrayBuffer,
+	convertChunk: {
+		string: useTextEncoder,
+		buffer: useUint8Array,
+		arrayBuffer: useUint8Array,
+		dataView: useUint8ArrayWithOffset,
+		typedArray: useUint8ArrayWithOffset,
+		others: throwObjectStream,
+	},
+	getSize: getLengthProperty,
+	truncateChunk: truncateArrayBufferChunk,
+	addChunk: addArrayBufferChunk,
+	getFinalChunk: noop,
+	finalize: finalizeArrayBuffer,
+};
+
+;// CONCATENATED MODULE: ./node_modules/.pnpm/get-stream@9.0.1/node_modules/get-stream/source/buffer.js
+
+
+async function getStreamAsBuffer(stream, options) {
+	if (!('Buffer' in globalThis)) {
+		throw new Error('getStreamAsBuffer() is only supported in Node.js');
+	}
+
+	try {
+		return arrayBufferToNodeBuffer(await getStreamAsArrayBuffer(stream, options));
+	} catch (error) {
+		if (error.bufferedData !== undefined) {
+			error.bufferedData = arrayBufferToNodeBuffer(error.bufferedData);
+		}
+
+		throw error;
+	}
+}
+
+const arrayBufferToNodeBuffer = arrayBuffer => globalThis.Buffer.from(arrayBuffer);
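+/*
+A minimal usage sketch for getStreamAsBuffer with a size limit; `fs` standing for
+`node:fs` and the file path are illustrative assumptions:
+
+    try {
+        const buffer = await getStreamAsBuffer(fs.createReadStream('big.bin'), {maxBuffer: 1e6});
+    } catch (error) {
+        if (error instanceof MaxBufferError) {
+            // Contains the (truncated) data gathered before the limit was hit.
+            console.log(error.bufferedData.length);
+        }
+    }
+*/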
+
+// EXTERNAL MODULE: ./node_modules/.pnpm/http-cache-semantics@4.1.1/node_modules/http-cache-semantics/index.js
+var http_cache_semantics = __nccwpck_require__(9038);
+;// CONCATENATED MODULE: ./node_modules/.pnpm/lowercase-keys@3.0.0/node_modules/lowercase-keys/index.js
+function lowercaseKeys(object) {
+	return Object.fromEntries(Object.entries(object).map(([key, value]) => [key.toLowerCase(), value]));
+}
+
+;// CONCATENATED MODULE: ./node_modules/.pnpm/responselike@3.0.0/node_modules/responselike/index.js
+
+
+
+class Response extends external_node_stream_.Readable {
+	statusCode;
+	headers;
+	body;
+	url;
+
+	constructor({statusCode, headers, body, url}) {
+		if (typeof statusCode !== 'number') {
+			throw new TypeError('Argument `statusCode` should be a number');
+		}
+
+		if (typeof headers !== 'object') {
+			throw new TypeError('Argument `headers` should be an object');
+		}
+
+		if (!(body instanceof Uint8Array)) {
+			throw new TypeError('Argument `body` should be a buffer');
+		}
+
+		if (typeof url !== 'string') {
+			throw new TypeError('Argument `url` should be a string');
+		}
+
+		super({
+			read() {
+				this.push(body);
+				this.push(null);
+			},
+		});
+
+		this.statusCode = statusCode;
+		this.headers = lowercaseKeys(headers);
+		this.body = body;
+		this.url = url;
+	}
+}
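+/*
+A minimal usage sketch for the Response shim above; all field values are illustrative:
+
+    const cached = new Response({
+        statusCode: 200,
+        headers: {'Content-Type': 'text/plain'},
+        body: Buffer.from('hi'),
+        url: 'http://example.com',
+    });
+    cached.on('data', chunk => console.log(String(chunk))); //=> 'hi'
+*/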
+
+// EXTERNAL MODULE: ./node_modules/.pnpm/keyv@4.5.4/node_modules/keyv/src/index.js
+var src = __nccwpck_require__(8832);
+;// CONCATENATED MODULE: ./node_modules/.pnpm/mimic-response@4.0.0/node_modules/mimic-response/index.js
+// We define these manually to ensure they're always copied
+// even if they would move up the prototype chain
+// https://nodejs.org/api/http.html#http_class_http_incomingmessage
+const knownProperties = [
+	'aborted',
+	'complete',
+	'headers',
+	'httpVersion',
+	'httpVersionMinor',
+	'httpVersionMajor',
+	'method',
+	'rawHeaders',
+	'rawTrailers',
+	'setTimeout',
+	'socket',
+	'statusCode',
+	'statusMessage',
+	'trailers',
+	'url',
+];
+
+function mimicResponse(fromStream, toStream) {
+	if (toStream._readableState.autoDestroy) {
+		throw new Error('The second stream must have the `autoDestroy` option set to `false`');
+	}
+
+	const fromProperties = new Set([...Object.keys(fromStream), ...knownProperties]);
+
+	const properties = {};
+
+	for (const property of fromProperties) {
+		// Don't overwrite existing properties.
+		if (property in toStream) {
+			continue;
+		}
+
+		properties[property] = {
+			get() {
+				const value = fromStream[property];
+				const isFunction = typeof value === 'function';
+
+				return isFunction ? value.bind(fromStream) : value;
+			},
+			set(value) {
+				fromStream[property] = value;
+			},
+			enumerable: true,
+			configurable: false,
+		};
+	}
+
+	Object.defineProperties(toStream, properties);
+
+	fromStream.once('aborted', () => {
+		toStream.destroy();
+
+		toStream.emit('aborted');
+	});
+
+	fromStream.once('close', () => {
+		if (fromStream.complete) {
+			if (toStream.readable) {
+				toStream.once('end', () => {
+					toStream.emit('close');
+				});
+			} else {
+				toStream.emit('close');
+			}
+		} else {
+			toStream.emit('close');
+		}
+	});
+
+	return toStream;
+}
+
+;// CONCATENATED MODULE: ./node_modules/.pnpm/cacheable-request@12.0.1/node_modules/cacheable-request/dist/types.js
+// Type definitions for cacheable-request 6.0
+// Project: https://github.com/lukechilds/cacheable-request#readme
+// Definitions by: BendingBender <https://github.com/BendingBender>
+//                 Paul Melnikow <https://github.com/paulmelnikow>
+// Definitions: https://github.com/DefinitelyTyped/DefinitelyTyped
+// TypeScript Version: 2.3
+class types_RequestError extends Error {
+    constructor(error) {
+        super(error.message);
+        Object.assign(this, error);
+    }
+}
+class types_CacheError extends Error {
+    constructor(error) {
+        super(error.message);
+        Object.assign(this, error);
+    }
+}
+//# sourceMappingURL=types.js.map
+;// CONCATENATED MODULE: ./node_modules/.pnpm/cacheable-request@12.0.1/node_modules/cacheable-request/dist/index.js
+
+
+
+
+
+
+
+
+
+
+
+class CacheableRequest {
+    constructor(cacheRequest, cacheAdapter) {
+        this.hooks = new Map();
+        this.request = () => (options, callback) => {
+            let url;
+            if (typeof options === 'string') {
+                url = normalizeUrlObject(external_node_url_namespaceObject.parse(options));
+                options = {};
+            }
+            else if (options instanceof external_node_url_namespaceObject.URL) {
+                url = normalizeUrlObject(external_node_url_namespaceObject.parse(options.toString()));
+                options = {};
+            }
+            else {
+                const [pathname, ...searchParts] = (options.path ?? '').split('?');
+                const search = searchParts.length > 0
+                    ? `?${searchParts.join('?')}`
+                    : '';
+                url = normalizeUrlObject({ ...options, pathname, search });
+            }
+            options = {
+                headers: {},
+                method: 'GET',
+                cache: true,
+                strictTtl: false,
+                automaticFailover: false,
+                ...options,
+                ...urlObjectToRequestOptions(url),
+            };
+            options.headers = Object.fromEntries(entries(options.headers).map(([key, value]) => [key.toLowerCase(), value]));
+            const ee = new external_node_events_();
+            const normalizedUrlString = normalizeUrl(external_node_url_namespaceObject.format(url), {
+                stripWWW: false, // eslint-disable-line @typescript-eslint/naming-convention
+                removeTrailingSlash: false,
+                stripAuthentication: false,
+            });
+            let key = `${options.method}:${normalizedUrlString}`;
+            // POST, PATCH, and PUT requests may be cached, depending on the response
+            // cache-control headers. As a result, the body of the request should be
+            // added to the cache key in order to avoid collisions.
+            if (options.body && options.method !== undefined && ['POST', 'PATCH', 'PUT'].includes(options.method)) {
+                if (options.body instanceof external_node_stream_.Readable) {
+                    // Streamed bodies should completely skip the cache because they may
+                    // or may not be hashable and in either case the stream would need to
+                    // close before the cache key could be generated.
+                    options.cache = false;
+                }
+                else {
+                    key += `:${external_node_crypto_namespaceObject.createHash('md5').update(options.body).digest('hex')}`;
+                }
+            }
+            let revalidate = false;
+            let madeRequest = false;
+            const makeRequest = (options_) => {
+                madeRequest = true;
+                let requestErrored = false;
+                let requestErrorCallback = () => { };
+                const requestErrorPromise = new Promise(resolve => {
+                    requestErrorCallback = () => {
+                        if (!requestErrored) {
+                            requestErrored = true;
+                            resolve();
+                        }
+                    };
+                });
+                const handler = async (response) => {
+                    if (revalidate) {
+                        response.status = response.statusCode;
+                        const revalidatedPolicy = http_cache_semantics.fromObject(revalidate.cachePolicy).revalidatedPolicy(options_, response);
+                        if (!revalidatedPolicy.modified) {
+                            response.resume();
+                            await new Promise(resolve => {
+                                // Skip the 'error' handler because the 'error' event shouldn't be emitted for a 304 response
+                                response
+                                    .once('end', resolve);
+                            });
+                            const headers = convertHeaders(revalidatedPolicy.policy.responseHeaders());
+                            response = new Response({
+                                statusCode: revalidate.statusCode, headers, body: revalidate.body, url: revalidate.url,
+                            });
+                            response.cachePolicy = revalidatedPolicy.policy;
+                            response.fromCache = true;
+                        }
+                    }
+                    if (!response.fromCache) {
+                        response.cachePolicy = new http_cache_semantics(options_, response, options_);
+                        response.fromCache = false;
+                    }
+                    let clonedResponse;
+                    if (options_.cache && response.cachePolicy.storable()) {
+                        clonedResponse = cloneResponse(response);
+                        (async () => {
+                            try {
+                                const bodyPromise = getStreamAsBuffer(response);
+                                await Promise.race([
+                                    requestErrorPromise,
+                                    new Promise(resolve => response.once('end', resolve)), // eslint-disable-line no-promise-executor-return
+                                    new Promise(resolve => response.once('close', resolve)), // eslint-disable-line no-promise-executor-return
+                                ]);
+                                const body = await bodyPromise;
+                                let value = {
+                                    url: response.url,
+                                    statusCode: response.fromCache ? revalidate.statusCode : response.statusCode,
+                                    body,
+                                    cachePolicy: response.cachePolicy.toObject(),
+                                };
+                                let ttl = options_.strictTtl ? response.cachePolicy.timeToLive() : undefined;
+                                if (options_.maxTtl) {
+                                    ttl = ttl ? Math.min(ttl, options_.maxTtl) : options_.maxTtl;
+                                }
+                                if (this.hooks.size > 0) {
+                                    /* eslint-disable no-await-in-loop */
+                                    for (const key_ of this.hooks.keys()) {
+                                        value = await this.runHook(key_, value, response);
+                                    }
+                                    /* eslint-enable no-await-in-loop */
+                                }
+                                await this.cache.set(key, value, ttl);
+                            }
+                            catch (error) {
+                                ee.emit('error', new types_CacheError(error));
+                            }
+                        })();
+                    }
+                    else if (options_.cache && revalidate) {
+                        (async () => {
+                            try {
+                                await this.cache.delete(key);
+                            }
+                            catch (error) {
+                                ee.emit('error', new types_CacheError(error));
+                            }
+                        })();
+                    }
+                    ee.emit('response', clonedResponse ?? response);
+                    if (typeof callback === 'function') {
+                        callback(clonedResponse ?? response);
+                    }
+                };
+                try {
+                    const request_ = this.cacheRequest(options_, handler);
+                    request_.once('error', requestErrorCallback);
+                    request_.once('abort', requestErrorCallback);
+                    request_.once('destroy', requestErrorCallback);
+                    ee.emit('request', request_);
+                }
+                catch (error) {
+                    ee.emit('error', new types_RequestError(error));
+                }
+            };
+            (async () => {
+                const get = async (options_) => {
+                    await Promise.resolve();
+                    const cacheEntry = options_.cache ? await this.cache.get(key) : undefined;
+                    if (cacheEntry === undefined && !options_.forceRefresh) {
+                        makeRequest(options_);
+                        return;
+                    }
+                    const policy = http_cache_semantics.fromObject(cacheEntry.cachePolicy);
+                    if (policy.satisfiesWithoutRevalidation(options_) && !options_.forceRefresh) {
+                        const headers = convertHeaders(policy.responseHeaders());
+                        const response = new Response({
+                            statusCode: cacheEntry.statusCode, headers, body: cacheEntry.body, url: cacheEntry.url,
+                        });
+                        response.cachePolicy = policy;
+                        response.fromCache = true;
+                        ee.emit('response', response);
+                        if (typeof callback === 'function') {
+                            callback(response);
+                        }
+                    }
+                    else if (policy.satisfiesWithoutRevalidation(options_) && Date.now() >= policy.timeToLive() && options_.forceRefresh) {
+                        await this.cache.delete(key);
+                        options_.headers = policy.revalidationHeaders(options_);
+                        makeRequest(options_);
+                    }
+                    else {
+                        revalidate = cacheEntry;
+                        options_.headers = policy.revalidationHeaders(options_);
+                        makeRequest(options_);
+                    }
+                };
+                const errorHandler = (error) => ee.emit('error', new types_CacheError(error));
+                if (this.cache instanceof src) {
+                    const cachek = this.cache;
+                    cachek.once('error', errorHandler);
+                    ee.on('error', () => cachek.removeListener('error', errorHandler));
+                    ee.on('response', () => cachek.removeListener('error', errorHandler));
+                }
+                try {
+                    await get(options);
+                }
+                catch (error) {
+                    if (options.automaticFailover && !madeRequest) {
+                        makeRequest(options);
+                    }
+                    ee.emit('error', new types_CacheError(error));
+                }
+            })();
+            return ee;
+        };
+        this.addHook = (name, function_) => {
+            if (!this.hooks.has(name)) {
+                this.hooks.set(name, function_);
+            }
+        };
+        this.removeHook = (name) => this.hooks.delete(name);
+        this.getHook = (name) => this.hooks.get(name);
+        this.runHook = async (name, ...arguments_) => this.hooks.get(name)?.(...arguments_);
+        if (cacheAdapter instanceof src) {
+            this.cache = cacheAdapter;
+        }
+        else if (typeof cacheAdapter === 'string') {
+            this.cache = new src({
+                uri: cacheAdapter,
+                namespace: 'cacheable-request',
+            });
+        }
+        else {
+            this.cache = new src({
+                store: cacheAdapter,
+                namespace: 'cacheable-request',
+            });
+        }
+        this.request = this.request.bind(this);
+        this.cacheRequest = cacheRequest;
+    }
+}
+const entries = Object.entries;
+const cloneResponse = (response) => {
+    const clone = new external_node_stream_.PassThrough({ autoDestroy: false });
+    mimicResponse(response, clone);
+    return response.pipe(clone);
+};
+const urlObjectToRequestOptions = (url) => {
+    const options = { ...url };
+    options.path = `${url.pathname || '/'}${url.search || ''}`;
+    delete options.pathname;
+    delete options.search;
+    return options;
+};
+const normalizeUrlObject = (url) => 
+// If url was parsed by url.parse or new URL:
+// - hostname will be set
+// - host will be hostname[:port]
+// - port will be set if it was explicit in the parsed string
+// Otherwise, url was from request options:
+// - hostname or host may be set
+// - host shall not have port encoded
+({
+    protocol: url.protocol,
+    auth: url.auth,
+    hostname: url.hostname || url.host || 'localhost',
+    port: url.port,
+    pathname: url.pathname,
+    search: url.search,
+});
+const convertHeaders = (headers) => {
+    const result = [];
+    for (const name of Object.keys(headers)) {
+        result[name.toLowerCase()] = headers[name];
+    }
+    return result;
+};
+/* harmony default export */ const dist = (CacheableRequest);
+
+const onResponse = 'onResponse';
+//# sourceMappingURL=index.js.map
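+/*
+A minimal usage sketch for CacheableRequest above, assuming `http` stands for
+`node:http`; with no cache adapter it falls back to an in-memory Keyv store:
+
+    const cacheable = new CacheableRequest(http.request);
+    const emitter = cacheable.request()('http://example.com', response => {
+        console.log(response.fromCache, response.statusCode);
+    });
+    emitter.on('request', request => request.end());
+    emitter.on('error', error => console.error(error));
+*/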
+// EXTERNAL MODULE: ./node_modules/.pnpm/decompress-response@6.0.0/node_modules/decompress-response/index.js
+var decompress_response = __nccwpck_require__(4010);
+;// CONCATENATED MODULE: ./node_modules/.pnpm/form-data-encoder@4.0.2/node_modules/form-data-encoder/lib/index.js
+var __accessCheck = (obj, member, msg) => {
+  if (!member.has(obj))
+    throw TypeError("Cannot " + msg);
+};
+var __privateGet = (obj, member, getter) => {
+  __accessCheck(obj, member, "read from private field");
+  return getter ? getter.call(obj) : member.get(obj);
+};
+var __privateAdd = (obj, member, value) => {
+  if (member.has(obj))
+    throw TypeError("Cannot add the same private member more than once");
+  member instanceof WeakSet ? member.add(obj) : member.set(obj, value);
+};
+var __privateSet = (obj, member, value, setter) => {
+  __accessCheck(obj, member, "write to private field");
+  setter ? setter.call(obj, value) : member.set(obj, value);
+  return value;
+};
+var __privateMethod = (obj, member, method) => {
+  __accessCheck(obj, member, "access private method");
+  return method;
+};
+
+// src/util/isFunction.ts
+var lib_isFunction = (value) => typeof value === "function";
+
+// src/util/isAsyncIterable.ts
+var lib_isAsyncIterable = (value) => lib_isFunction(value[Symbol.asyncIterator]);
+
+// src/util/chunk.ts
+var MAX_CHUNK_SIZE = 65536;
+function* chunk(value) {
+  if (value.byteLength <= MAX_CHUNK_SIZE) {
+    yield value;
+    return;
+  }
+  let offset = 0;
+  while (offset < value.byteLength) {
+    const size = Math.min(value.byteLength - offset, MAX_CHUNK_SIZE);
+    const buffer = value.buffer.slice(offset, offset + size);
+    offset += buffer.byteLength;
+    yield new Uint8Array(buffer);
+  }
+}
+
+// src/util/getStreamIterator.ts
+async function* readStream(readable) {
+  const reader = readable.getReader();
+  while (true) {
+    const { done, value } = await reader.read();
+    if (done) {
+      break;
+    }
+    yield value;
+  }
+}
+async function* chunkStream(stream) {
+  for await (const value of stream) {
+    yield* chunk(value);
+  }
+}
+var getStreamIterator = (source) => {
+  if (lib_isAsyncIterable(source)) {
+    return chunkStream(source);
+  }
+  if (lib_isFunction(source.getReader)) {
+    return chunkStream(readStream(source));
+  }
+  throw new TypeError(
+    "Unsupported data source: Expected either ReadableStream or async iterable."
+  );
+};
+
+// src/util/createBoundary.ts
+var alphabet = "abcdefghijklmnopqrstuvwxyz0123456789";
+function createBoundary() {
+  let size = 16;
+  let res = "";
+  while (size--) {
+    res += alphabet[Math.random() * alphabet.length << 0];
+  }
+  return res;
+}
+
+// src/util/normalizeValue.ts
+var normalizeValue = (value) => String(value).replace(/\r|\n/g, (match, i, str) => {
+  if (match === "\r" && str[i + 1] !== "\n" || match === "\n" && str[i - 1] !== "\r") {
+    return "\r\n";
+  }
+  return match;
+});
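+// Illustrative behaviour: lone CR or LF characters become CRLF while existing CRLF pairs
+// are left alone, e.g. normalizeValue('a\nb\r\nc') === 'a\r\nb\r\nc'.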
+
+// src/util/isPlainObject.ts
+var getType = (value) => Object.prototype.toString.call(value).slice(8, -1).toLowerCase();
+function lib_isPlainObject(value) {
+  if (getType(value) !== "object") {
+    return false;
+  }
+  const pp = Object.getPrototypeOf(value);
+  if (pp === null || pp === void 0) {
+    return true;
+  }
+  const Ctor = pp.constructor && pp.constructor.toString();
+  return Ctor === Object.toString();
+}
+
+// src/util/proxyHeaders.ts
+function getProperty(target, prop) {
+  if (typeof prop === "string") {
+    for (const [name, value] of Object.entries(target)) {
+      if (prop.toLowerCase() === name.toLowerCase()) {
+        return value;
+      }
+    }
+  }
+  return void 0;
+}
+var proxyHeaders = (object) => new Proxy(
+  object,
+  {
+    get: (target, prop) => getProperty(target, prop),
+    has: (target, prop) => getProperty(target, prop) !== void 0
+  }
+);
+
+// src/util/isFormData.ts
+var lib_isFormData = (value) => Boolean(
+  value && lib_isFunction(value.constructor) && value[Symbol.toStringTag] === "FormData" && lib_isFunction(value.append) && lib_isFunction(value.getAll) && lib_isFunction(value.entries) && lib_isFunction(value[Symbol.iterator])
+);
+
+// src/util/escapeName.ts
+var escapeName = (name) => String(name).replace(/\r/g, "%0D").replace(/\n/g, "%0A").replace(/"/g, "%22");
+
+// src/util/isFile.ts
+var isFile = (value) => Boolean(
+  value && typeof value === "object" && lib_isFunction(value.constructor) && value[Symbol.toStringTag] === "File" && lib_isFunction(value.stream) && value.name != null
+);
+
+// src/FormDataEncoder.ts
+var defaultOptions = {
+  enableAdditionalHeaders: false
+};
+var readonlyProp = { writable: false, configurable: false };
+var _CRLF, _CRLF_BYTES, _CRLF_BYTES_LENGTH, _DASHES, _encoder, _footer, _form, _options, _getFieldHeader, getFieldHeader_fn, _getContentLength, getContentLength_fn;
+var FormDataEncoder = class {
+  constructor(form, boundaryOrOptions, options) {
+    __privateAdd(this, _getFieldHeader);
+    /**
+     * Returns form-data content length
+     */
+    __privateAdd(this, _getContentLength);
+    __privateAdd(this, _CRLF, "\r\n");
+    __privateAdd(this, _CRLF_BYTES, void 0);
+    __privateAdd(this, _CRLF_BYTES_LENGTH, void 0);
+    __privateAdd(this, _DASHES, "-".repeat(2));
+    /**
+     * TextEncoder instance
+     */
+    __privateAdd(this, _encoder, new TextEncoder());
+    /**
+     * Returns form-data footer bytes
+     */
+    __privateAdd(this, _footer, void 0);
+    /**
+     * FormData instance
+     */
+    __privateAdd(this, _form, void 0);
+    /**
+     * Instance options
+     */
+    __privateAdd(this, _options, void 0);
+    if (!lib_isFormData(form)) {
+      throw new TypeError("Expected first argument to be a FormData instance.");
+    }
+    let boundary;
+    if (lib_isPlainObject(boundaryOrOptions)) {
+      options = boundaryOrOptions;
+    } else {
+      boundary = boundaryOrOptions;
+    }
+    if (!boundary) {
+      boundary = createBoundary();
+    }
+    if (typeof boundary !== "string") {
+      throw new TypeError("Expected boundary argument to be a string.");
+    }
+    if (options && !lib_isPlainObject(options)) {
+      throw new TypeError("Expected options argument to be an object.");
+    }
+    __privateSet(this, _form, Array.from(form.entries()));
+    __privateSet(this, _options, { ...defaultOptions, ...options });
+    __privateSet(this, _CRLF_BYTES, __privateGet(this, _encoder).encode(__privateGet(this, _CRLF)));
+    __privateSet(this, _CRLF_BYTES_LENGTH, __privateGet(this, _CRLF_BYTES).byteLength);
+    this.boundary = `form-data-boundary-${boundary}`;
+    this.contentType = `multipart/form-data; boundary=${this.boundary}`;
+    __privateSet(this, _footer, __privateGet(this, _encoder).encode(
+      `${__privateGet(this, _DASHES)}${this.boundary}${__privateGet(this, _DASHES)}${__privateGet(this, _CRLF).repeat(2)}`
+    ));
+    const headers = {
+      "Content-Type": this.contentType
+    };
+    const contentLength = __privateMethod(this, _getContentLength, getContentLength_fn).call(this);
+    if (contentLength) {
+      this.contentLength = contentLength;
+      headers["Content-Length"] = contentLength;
+    }
+    this.headers = proxyHeaders(Object.freeze(headers));
+    Object.defineProperties(this, {
+      boundary: readonlyProp,
+      contentType: readonlyProp,
+      contentLength: readonlyProp,
+      headers: readonlyProp
+    });
+  }
+  /**
+   * Creates an iterator that goes through the form-data parts (with metadata).
+   * This method **will not** read the files and **will not** split big values into smaller chunks.
+   *
+   * Using this method, you can convert form-data content into Blob:
+   *
+   * @example
+   *
+   * ```ts
+   * import {Readable} from "stream"
+   *
+   * import {FormDataEncoder} from "form-data-encoder"
+   *
+   * import {FormData} from "formdata-polyfill/esm-min.js"
+   * import {fileFrom} from "fetch-blob/form.js"
+   * import {File} from "fetch-blob/file.js"
+   * import {Blob} from "fetch-blob"
+   *
+   * import fetch from "node-fetch"
+   *
+   * const form = new FormData()
+   *
+   * form.set("field", "Just a random string")
+   * form.set("file", new File(["Using files is class amazing"], "file.txt"))
+   * form.set("fileFromPath", await fileFrom("path/to/a/file.txt"))
+   *
+   * const encoder = new FormDataEncoder(form)
+   *
+   * const options = {
+   *   method: "post",
+   *   body: new Blob(encoder, {type: encoder.contentType})
+   * }
+   *
+   * const response = await fetch("https://httpbin.org/post", options)
+   *
+   * console.log(await response.json())
+   * ```
+   */
+  *values() {
+    for (const [name, raw] of __privateGet(this, _form)) {
+      const value = isFile(raw) ? raw : __privateGet(this, _encoder).encode(
+        normalizeValue(raw)
+      );
+      yield __privateMethod(this, _getFieldHeader, getFieldHeader_fn).call(this, name, value);
+      yield value;
+      yield __privateGet(this, _CRLF_BYTES);
+    }
+    yield __privateGet(this, _footer);
+  }
+  /**
+   * Creates an async iterator that performs the encoding in portions.
+   * This method reads through files and splits big values into smaller pieces (65536 bytes each).
+   *
+   * @example
+   *
+   * ```ts
+   * import {Readable} from "stream"
+   *
+   * import {FormData, File, fileFromPath} from "formdata-node"
+   * import {FormDataEncoder} from "form-data-encoder"
+   *
+   * import fetch from "node-fetch"
+   *
+   * const form = new FormData()
+   *
+   * form.set("field", "Just a random string")
+   * form.set("file", new File(["Using files is class amazing"], "file.txt"))
+   * form.set("fileFromPath", await fileFromPath("path/to/a/file.txt"))
+   *
+   * const encoder = new FormDataEncoder(form)
+   *
+   * const options = {
+   *   method: "post",
+   *   headers: encoder.headers,
+   *   body: Readable.from(encoder.encode()) // or Readable.from(encoder)
+   * }
+   *
+   * const response = await fetch("https://httpbin.org/post", options)
+   *
+   * console.log(await response.json())
+   * ```
+   */
+  async *encode() {
+    for (const part of this.values()) {
+      if (isFile(part)) {
+        yield* getStreamIterator(part.stream());
+      } else {
+        yield* chunk(part);
+      }
+    }
+  }
+  /**
+   * Creates an iterator for reading through the encoder data using for...of loops
+   */
+  [Symbol.iterator]() {
+    return this.values();
+  }
+  /**
+   * Creates an **async** iterator for reading through the encoder data using for await...of loops
+   */
+  [Symbol.asyncIterator]() {
+    return this.encode();
+  }
+};
+_CRLF = new WeakMap();
+_CRLF_BYTES = new WeakMap();
+_CRLF_BYTES_LENGTH = new WeakMap();
+_DASHES = new WeakMap();
+_encoder = new WeakMap();
+_footer = new WeakMap();
+_form = new WeakMap();
+_options = new WeakMap();
+_getFieldHeader = new WeakSet();
+getFieldHeader_fn = function(name, value) {
+  let header = "";
+  header += `${__privateGet(this, _DASHES)}${this.boundary}${__privateGet(this, _CRLF)}`;
+  header += `Content-Disposition: form-data; name="${escapeName(name)}"`;
+  if (isFile(value)) {
+    header += `; filename="${escapeName(value.name)}"${__privateGet(this, _CRLF)}`;
+    header += `Content-Type: ${value.type || "application/octet-stream"}`;
+  }
+  if (__privateGet(this, _options).enableAdditionalHeaders === true) {
+    const size = isFile(value) ? value.size : value.byteLength;
+    if (size != null && !isNaN(size)) {
+      header += `${__privateGet(this, _CRLF)}Content-Length: ${size}`;
+    }
+  }
+  return __privateGet(this, _encoder).encode(`${header}${__privateGet(this, _CRLF).repeat(2)}`);
+};
+_getContentLength = new WeakSet();
+getContentLength_fn = function() {
+  let length = 0;
+  for (const [name, raw] of __privateGet(this, _form)) {
+    const value = isFile(raw) ? raw : __privateGet(this, _encoder).encode(
+      normalizeValue(raw)
+    );
+    const size = isFile(value) ? value.size : value.byteLength;
+    if (size == null || isNaN(size)) {
+      return void 0;
+    }
+    length += __privateMethod(this, _getFieldHeader, getFieldHeader_fn).call(this, name, value).byteLength;
+    length += size;
+    length += __privateGet(this, _CRLF_BYTES_LENGTH);
+  }
+  return String(length + __privateGet(this, _footer).byteLength);
+};
+
+
+;// CONCATENATED MODULE: ./node_modules/.pnpm/got@14.4.4/node_modules/got/dist/source/core/utils/is-form-data.js
+
+function is_form_data_isFormData(body) {
+    return distribution.nodeStream(body) && distribution.function(body.getBoundary);
+}
+
+;// CONCATENATED MODULE: ./node_modules/.pnpm/got@14.4.4/node_modules/got/dist/source/core/utils/get-body-size.js
+
+
+
+
+async function getBodySize(body, headers) {
+    if (headers && 'content-length' in headers) {
+        return Number(headers['content-length']);
+    }
+    if (!body) {
+        return 0;
+    }
+    if (distribution.string(body)) {
+        return external_node_buffer_namespaceObject.Buffer.byteLength(body);
+    }
+    if (distribution.buffer(body)) {
+        return body.length;
+    }
+    if (is_form_data_isFormData(body)) {
+        return (0,external_node_util_.promisify)(body.getLength.bind(body))();
+    }
+    return undefined;
+}
+
+;// CONCATENATED MODULE: ./node_modules/.pnpm/got@14.4.4/node_modules/got/dist/source/core/utils/proxy-events.js
+function proxyEvents(from, to, events) {
+    const eventFunctions = {};
+    for (const event of events) {
+        const eventFunction = (...arguments_) => {
+            to.emit(event, ...arguments_);
+        };
+        eventFunctions[event] = eventFunction;
+        from.on(event, eventFunction);
+    }
+    return () => {
+        for (const [event, eventFunction] of Object.entries(eventFunctions)) {
+            from.off(event, eventFunction);
+        }
+    };
+}
+
+;// CONCATENATED MODULE: external "node:net"
+const external_node_net_namespaceObject = __WEBPACK_EXTERNAL_createRequire(import.meta.url)("node:net");
+;// CONCATENATED MODULE: ./node_modules/.pnpm/got@14.4.4/node_modules/got/dist/source/core/utils/unhandle.js
+// When attaching listeners, it's very easy to forget about them.
+// Especially if you do error handling and set timeouts.
+// So instead of checking if it's proper to throw an error on every timeout ever,
+// use this simple tool which will remove all listeners you have attached.
+function unhandle() {
+    const handlers = [];
+    return {
+        once(origin, event, function_) {
+            origin.once(event, function_);
+            handlers.push({ origin, event, fn: function_ });
+        },
+        unhandleAll() {
+            for (const handler of handlers) {
+                const { origin, event, fn } = handler;
+                origin.removeListener(event, fn);
+            }
+            handlers.length = 0;
+        },
+    };
+}
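+
+/*
+A minimal sketch of how the helper above is meant to be used; the emitter and
+handler names are illustrative, not part of this bundle:
+
+```
+const {once, unhandleAll} = unhandle();
+
+// Register listeners through `once` so they are tracked.
+once(someEmitter, 'error', onError);
+once(someEmitter, 'timeout', onTimeout);
+
+// Later, detach every listener registered above in one call.
+unhandleAll();
+```
+*/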
+
+;// CONCATENATED MODULE: ./node_modules/.pnpm/got@14.4.4/node_modules/got/dist/source/core/timed-out.js
+
+
+const reentry = Symbol('reentry');
+const timed_out_noop = () => { };
+class timed_out_TimeoutError extends Error {
+    event;
+    code;
+    constructor(threshold, event) {
+        super(`Timeout awaiting '${event}' for ${threshold}ms`);
+        this.event = event;
+        this.name = 'TimeoutError';
+        this.code = 'ETIMEDOUT';
+    }
+}
+function timedOut(request, delays, options) {
+    if (reentry in request) {
+        return timed_out_noop;
+    }
+    request[reentry] = true;
+    const cancelers = [];
+    const { once, unhandleAll } = unhandle();
+    const addTimeout = (delay, callback, event) => {
+        const timeout = setTimeout(callback, delay, delay, event);
+        timeout.unref?.();
+        const cancel = () => {
+            clearTimeout(timeout);
+        };
+        cancelers.push(cancel);
+        return cancel;
+    };
+    const { host, hostname } = options;
+    const timeoutHandler = (delay, event) => {
+        request.destroy(new timed_out_TimeoutError(delay, event));
+    };
+    const cancelTimeouts = () => {
+        for (const cancel of cancelers) {
+            cancel();
+        }
+        unhandleAll();
+    };
+    request.once('error', error => {
+        cancelTimeouts();
+        // Save original behavior
+        /* istanbul ignore next */
+        if (request.listenerCount('error') === 0) {
+            throw error;
+        }
+    });
+    if (delays.request !== undefined) {
+        const cancelTimeout = addTimeout(delays.request, timeoutHandler, 'request');
+        once(request, 'response', (response) => {
+            once(response, 'end', cancelTimeout);
+        });
+    }
+    if (delays.socket !== undefined) {
+        const { socket } = delays;
+        const socketTimeoutHandler = () => {
+            timeoutHandler(socket, 'socket');
+        };
+        request.setTimeout(socket, socketTimeoutHandler);
+        // `request.setTimeout(0)` causes a memory leak.
+        // We can just remove the listener and forget about the timer - it's unreffed.
+        // See https://github.com/sindresorhus/got/issues/690
+        cancelers.push(() => {
+            request.removeListener('timeout', socketTimeoutHandler);
+        });
+    }
+    const hasLookup = delays.lookup !== undefined;
+    const hasConnect = delays.connect !== undefined;
+    const hasSecureConnect = delays.secureConnect !== undefined;
+    const hasSend = delays.send !== undefined;
+    if (hasLookup || hasConnect || hasSecureConnect || hasSend) {
+        once(request, 'socket', (socket) => {
+            const { socketPath } = request;
+            /* istanbul ignore next: hard to test */
+            if (socket.connecting) {
+                const hasPath = Boolean(socketPath ?? external_node_net_namespaceObject.isIP(hostname ?? host ?? '') !== 0);
+                if (hasLookup && !hasPath && socket.address().address === undefined) {
+                    const cancelTimeout = addTimeout(delays.lookup, timeoutHandler, 'lookup');
+                    once(socket, 'lookup', cancelTimeout);
+                }
+                if (hasConnect) {
+                    const timeConnect = () => addTimeout(delays.connect, timeoutHandler, 'connect');
+                    if (hasPath) {
+                        once(socket, 'connect', timeConnect());
+                    }
+                    else {
+                        once(socket, 'lookup', (error) => {
+                            if (error === null) {
+                                once(socket, 'connect', timeConnect());
+                            }
+                        });
+                    }
+                }
+                if (hasSecureConnect && options.protocol === 'https:') {
+                    once(socket, 'connect', () => {
+                        const cancelTimeout = addTimeout(delays.secureConnect, timeoutHandler, 'secureConnect');
+                        once(socket, 'secureConnect', cancelTimeout);
+                    });
+                }
+            }
+            if (hasSend) {
+                const timeRequest = () => addTimeout(delays.send, timeoutHandler, 'send');
+                /* istanbul ignore next: hard to test */
+                if (socket.connecting) {
+                    once(socket, 'connect', () => {
+                        once(request, 'upload-complete', timeRequest());
+                    });
+                }
+                else {
+                    once(request, 'upload-complete', timeRequest());
+                }
+            }
+        });
+    }
+    if (delays.response !== undefined) {
+        once(request, 'upload-complete', () => {
+            const cancelTimeout = addTimeout(delays.response, timeoutHandler, 'response');
+            once(request, 'response', cancelTimeout);
+        });
+    }
+    if (delays.read !== undefined) {
+        once(request, 'response', (response) => {
+            const cancelTimeout = addTimeout(delays.read, timeoutHandler, 'read');
+            once(response, 'end', cancelTimeout);
+        });
+    }
+    return cancelTimeouts;
+}
+
+;// CONCATENATED MODULE: ./node_modules/.pnpm/got@14.4.4/node_modules/got/dist/source/core/utils/url-to-options.js
+
+function urlToOptions(url) {
+    // Cast to URL
+    url = url;
+    const options = {
+        protocol: url.protocol,
+        hostname: distribution.string(url.hostname) && url.hostname.startsWith('[') ? url.hostname.slice(1, -1) : url.hostname,
+        host: url.host,
+        hash: url.hash,
+        search: url.search,
+        pathname: url.pathname,
+        href: url.href,
+        path: `${url.pathname || ''}${url.search || ''}`,
+    };
+    if (distribution.string(url.port) && url.port.length > 0) {
+        options.port = Number(url.port);
+    }
+    if (url.username || url.password) {
+        options.auth = `${url.username || ''}:${url.password || ''}`;
+    }
+    return options;
+}
+
+;// CONCATENATED MODULE: ./node_modules/.pnpm/got@14.4.4/node_modules/got/dist/source/core/utils/weakable-map.js
+class WeakableMap {
+    weakMap;
+    map;
+    constructor() {
+        this.weakMap = new WeakMap();
+        this.map = new Map();
+    }
+    set(key, value) {
+        if (typeof key === 'object') {
+            this.weakMap.set(key, value);
+        }
+        else {
+            this.map.set(key, value);
+        }
+    }
+    get(key) {
+        if (typeof key === 'object') {
+            return this.weakMap.get(key);
+        }
+        return this.map.get(key);
+    }
+    has(key) {
+        if (typeof key === 'object') {
+            return this.weakMap.has(key);
+        }
+        return this.map.has(key);
+    }
+}
+
+;// CONCATENATED MODULE: ./node_modules/.pnpm/got@14.4.4/node_modules/got/dist/source/core/calculate-retry-delay.js
+const calculateRetryDelay = ({ attemptCount, retryOptions, error, retryAfter, computedValue, }) => {
+    if (error.name === 'RetryError') {
+        return 1;
+    }
+    if (attemptCount > retryOptions.limit) {
+        return 0;
+    }
+    const hasMethod = retryOptions.methods.includes(error.options.method);
+    const hasErrorCode = retryOptions.errorCodes.includes(error.code);
+    const hasStatusCode = error.response && retryOptions.statusCodes.includes(error.response.statusCode);
+    if (!hasMethod || (!hasErrorCode && !hasStatusCode)) {
+        return 0;
+    }
+    if (error.response) {
+        if (retryAfter) {
+            // In this case `computedValue` is `options.request.timeout`
+            if (retryAfter > computedValue) {
+                return 0;
+            }
+            return retryAfter;
+        }
+        if (error.response.statusCode === 413) {
+            return 0;
+        }
+    }
+    const noise = Math.random() * retryOptions.noise;
+    return Math.min(((2 ** (attemptCount - 1)) * 1000), retryOptions.backoffLimit) + noise;
+};
+/* harmony default export */ const calculate_retry_delay = (calculateRetryDelay);
+
+;// CONCATENATED MODULE: external "node:tls"
+const external_node_tls_namespaceObject = __WEBPACK_EXTERNAL_createRequire(import.meta.url)("node:tls");
+// EXTERNAL MODULE: external "node:https"
+var external_node_https_ = __nccwpck_require__(4708);
+;// CONCATENATED MODULE: external "node:dns"
+const external_node_dns_namespaceObject = __WEBPACK_EXTERNAL_createRequire(import.meta.url)("node:dns");
+;// CONCATENATED MODULE: ./node_modules/.pnpm/cacheable-lookup@7.0.0/node_modules/cacheable-lookup/source/index.js
+
+
+
+
+const {Resolver: AsyncResolver} = external_node_dns_namespaceObject.promises;
+
+const kCacheableLookupCreateConnection = Symbol('cacheableLookupCreateConnection');
+const kCacheableLookupInstance = Symbol('cacheableLookupInstance');
+const kExpires = Symbol('expires');
+
+const supportsALL = typeof external_node_dns_namespaceObject.ALL === 'number';
+
+const verifyAgent = agent => {
+	if (!(agent && typeof agent.createConnection === 'function')) {
+		throw new Error('Expected an Agent instance as the first argument');
+	}
+};
+
+const map4to6 = entries => {
+	for (const entry of entries) {
+		if (entry.family === 6) {
+			continue;
+		}
+
+		entry.address = `::ffff:${entry.address}`;
+		entry.family = 6;
+	}
+};
+
+const getIfaceInfo = () => {
+	let has4 = false;
+	let has6 = false;
+
+	for (const device of Object.values(external_node_os_.networkInterfaces())) {
+		for (const iface of device) {
+			if (iface.internal) {
+				continue;
+			}
+
+			if (iface.family === 'IPv6') {
+				has6 = true;
+			} else {
+				has4 = true;
+			}
+
+			if (has4 && has6) {
+				return {has4, has6};
+			}
+		}
+	}
+
+	return {has4, has6};
+};
+
+const source_isIterable = map => {
+	return Symbol.iterator in map;
+};
+
+const ignoreNoResultErrors = dnsPromise => {
+	return dnsPromise.catch(error => {
+		if (
+			error.code === 'ENODATA' ||
+			error.code === 'ENOTFOUND' ||
+			error.code === 'ENOENT' // Windows: name exists, but not this record type
+		) {
+			return [];
+		}
+
+		throw error;
+	});
+};
+
+const ttl = {ttl: true};
+const source_all = {all: true};
+const all4 = {all: true, family: 4};
+const all6 = {all: true, family: 6};
+
+class CacheableLookup {
+	constructor({
+		cache = new Map(),
+		maxTtl = Infinity,
+		fallbackDuration = 3600,
+		errorTtl = 0.15,
+		resolver = new AsyncResolver(),
+		lookup = external_node_dns_namespaceObject.lookup
+	} = {}) {
+		this.maxTtl = maxTtl;
+		this.errorTtl = errorTtl;
+
+		this._cache = cache;
+		this._resolver = resolver;
+		this._dnsLookup = lookup && (0,external_node_util_.promisify)(lookup);
+		this.stats = {
+			cache: 0,
+			query: 0
+		};
+
+		if (this._resolver instanceof AsyncResolver) {
+			this._resolve4 = this._resolver.resolve4.bind(this._resolver);
+			this._resolve6 = this._resolver.resolve6.bind(this._resolver);
+		} else {
+			this._resolve4 = (0,external_node_util_.promisify)(this._resolver.resolve4.bind(this._resolver));
+			this._resolve6 = (0,external_node_util_.promisify)(this._resolver.resolve6.bind(this._resolver));
+		}
+
+		this._iface = getIfaceInfo();
+
+		this._pending = {};
+		this._nextRemovalTime = false;
+		this._hostnamesToFallback = new Set();
+
+		this.fallbackDuration = fallbackDuration;
+
+		if (fallbackDuration > 0) {
+			const interval = setInterval(() => {
+				this._hostnamesToFallback.clear();
+			}, fallbackDuration * 1000);
+
+			/* istanbul ignore next: There is no `interval.unref()` when running inside an Electron renderer */
+			if (interval.unref) {
+				interval.unref();
+			}
+
+			this._fallbackInterval = interval;
+		}
+
+		this.lookup = this.lookup.bind(this);
+		this.lookupAsync = this.lookupAsync.bind(this);
+	}
+
+	set servers(servers) {
+		this.clear();
+
+		this._resolver.setServers(servers);
+	}
+
+	get servers() {
+		return this._resolver.getServers();
+	}
+
+	lookup(hostname, options, callback) {
+		if (typeof options === 'function') {
+			callback = options;
+			options = {};
+		} else if (typeof options === 'number') {
+			options = {
+				family: options
+			};
+		}
+
+		if (!callback) {
+			throw new Error('Callback must be a function.');
+		}
+
+		// eslint-disable-next-line promise/prefer-await-to-then
+		this.lookupAsync(hostname, options).then(result => {
+			if (options.all) {
+				callback(null, result);
+			} else {
+				callback(null, result.address, result.family, result.expires, result.ttl, result.source);
+			}
+		}, callback);
+	}
+
+	async lookupAsync(hostname, options = {}) {
+		if (typeof options === 'number') {
+			options = {
+				family: options
+			};
+		}
+
+		let cached = await this.query(hostname);
+
+		if (options.family === 6) {
+			const filtered = cached.filter(entry => entry.family === 6);
+
+			if (options.hints & external_node_dns_namespaceObject.V4MAPPED) {
+				if ((supportsALL && options.hints & external_node_dns_namespaceObject.ALL) || filtered.length === 0) {
+					map4to6(cached);
+				} else {
+					cached = filtered;
+				}
+			} else {
+				cached = filtered;
+			}
+		} else if (options.family === 4) {
+			cached = cached.filter(entry => entry.family === 4);
+		}
+
+		if (options.hints & external_node_dns_namespaceObject.ADDRCONFIG) {
+			const {_iface} = this;
+			cached = cached.filter(entry => entry.family === 6 ? _iface.has6 : _iface.has4);
+		}
+
+		if (cached.length === 0) {
+			const error = new Error(`cacheableLookup ENOTFOUND ${hostname}`);
+			error.code = 'ENOTFOUND';
+			error.hostname = hostname;
+
+			throw error;
+		}
+
+		if (options.all) {
+			return cached;
+		}
+
+		return cached[0];
+	}
+
+	async query(hostname) {
+		let source = 'cache';
+		let cached = await this._cache.get(hostname);
+
+		if (cached) {
+			this.stats.cache++;
+		}
+
+		if (!cached) {
+			const pending = this._pending[hostname];
+			if (pending) {
+				this.stats.cache++;
+				cached = await pending;
+			} else {
+				source = 'query';
+				const newPromise = this.queryAndCache(hostname);
+				this._pending[hostname] = newPromise;
+				this.stats.query++;
+				try {
+					cached = await newPromise;
+				} finally {
+					delete this._pending[hostname];
+				}
+			}
+		}
+
+		cached = cached.map(entry => {
+			return {...entry, source};
+		});
+
+		return cached;
+	}
+
+	async _resolve(hostname) {
+		// ANY is unsafe as it doesn't trigger new queries in the underlying server.
+		const [A, AAAA] = await Promise.all([
+			ignoreNoResultErrors(this._resolve4(hostname, ttl)),
+			ignoreNoResultErrors(this._resolve6(hostname, ttl))
+		]);
+
+		let aTtl = 0;
+		let aaaaTtl = 0;
+		let cacheTtl = 0;
+
+		const now = Date.now();
+
+		for (const entry of A) {
+			entry.family = 4;
+			entry.expires = now + (entry.ttl * 1000);
+
+			aTtl = Math.max(aTtl, entry.ttl);
+		}
+
+		for (const entry of AAAA) {
+			entry.family = 6;
+			entry.expires = now + (entry.ttl * 1000);
+
+			aaaaTtl = Math.max(aaaaTtl, entry.ttl);
+		}
+
+		if (A.length > 0) {
+			if (AAAA.length > 0) {
+				cacheTtl = Math.min(aTtl, aaaaTtl);
+			} else {
+				cacheTtl = aTtl;
+			}
+		} else {
+			cacheTtl = aaaaTtl;
+		}
+
+		return {
+			entries: [
+				...A,
+				...AAAA
+			],
+			cacheTtl
+		};
+	}
+
+	async _lookup(hostname) {
+		try {
+			const [A, AAAA] = await Promise.all([
+				// Passing {all: true} doesn't return all IPv4 and IPv6 entries.
+				// See https://github.com/szmarczak/cacheable-lookup/issues/42
+				ignoreNoResultErrors(this._dnsLookup(hostname, all4)),
+				ignoreNoResultErrors(this._dnsLookup(hostname, all6))
+			]);
+
+			return {
+				entries: [
+					...A,
+					...AAAA
+				],
+				cacheTtl: 0
+			};
+		} catch {
+			return {
+				entries: [],
+				cacheTtl: 0
+			};
+		}
+	}
+
+	async _set(hostname, data, cacheTtl) {
+		if (this.maxTtl > 0 && cacheTtl > 0) {
+			cacheTtl = Math.min(cacheTtl, this.maxTtl) * 1000;
+			data[kExpires] = Date.now() + cacheTtl;
+
+			try {
+				await this._cache.set(hostname, data, cacheTtl);
+			} catch (error) {
+				this.lookupAsync = async () => {
+					const cacheError = new Error('Cache Error. Please recreate the CacheableLookup instance.');
+					cacheError.cause = error;
+
+					throw cacheError;
+				};
+			}
+
+			if (source_isIterable(this._cache)) {
+				this._tick(cacheTtl);
+			}
+		}
+	}
+
+	async queryAndCache(hostname) {
+		if (this._hostnamesToFallback.has(hostname)) {
+			return this._dnsLookup(hostname, source_all);
+		}
+
+		let query = await this._resolve(hostname);
+
+		if (query.entries.length === 0 && this._dnsLookup) {
+			query = await this._lookup(hostname);
+
+			if (query.entries.length !== 0 && this.fallbackDuration > 0) {
+				// Use `dns.lookup(...)` for that particular hostname
+				this._hostnamesToFallback.add(hostname);
+			}
+		}
+
+		const cacheTtl = query.entries.length === 0 ? this.errorTtl : query.cacheTtl;
+		await this._set(hostname, query.entries, cacheTtl);
+
+		return query.entries;
+	}
+
+	_tick(ms) {
+		const nextRemovalTime = this._nextRemovalTime;
+
+		if (!nextRemovalTime || ms < nextRemovalTime) {
+			clearTimeout(this._removalTimeout);
+
+			this._nextRemovalTime = ms;
+
+			this._removalTimeout = setTimeout(() => {
+				this._nextRemovalTime = false;
+
+				let nextExpiry = Infinity;
+
+				const now = Date.now();
+
+				for (const [hostname, entries] of this._cache) {
+					const expires = entries[kExpires];
+
+					if (now >= expires) {
+						this._cache.delete(hostname);
+					} else if (expires < nextExpiry) {
+						nextExpiry = expires;
+					}
+				}
+
+				if (nextExpiry !== Infinity) {
+					this._tick(nextExpiry - now);
+				}
+			}, ms);
+
+			/* istanbul ignore next: There is no `timeout.unref()` when running inside an Electron renderer */
+			if (this._removalTimeout.unref) {
+				this._removalTimeout.unref();
+			}
+		}
+	}
+
+	install(agent) {
+		verifyAgent(agent);
+
+		if (kCacheableLookupCreateConnection in agent) {
+			throw new Error('CacheableLookup has been already installed');
+		}
+
+		agent[kCacheableLookupCreateConnection] = agent.createConnection;
+		agent[kCacheableLookupInstance] = this;
+
+		agent.createConnection = (options, callback) => {
+			if (!('lookup' in options)) {
+				options.lookup = this.lookup;
+			}
+
+			return agent[kCacheableLookupCreateConnection](options, callback);
+		};
+	}
+
+	uninstall(agent) {
+		verifyAgent(agent);
+
+		if (agent[kCacheableLookupCreateConnection]) {
+			if (agent[kCacheableLookupInstance] !== this) {
+				throw new Error('The agent is not owned by this CacheableLookup instance');
+			}
+
+			agent.createConnection = agent[kCacheableLookupCreateConnection];
+
+			delete agent[kCacheableLookupCreateConnection];
+			delete agent[kCacheableLookupInstance];
+		}
+	}
+
+	updateInterfaceInfo() {
+		const {_iface} = this;
+
+		this._iface = getIfaceInfo();
+
+		if ((_iface.has4 && !this._iface.has4) || (_iface.has6 && !this._iface.has6)) {
+			this._cache.clear();
+		}
+	}
+
+	clear(hostname) {
+		if (hostname) {
+			this._cache.delete(hostname);
+			return;
+		}
+
+		this._cache.clear();
+	}
+}
+
+// EXTERNAL MODULE: ./node_modules/.pnpm/http2-wrapper@2.2.1/node_modules/http2-wrapper/source/index.js
+var http2_wrapper_source = __nccwpck_require__(5409);
+;// CONCATENATED MODULE: ./node_modules/.pnpm/got@14.4.4/node_modules/got/dist/source/core/parse-link-header.js
+function parseLinkHeader(link) {
+    const parsed = [];
+    const items = link.split(',');
+    for (const item of items) {
+        // https://tools.ietf.org/html/rfc5988#section-5
+        const [rawUriReference, ...rawLinkParameters] = item.split(';');
+        const trimmedUriReference = rawUriReference.trim();
+        // eslint-disable-next-line @typescript-eslint/prefer-string-starts-ends-with
+        if (trimmedUriReference[0] !== '<' || trimmedUriReference.at(-1) !== '>') {
+            throw new Error(`Invalid format of the Link header reference: ${trimmedUriReference}`);
+        }
+        const reference = trimmedUriReference.slice(1, -1);
+        const parameters = {};
+        if (rawLinkParameters.length === 0) {
+            throw new Error(`Unexpected end of Link header parameters: ${rawLinkParameters.join(';')}`);
+        }
+        for (const rawParameter of rawLinkParameters) {
+            const trimmedRawParameter = rawParameter.trim();
+            const center = trimmedRawParameter.indexOf('=');
+            if (center === -1) {
+                throw new Error(`Failed to parse Link header: ${link}`);
+            }
+            const name = trimmedRawParameter.slice(0, center).trim();
+            const value = trimmedRawParameter.slice(center + 1).trim();
+            parameters[name] = value;
+        }
+        parsed.push({
+            reference,
+            parameters,
+        });
+    }
+    return parsed;
+}
+
+;// CONCATENATED MODULE: ./node_modules/.pnpm/got@14.4.4/node_modules/got/dist/source/core/options.js
+
+
+
+// DO NOT use destructuring for `https.request` and `http.request` as it's not compatible with `nock`.
+
+
+
+
+
+
+
+
+const [major, minor] = external_node_process_.versions.node.split('.').map(Number);
+function validateSearchParameters(searchParameters) {
+    // eslint-disable-next-line guard-for-in
+    for (const key in searchParameters) {
+        const value = searchParameters[key];
+        assert.any([distribution.string, distribution.number, distribution.boolean, distribution.null, distribution.undefined], value);
+    }
+}
+const globalCache = new Map();
+let globalDnsCache;
+const getGlobalDnsCache = () => {
+    if (globalDnsCache) {
+        return globalDnsCache;
+    }
+    globalDnsCache = new CacheableLookup();
+    return globalDnsCache;
+};
+const defaultInternals = {
+    request: undefined,
+    agent: {
+        http: undefined,
+        https: undefined,
+        http2: undefined,
+    },
+    h2session: undefined,
+    decompress: true,
+    timeout: {
+        connect: undefined,
+        lookup: undefined,
+        read: undefined,
+        request: undefined,
+        response: undefined,
+        secureConnect: undefined,
+        send: undefined,
+        socket: undefined,
+    },
+    prefixUrl: '',
+    body: undefined,
+    form: undefined,
+    json: undefined,
+    cookieJar: undefined,
+    ignoreInvalidCookies: false,
+    searchParams: undefined,
+    dnsLookup: undefined,
+    dnsCache: undefined,
+    context: {},
+    hooks: {
+        init: [],
+        beforeRequest: [],
+        beforeError: [],
+        beforeRedirect: [],
+        beforeRetry: [],
+        afterResponse: [],
+    },
+    followRedirect: true,
+    maxRedirects: 10,
+    cache: undefined,
+    throwHttpErrors: true,
+    username: '',
+    password: '',
+    http2: false,
+    allowGetBody: false,
+    headers: {
+        'user-agent': 'got (https://github.com/sindresorhus/got)',
+    },
+    methodRewriting: false,
+    dnsLookupIpVersion: undefined,
+    parseJson: JSON.parse,
+    stringifyJson: JSON.stringify,
+    retry: {
+        limit: 2,
+        methods: [
+            'GET',
+            'PUT',
+            'HEAD',
+            'DELETE',
+            'OPTIONS',
+            'TRACE',
+        ],
+        statusCodes: [
+            408,
+            413,
+            429,
+            500,
+            502,
+            503,
+            504,
+            521,
+            522,
+            524,
+        ],
+        errorCodes: [
+            'ETIMEDOUT',
+            'ECONNRESET',
+            'EADDRINUSE',
+            'ECONNREFUSED',
+            'EPIPE',
+            'ENOTFOUND',
+            'ENETUNREACH',
+            'EAI_AGAIN',
+        ],
+        maxRetryAfter: undefined,
+        calculateDelay: ({ computedValue }) => computedValue,
+        backoffLimit: Number.POSITIVE_INFINITY,
+        noise: 100,
+    },
+    localAddress: undefined,
+    method: 'GET',
+    createConnection: undefined,
+    cacheOptions: {
+        shared: undefined,
+        cacheHeuristic: undefined,
+        immutableMinTimeToLive: undefined,
+        ignoreCargoCult: undefined,
+    },
+    https: {
+        alpnProtocols: undefined,
+        rejectUnauthorized: undefined,
+        checkServerIdentity: undefined,
+        certificateAuthority: undefined,
+        key: undefined,
+        certificate: undefined,
+        passphrase: undefined,
+        pfx: undefined,
+        ciphers: undefined,
+        honorCipherOrder: undefined,
+        minVersion: undefined,
+        maxVersion: undefined,
+        signatureAlgorithms: undefined,
+        tlsSessionLifetime: undefined,
+        dhparam: undefined,
+        ecdhCurve: undefined,
+        certificateRevocationLists: undefined,
+    },
+    encoding: undefined,
+    resolveBodyOnly: false,
+    isStream: false,
+    responseType: 'text',
+    url: undefined,
+    pagination: {
+        transform(response) {
+            if (response.request.options.responseType === 'json') {
+                return response.body;
+            }
+            return JSON.parse(response.body);
+        },
+        paginate({ response }) {
+            const rawLinkHeader = response.headers.link;
+            if (typeof rawLinkHeader !== 'string' || rawLinkHeader.trim() === '') {
+                return false;
+            }
+            const parsed = parseLinkHeader(rawLinkHeader);
+            const next = parsed.find(entry => entry.parameters.rel === 'next' || entry.parameters.rel === '"next"');
+            if (next) {
+                return {
+                    url: new URL(next.reference, response.url),
+                };
+            }
+            return false;
+        },
+        filter: () => true,
+        shouldContinue: () => true,
+        countLimit: Number.POSITIVE_INFINITY,
+        backoff: 0,
+        requestLimit: 10_000,
+        stackAllItems: false,
+    },
+    setHost: true,
+    maxHeaderSize: undefined,
+    signal: undefined,
+    enableUnixSockets: false,
+};
+const cloneInternals = (internals) => {
+    const { hooks, retry } = internals;
+    const result = {
+        ...internals,
+        context: { ...internals.context },
+        cacheOptions: { ...internals.cacheOptions },
+        https: { ...internals.https },
+        agent: { ...internals.agent },
+        headers: { ...internals.headers },
+        retry: {
+            ...retry,
+            errorCodes: [...retry.errorCodes],
+            methods: [...retry.methods],
+            statusCodes: [...retry.statusCodes],
+        },
+        timeout: { ...internals.timeout },
+        hooks: {
+            init: [...hooks.init],
+            beforeRequest: [...hooks.beforeRequest],
+            beforeError: [...hooks.beforeError],
+            beforeRedirect: [...hooks.beforeRedirect],
+            beforeRetry: [...hooks.beforeRetry],
+            afterResponse: [...hooks.afterResponse],
+        },
+        searchParams: internals.searchParams ? new URLSearchParams(internals.searchParams) : undefined,
+        pagination: { ...internals.pagination },
+    };
+    if (result.url !== undefined) {
+        result.prefixUrl = '';
+    }
+    return result;
+};
+const cloneRaw = (raw) => {
+    const { hooks, retry } = raw;
+    const result = { ...raw };
+    if (distribution.object(raw.context)) {
+        result.context = { ...raw.context };
+    }
+    if (distribution.object(raw.cacheOptions)) {
+        result.cacheOptions = { ...raw.cacheOptions };
+    }
+    if (distribution.object(raw.https)) {
+        result.https = { ...raw.https };
+    }
+    if (distribution.object(raw.cacheOptions)) {
+        result.cacheOptions = { ...result.cacheOptions };
+    }
+    if (distribution.object(raw.agent)) {
+        result.agent = { ...raw.agent };
+    }
+    if (distribution.object(raw.headers)) {
+        result.headers = { ...raw.headers };
+    }
+    if (distribution.object(retry)) {
+        result.retry = { ...retry };
+        if (distribution.array(retry.errorCodes)) {
+            result.retry.errorCodes = [...retry.errorCodes];
+        }
+        if (distribution.array(retry.methods)) {
+            result.retry.methods = [...retry.methods];
+        }
+        if (distribution.array(retry.statusCodes)) {
+            result.retry.statusCodes = [...retry.statusCodes];
+        }
+    }
+    if (distribution.object(raw.timeout)) {
+        result.timeout = { ...raw.timeout };
+    }
+    if (distribution.object(hooks)) {
+        result.hooks = {
+            ...hooks,
+        };
+        if (distribution.array(hooks.init)) {
+            result.hooks.init = [...hooks.init];
+        }
+        if (distribution.array(hooks.beforeRequest)) {
+            result.hooks.beforeRequest = [...hooks.beforeRequest];
+        }
+        if (distribution.array(hooks.beforeError)) {
+            result.hooks.beforeError = [...hooks.beforeError];
+        }
+        if (distribution.array(hooks.beforeRedirect)) {
+            result.hooks.beforeRedirect = [...hooks.beforeRedirect];
+        }
+        if (distribution.array(hooks.beforeRetry)) {
+            result.hooks.beforeRetry = [...hooks.beforeRetry];
+        }
+        if (distribution.array(hooks.afterResponse)) {
+            result.hooks.afterResponse = [...hooks.afterResponse];
+        }
+    }
+    // TODO: raw.searchParams
+    if (distribution.object(raw.pagination)) {
+        result.pagination = { ...raw.pagination };
+    }
+    return result;
+};
+const getHttp2TimeoutOption = (internals) => {
+    const delays = [internals.timeout.socket, internals.timeout.connect, internals.timeout.lookup, internals.timeout.request, internals.timeout.secureConnect].filter(delay => typeof delay === 'number');
+    if (delays.length > 0) {
+        return Math.min(...delays);
+    }
+    return undefined;
+};
+const init = (options, withOptions, self) => {
+    const initHooks = options.hooks?.init;
+    if (initHooks) {
+        for (const hook of initHooks) {
+            hook(withOptions, self);
+        }
+    }
+};
+class Options {
+    _unixOptions;
+    _internals;
+    _merging;
+    _init;
+    constructor(input, options, defaults) {
+        assert.any([distribution.string, distribution.urlInstance, distribution.object, distribution.undefined], input);
+        assert.any([distribution.object, distribution.undefined], options);
+        assert.any([distribution.object, distribution.undefined], defaults);
+        if (input instanceof Options || options instanceof Options) {
+            throw new TypeError('The defaults must be passed as the third argument');
+        }
+        this._internals = cloneInternals(defaults?._internals ?? defaults ?? defaultInternals);
+        this._init = [...(defaults?._init ?? [])];
+        this._merging = false;
+        this._unixOptions = undefined;
+        // This rule allows `finally` to be considered more important.
+        // Meaning no matter the error thrown in the `try` block,
+        // if `finally` throws then the `finally` error will be thrown.
+        //
+        // Yes, we want this. If we set `url` first, then the `url.searchParams`
+        // would get merged. Instead we set the `searchParams` first, then
+        // `url.searchParams` is overwritten as expected.
+        //
+        /* eslint-disable no-unsafe-finally */
+        try {
+            if (distribution.plainObject(input)) {
+                try {
+                    this.merge(input);
+                    this.merge(options);
+                }
+                finally {
+                    this.url = input.url;
+                }
+            }
+            else {
+                try {
+                    this.merge(options);
+                }
+                finally {
+                    if (options?.url !== undefined) {
+                        if (input === undefined) {
+                            this.url = options.url;
+                        }
+                        else {
+                            throw new TypeError('The `url` option is mutually exclusive with the `input` argument');
+                        }
+                    }
+                    else if (input !== undefined) {
+                        this.url = input;
+                    }
+                }
+            }
+        }
+        catch (error) {
+            error.options = this;
+            throw error;
+        }
+        /* eslint-enable no-unsafe-finally */
+    }
+    merge(options) {
+        if (!options) {
+            return;
+        }
+        if (options instanceof Options) {
+            for (const init of options._init) {
+                this.merge(init);
+            }
+            return;
+        }
+        options = cloneRaw(options);
+        init(this, options, this);
+        init(options, options, this);
+        this._merging = true;
+        // Always merge `isStream` first
+        if ('isStream' in options) {
+            this.isStream = options.isStream;
+        }
+        try {
+            let push = false;
+            for (const key in options) {
+                // `got.extend()` options
+                if (key === 'mutableDefaults' || key === 'handlers') {
+                    continue;
+                }
+                // Never merge `url`
+                if (key === 'url') {
+                    continue;
+                }
+                if (!(key in this)) {
+                    throw new Error(`Unexpected option: ${key}`);
+                }
+                // @ts-expect-error Type 'unknown' is not assignable to type 'never'.
+                const value = options[key];
+                if (value === undefined) {
+                    continue;
+                }
+                // @ts-expect-error Type 'unknown' is not assignable to type 'never'.
+                this[key] = value;
+                push = true;
+            }
+            if (push) {
+                this._init.push(options);
+            }
+        }
+        finally {
+            this._merging = false;
+        }
+    }
+    /**
+    Custom request function.
+    The main purpose of this is to [support HTTP2 using a wrapper](https://github.com/szmarczak/http2-wrapper).
+
+    @default http.request | https.request
+    */
+    get request() {
+        return this._internals.request;
+    }
+    set request(value) {
+        assert.any([distribution.function, distribution.undefined], value);
+        this._internals.request = value;
+    }
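+    /*
+    A minimal sketch for the `request` option documented above, assuming the
+    `auto` helper exported by the http2-wrapper package referenced there:
+
+    ```
+    import got from 'got';
+    import http2wrapper from 'http2-wrapper';
+
+    // Let http2-wrapper negotiate HTTP/2 or HTTP/1.1 via ALPN.
+    await got('https://example.com', {
+        request: http2wrapper.auto
+    });
+    ```
+    */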
+    /**
+    An object representing `http`, `https` and `http2` keys for [`http.Agent`](https://nodejs.org/api/http.html#http_class_http_agent), [`https.Agent`](https://nodejs.org/api/https.html#https_class_https_agent) and [`http2wrapper.Agent`](https://github.com/szmarczak/http2-wrapper#new-http2agentoptions) instances.
+    This is necessary because a request to one protocol might redirect to another.
+    In such a scenario, Got will switch over to the right protocol agent for you.
+
+    If a key is not present, it will default to a global agent.
+
+    @example
+    ```
+    import got from 'got';
+    import HttpAgent from 'agentkeepalive';
+
+    const {HttpsAgent} = HttpAgent;
+
+    await got('https://sindresorhus.com', {
+        agent: {
+            http: new HttpAgent(),
+            https: new HttpsAgent()
+        }
+    });
+    ```
+    */
+    get agent() {
+        return this._internals.agent;
+    }
+    set agent(value) {
+        assert.plainObject(value);
+        // eslint-disable-next-line guard-for-in
+        for (const key in value) {
+            if (!(key in this._internals.agent)) {
+                throw new TypeError(`Unexpected agent option: ${key}`);
+            }
+            // @ts-expect-error - No idea why `value[key]` doesn't work here.
+            assert.any([distribution.object, distribution.undefined], value[key]);
+        }
+        if (this._merging) {
+            Object.assign(this._internals.agent, value);
+        }
+        else {
+            this._internals.agent = { ...value };
+        }
+    }
+    get h2session() {
+        return this._internals.h2session;
+    }
+    set h2session(value) {
+        this._internals.h2session = value;
+    }
+    /**
+    Decompress the response automatically.
+
+    This will set the `accept-encoding` header to `gzip, deflate, br` unless you set it yourself.
+
+    If this is disabled, a compressed response is returned as a `Buffer`.
+    This may be useful if you want to handle decompression yourself or stream the raw compressed data.
+
+    @default true
+    */
+    get decompress() {
+        return this._internals.decompress;
+    }
+    set decompress(value) {
+        assert.boolean(value);
+        this._internals.decompress = value;
+    }
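+    /*
+    A minimal sketch for the `decompress` option documented above:
+
+    ```
+    import got from 'got';
+
+    // With decompression disabled, a compressed response body arrives as a raw Buffer.
+    const response = await got('https://example.com', {decompress: false});
+    console.log(response.headers['content-encoding']);
+    ```
+    */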
+    /**
+    Milliseconds to wait for the server to end the response before aborting the request with a `got.TimeoutError` (a.k.a. the `request` property).
+    By default, there's no timeout.
+
+    This also accepts an `object` with the following fields to constrain the duration of each phase of the request lifecycle:
+
+    - `lookup` starts when a socket is assigned and ends when the hostname has been resolved.
+        Does not apply when using a Unix domain socket.
+    - `connect` starts when `lookup` completes (or when the socket is assigned if lookup does not apply to the request) and ends when the socket is connected.
+    - `secureConnect` starts when `connect` completes and ends when the handshaking process completes (HTTPS only).
+    - `socket` starts when the socket is connected. See [request.setTimeout](https://nodejs.org/api/http.html#http_request_settimeout_timeout_callback).
+    - `response` starts when the request has been written to the socket and ends when the response headers are received.
+    - `send` starts when the socket is connected and ends when the request has been written to the socket.
+    - `request` starts when the request is initiated and ends when the response's end event fires.
+    */
+    get timeout() {
+        // We always return `Delays` here.
+        // It has to be `Delays | number`, otherwise TypeScript will error because the getter and the setter have incompatible types.
+        return this._internals.timeout;
+    }
+    set timeout(value) {
+        assert.plainObject(value);
+        // eslint-disable-next-line guard-for-in
+        for (const key in value) {
+            if (!(key in this._internals.timeout)) {
+                throw new Error(`Unexpected timeout option: ${key}`);
+            }
+            // @ts-expect-error - No idea why `value[key]` doesn't work here.
+            assert.any([distribution.number, distribution.undefined], value[key]);
+        }
+        if (this._merging) {
+            Object.assign(this._internals.timeout, value);
+        }
+        else {
+            this._internals.timeout = { ...value };
+        }
+    }
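+    /*
+    A minimal sketch for the timeout phases documented above; the values are
+    illustrative:
+
+    ```
+    import got from 'got';
+
+    try {
+        await got('https://example.com', {
+            timeout: {
+                connect: 1000,   // The socket must connect within 1 second.
+                request: 10000   // The whole request must finish within 10 seconds.
+            }
+        });
+    } catch (error) {
+        // A timed-out phase surfaces as a TimeoutError with code 'ETIMEDOUT'.
+        console.log(error.name, error.code);
+    }
+    ```
+    */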
+    /**
+    When specified, `prefixUrl` will be prepended to `url`.
+    The prefix can be any valid URL, either relative or absolute.
+    A trailing slash `/` is optional - one will be added automatically.
+
+    __Note__: `prefixUrl` will be ignored if the `url` argument is a URL instance.
+
+    __Note__: Leading slashes in `input` are disallowed when using this option to enforce consistency and avoid confusion.
+    For example, when the prefix URL is `https://example.com/foo` and the input is `/bar`, there's ambiguity whether the resulting URL would become `https://example.com/foo/bar` or `https://example.com/bar`.
+    The latter is used by browsers.
+
+    __Tip__: Useful when used with `got.extend()` to create niche-specific Got instances.
+
+    __Tip__: You can change `prefixUrl` using hooks as long as the URL still includes the `prefixUrl`.
+    If the URL doesn't include it anymore, it will throw.
+
+    @example
+    ```
+    import got from 'got';
+
+    await got('unicorn', {prefixUrl: 'https://cats.com'});
+    //=> 'https://cats.com/unicorn'
+
+    const instance = got.extend({
+        prefixUrl: 'https://google.com'
+    });
+
+    await instance('unicorn', {
+        hooks: {
+            beforeRequest: [
+                options => {
+                    options.prefixUrl = 'https://cats.com';
+                }
+            ]
+        }
+    });
+    //=> 'https://cats.com/unicorn'
+    ```
+    */
+    get prefixUrl() {
+        // We always return `string` here.
+        // It has to be `string | URL`, otherwise TypeScript will error because the getter and the setter have incompatible types.
+        return this._internals.prefixUrl;
+    }
+    set prefixUrl(value) {
+        assert.any([distribution.string, distribution.urlInstance], value);
+        if (value === '') {
+            this._internals.prefixUrl = '';
+            return;
+        }
+        value = value.toString();
+        if (!value.endsWith('/')) {
+            value += '/';
+        }
+        if (this._internals.prefixUrl && this._internals.url) {
+            const { href } = this._internals.url;
+            this._internals.url.href = value + href.slice(this._internals.prefixUrl.length);
+        }
+        this._internals.prefixUrl = value;
+    }
+    /**
+    __Note #1__: The `body` option cannot be used with the `json` or `form` option.
+
+    __Note #2__: If you provide this option, `got.stream()` will be read-only.
+
+    __Note #3__: If you provide a payload with the `GET` or `HEAD` method, it will throw a `TypeError` unless the method is `GET` and the `allowGetBody` option is set to `true`.
+
+    __Note #4__: This option is not enumerable and will not be merged with the instance defaults.
+
+    The `content-length` header will be automatically set if `body` is a `string` / `Buffer` / [`FormData`](https://developer.mozilla.org/en-US/docs/Web/API/FormData) / [`form-data` instance](https://github.com/form-data/form-data), and `content-length` and `transfer-encoding` are not manually set in `options.headers`.
+
+    Since Got 12, the `content-length` is not automatically set when `body` is a `fs.createReadStream`.
+    */
+    get body() {
+        return this._internals.body;
+    }
+    set body(value) {
+        assert.any([distribution.string, distribution.buffer, distribution.nodeStream, distribution.generator, distribution.asyncGenerator, lib_isFormData, distribution.undefined], value);
+        if (distribution.nodeStream(value)) {
+            assert.truthy(value.readable);
+        }
+        if (value !== undefined) {
+            assert.undefined(this._internals.form);
+            assert.undefined(this._internals.json);
+        }
+        this._internals.body = value;
+    }
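+    /*
+    A minimal sketch for the `body` option documented above; note that it is
+    mutually exclusive with `json` and `form`:
+
+    ```
+    import got from 'got';
+
+    // A plain string body; `content-length` is set automatically.
+    await got.post('https://example.com', {body: 'unicorn'});
+    ```
+    */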
+    /**
+    The form body is converted to a query string using [`(new URLSearchParams(object)).toString()`](https://nodejs.org/api/url.html#url_constructor_new_urlsearchparams_obj).
+
+    If the `Content-Type` header is not present, it will be set to `application/x-www-form-urlencoded`.
+
+    __Note #1__: If you provide this option, `got.stream()` will be read-only.
+
+    __Note #2__: This option is not enumerable and will not be merged with the instance defaults.
+    */
+    get form() {
+        return this._internals.form;
+    }
+    set form(value) {
+        assert.any([distribution.plainObject, distribution.undefined], value);
+        if (value !== undefined) {
+            assert.undefined(this._internals.body);
+            assert.undefined(this._internals.json);
+        }
+        this._internals.form = value;
+    }
+    /**
+    JSON body. If the `Content-Type` header is not set, it will be set to `application/json`.
+
+    __Note #1__: If you provide this option, `got.stream()` will be read-only.
+
+    __Note #2__: This option is not enumerable and will not be merged with the instance defaults.
+    */
+    get json() {
+        return this._internals.json;
+    }
+    set json(value) {
+        if (value !== undefined) {
+            assert.undefined(this._internals.body);
+            assert.undefined(this._internals.form);
+        }
+        this._internals.json = value;
+    }
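+    /*
+    A minimal sketch for the `form` and `json` options documented above; each is
+    mutually exclusive with the other and with `body`:
+
+    ```
+    import got from 'got';
+
+    // Sent as application/x-www-form-urlencoded.
+    await got.post('https://example.com', {form: {hello: 'world'}});
+
+    // Sent as application/json.
+    await got.post('https://example.com', {json: {hello: 'world'}});
+    ```
+    */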
+    /**
+    The URL to request, as a string, a [`https.request` options object](https://nodejs.org/api/https.html#https_https_request_options_callback), or a [WHATWG `URL`](https://nodejs.org/api/url.html#url_class_url).
+
+    Properties from `options` will override properties in the parsed `url`.
+
+    If no protocol is specified, it will throw a `TypeError`.
+
+    __Note__: The query string is **not** parsed as search params.
+
+    @example
+    ```
+    await got('https://example.com/?query=a b'); //=> https://example.com/?query=a%20b
+    await got('https://example.com/', {searchParams: {query: 'a b'}}); //=> https://example.com/?query=a+b
+
+    // The query string is overridden by `searchParams`
+    await got('https://example.com/?query=a b', {searchParams: {query: 'a b'}}); //=> https://example.com/?query=a+b
+    ```
+    */
+    get url() {
+        return this._internals.url;
+    }
+    set url(value) {
+        assert.any([distribution.string, distribution.urlInstance, distribution.undefined], value);
+        if (value === undefined) {
+            this._internals.url = undefined;
+            return;
+        }
+        if (distribution.string(value) && value.startsWith('/')) {
+            throw new Error('`url` must not start with a slash');
+        }
+        const urlString = `${this.prefixUrl}${value.toString()}`;
+        const url = new URL(urlString);
+        this._internals.url = url;
+        if (url.protocol === 'unix:') {
+            url.href = `http://unix${url.pathname}${url.search}`;
+        }
+        if (url.protocol !== 'http:' && url.protocol !== 'https:') {
+            const error = new Error(`Unsupported protocol: ${url.protocol}`);
+            error.code = 'ERR_UNSUPPORTED_PROTOCOL';
+            throw error;
+        }
+        if (this._internals.username) {
+            url.username = this._internals.username;
+            this._internals.username = '';
+        }
+        if (this._internals.password) {
+            url.password = this._internals.password;
+            this._internals.password = '';
+        }
+        if (this._internals.searchParams) {
+            url.search = this._internals.searchParams.toString();
+            this._internals.searchParams = undefined;
+        }
+        if (url.hostname === 'unix') {
+            if (!this._internals.enableUnixSockets) {
+                throw new Error('Using UNIX domain sockets but option `enableUnixSockets` is not enabled');
+            }
+            const matches = /(?<socketPath>.+?):(?<path>.+)/.exec(`${url.pathname}${url.search}`);
+            if (matches?.groups) {
+                const { socketPath, path } = matches.groups;
+                this._unixOptions = {
+                    socketPath,
+                    path,
+                    host: '',
+                };
+            }
+            else {
+                this._unixOptions = undefined;
+            }
+            return;
+        }
+        this._unixOptions = undefined;
+    }
+    /**
+    Cookie support. You don't have to care about parsing or how to store them.
+
+    __Note__: If you provide this option, `options.headers.cookie` will be overridden.
+    */
+    get cookieJar() {
+        return this._internals.cookieJar;
+    }
+    set cookieJar(value) {
+        assert.any([distribution.object, distribution.undefined], value);
+        if (value === undefined) {
+            this._internals.cookieJar = undefined;
+            return;
+        }
+        let { setCookie, getCookieString } = value;
+        assert.function(setCookie);
+        assert.function(getCookieString);
+        /* istanbul ignore next: Horrible `tough-cookie` v3 check */
+        if (setCookie.length === 4 && getCookieString.length === 0) {
+            setCookie = (0,external_node_util_.promisify)(setCookie.bind(value));
+            getCookieString = (0,external_node_util_.promisify)(getCookieString.bind(value));
+            this._internals.cookieJar = {
+                setCookie,
+                getCookieString: getCookieString,
+            };
+        }
+        else {
+            this._internals.cookieJar = value;
+        }
+    }
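+    /*
+    A minimal sketch for the `cookieJar` option documented above, assuming a
+    tough-cookie jar; per the setter above, any object exposing `setCookie` and
+    `getCookieString` should work:
+
+    ```
+    import got from 'got';
+    import {CookieJar} from 'tough-cookie';
+
+    const cookieJar = new CookieJar();
+
+    // Cookies set by the server are stored and sent back on subsequent requests.
+    await got('https://example.com', {cookieJar});
+    ```
+    */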
+    /**
+    You can abort the `request` using [`AbortController`](https://developer.mozilla.org/en-US/docs/Web/API/AbortController).
+
+    @example
+    ```
+    import got from 'got';
+
+    const abortController = new AbortController();
+
+    const request = got('https://httpbin.org/anything', {
+        signal: abortController.signal
+    });
+
+    setTimeout(() => {
+        abortController.abort();
+    }, 100);
+    ```
+    */
+    get signal() {
+        return this._internals.signal;
+    }
+    set signal(value) {
+        assert.object(value);
+        this._internals.signal = value;
+    }
+    /**
+    Ignore invalid cookies instead of throwing an error.
+    Only useful when the `cookieJar` option has been set. Not recommended.
+
+    @default false
+    */
+    get ignoreInvalidCookies() {
+        return this._internals.ignoreInvalidCookies;
+    }
+    set ignoreInvalidCookies(value) {
+        assert.boolean(value);
+        this._internals.ignoreInvalidCookies = value;
+    }
+    /**
+    Query string that will be added to the request URL.
+    This will override the query string in `url`.
+
+    If you need to pass in an array, you can do it using a `URLSearchParams` instance.
+
+    @example
+    ```
+    import got from 'got';
+
+    const searchParams = new URLSearchParams([['key', 'a'], ['key', 'b']]);
+
+    await got('https://example.com', {searchParams});
+
+    console.log(searchParams.toString());
+    //=> 'key=a&key=b'
+    ```
+    */
+    get searchParams() {
+        if (this._internals.url) {
+            return this._internals.url.searchParams;
+        }
+        if (this._internals.searchParams === undefined) {
+            this._internals.searchParams = new URLSearchParams();
+        }
+        return this._internals.searchParams;
+    }
+    set searchParams(value) {
+        assert.any([distribution.string, distribution.object, distribution.undefined], value);
+        const url = this._internals.url;
+        if (value === undefined) {
+            this._internals.searchParams = undefined;
+            if (url) {
+                url.search = '';
+            }
+            return;
+        }
+        const searchParameters = this.searchParams;
+        let updated;
+        if (distribution.string(value)) {
+            updated = new URLSearchParams(value);
+        }
+        else if (value instanceof URLSearchParams) {
+            updated = value;
+        }
+        else {
+            validateSearchParameters(value);
+            updated = new URLSearchParams();
+            // eslint-disable-next-line guard-for-in
+            for (const key in value) {
+                const entry = value[key];
+                if (entry === null) {
+                    updated.append(key, '');
+                }
+                else if (entry === undefined) {
+                    searchParameters.delete(key);
+                }
+                else {
+                    updated.append(key, entry);
+                }
+            }
+        }
+        if (this._merging) {
+            // These keys will be replaced
+            for (const key of updated.keys()) {
+                searchParameters.delete(key);
+            }
+            for (const [key, value] of updated) {
+                searchParameters.append(key, value);
+            }
+        }
+        else if (url) {
+            url.search = searchParameters.toString();
+        }
+        else {
+            this._internals.searchParams = searchParameters;
+        }
+    }
+    get searchParameters() {
+        throw new Error('The `searchParameters` option does not exist. Use `searchParams` instead.');
+    }
+    set searchParameters(_value) {
+        throw new Error('The `searchParameters` option does not exist. Use `searchParams` instead.');
+    }
+    get dnsLookup() {
+        return this._internals.dnsLookup;
+    }
+    set dnsLookup(value) {
+        assert.any([distribution.function, distribution.undefined], value);
+        this._internals.dnsLookup = value;
+    }
+    /**
+    An instance of [`CacheableLookup`](https://github.com/szmarczak/cacheable-lookup) used for making DNS lookups.
+    Useful when making lots of requests to different *public* hostnames.
+
+    `CacheableLookup` uses `dns.resolve4(...)` and `dns.resolve6(...)` under the hood and falls back to `dns.lookup(...)` when the first two fail, which may lead to additional delay.
+
+    __Note__: This should stay disabled when making requests to internal hostnames such as `localhost`, `database.local` etc.
+
+    @default false
+    */
+    get dnsCache() {
+        return this._internals.dnsCache;
+    }
+    set dnsCache(value) {
+        assert.any([distribution.object, distribution.boolean, distribution.undefined], value);
+        if (value === true) {
+            this._internals.dnsCache = getGlobalDnsCache();
+        }
+        else if (value === false) {
+            this._internals.dnsCache = undefined;
+        }
+        else {
+            this._internals.dnsCache = value;
+        }
+    }
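+    /*
+    A minimal sketch for the `dnsCache` option documented above:
+
+    ```
+    import got from 'got';
+
+    // `true` reuses a shared CacheableLookup instance across requests.
+    await got('https://example.com', {dnsCache: true});
+    ```
+    */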
+    /**
+    User data. `context` is shallow merged and enumerable. If it contains non-enumerable properties they will NOT be merged.
+
+    @example
+    ```
+    import got from 'got';
+
+    const instance = got.extend({
+        hooks: {
+            beforeRequest: [
+                options => {
+                    if (!options.context || !options.context.token) {
+                        throw new Error('Token required');
+                    }
+
+                    options.headers.token = options.context.token;
+                }
+            ]
+        }
+    });
+
+    const context = {
+        token: 'secret'
+    };
+
+    const response = await instance('https://httpbin.org/headers', {context});
+
+    // Let's see the headers
+    console.log(response.body);
+    ```
+    */
+    get context() {
+        return this._internals.context;
+    }
+    set context(value) {
+        assert.object(value);
+        if (this._merging) {
+            Object.assign(this._internals.context, value);
+        }
+        else {
+            this._internals.context = { ...value };
+        }
+    }
+    /**
+    Hooks allow modifications during the request lifecycle.
+    Hook functions may be async and are run serially.
+    */
+    get hooks() {
+        return this._internals.hooks;
+    }
+    set hooks(value) {
+        assert.object(value);
+        // eslint-disable-next-line guard-for-in
+        for (const knownHookEvent in value) {
+            if (!(knownHookEvent in this._internals.hooks)) {
+                throw new Error(`Unexpected hook event: ${knownHookEvent}`);
+            }
+            const typedKnownHookEvent = knownHookEvent;
+            const hooks = value[typedKnownHookEvent];
+            assert.any([distribution.array, distribution.undefined], hooks);
+            if (hooks) {
+                for (const hook of hooks) {
+                    assert.function(hook);
+                }
+            }
+            if (this._merging) {
+                if (hooks) {
+                    // @ts-expect-error FIXME
+                    this._internals.hooks[typedKnownHookEvent].push(...hooks);
+                }
+            }
+            else {
+                if (!hooks) {
+                    throw new Error(`Missing hook event: ${knownHookEvent}`);
+                }
+                // @ts-expect-error FIXME
+                this._internals.hooks[knownHookEvent] = [...hooks];
+            }
+        }
+    }
+    /**
+    Whether redirect responses should be followed automatically.
+
+    Optionally, pass a function to dynamically decide based on the response object.
+
+    Note that if a `303` is sent by the server in response to any request type (`POST`, `DELETE`, etc.), Got will automatically request the resource pointed to in the location header via `GET`.
+    This is in accordance with [the spec](https://tools.ietf.org/html/rfc7231#section-6.4.4). You can optionally turn this behavior on for other redirect codes as well - see `methodRewriting`.
+
+    @default true
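+
+    A minimal sketch of the function form, assuming an illustrative predicate:
+
+    @example
+    ```
+    import got from 'got';
+
+    // Follow redirects except permanent ones.
+    await got('https://example.com', {
+        followRedirect: response => response.statusCode !== 301
+    });
+    ```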
+    */
+    get followRedirect() {
+        return this._internals.followRedirect;
+    }
+    set followRedirect(value) {
+        assert.any([distribution.boolean, distribution.function], value);
+        this._internals.followRedirect = value;
+    }
+    get followRedirects() {
+        throw new TypeError('The `followRedirects` option does not exist. Use `followRedirect` instead.');
+    }
+    set followRedirects(_value) {
+        throw new TypeError('The `followRedirects` option does not exist. Use `followRedirect` instead.');
+    }
+    /**
+    If exceeded, the request will be aborted and a `MaxRedirectsError` will be thrown.
+
+    @default 10
+    */
+    get maxRedirects() {
+        return this._internals.maxRedirects;
+    }
+    set maxRedirects(value) {
+        assert.number(value);
+        this._internals.maxRedirects = value;
+    }
+    /**
+    A cache adapter instance for storing cached response data.
+
+    @default false
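+
+    A minimal sketch using an in-memory `Map` as the storage adapter (whether the second request is served from cache depends on the response's caching headers):
+
+    @example
+    ```
+    import got from 'got';
+
+    const map = new Map();
+
+    await got('https://example.com', {cache: map});
+
+    const second = await got('https://example.com', {cache: map});
+    console.log(second.isFromCache);
+    //=> true (if the first response was cacheable)
+    ```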
+    */
+    get cache() {
+        return this._internals.cache;
+    }
+    set cache(value) {
+        assert.any([distribution.object, distribution.string, distribution.boolean, distribution.undefined], value);
+        if (value === true) {
+            this._internals.cache = globalCache;
+        }
+        else if (value === false) {
+            this._internals.cache = undefined;
+        }
+        else {
+            this._internals.cache = value;
+        }
+    }
+    /**
+    Determines if a `got.HTTPError` is thrown for unsuccessful responses.
+
+    If this is disabled, requests that encounter an error status code will be resolved with the `response` instead of throwing.
+    This may be useful if you are checking for resource availability and are expecting error responses.
+
+    @default true
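+
+    A minimal sketch of checking a response status without throwing (the URL is illustrative):
+
+    @example
+    ```
+    import got from 'got';
+
+    const response = await got('https://example.com/missing', {throwHttpErrors: false});
+
+    // Inspect the status code instead of catching an `HTTPError`.
+    console.log(response.statusCode);
+    ```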
+    */
+    get throwHttpErrors() {
+        return this._internals.throwHttpErrors;
+    }
+    set throwHttpErrors(value) {
+        assert.boolean(value);
+        this._internals.throwHttpErrors = value;
+    }
+    get username() {
+        const url = this._internals.url;
+        const value = url ? url.username : this._internals.username;
+        return decodeURIComponent(value);
+    }
+    set username(value) {
+        assert.string(value);
+        const url = this._internals.url;
+        const fixedValue = encodeURIComponent(value);
+        if (url) {
+            url.username = fixedValue;
+        }
+        else {
+            this._internals.username = fixedValue;
+        }
+    }
+    get password() {
+        const url = this._internals.url;
+        const value = url ? url.password : this._internals.password;
+        return decodeURIComponent(value);
+    }
+    set password(value) {
+        assert.string(value);
+        const url = this._internals.url;
+        const fixedValue = encodeURIComponent(value);
+        if (url) {
+            url.password = fixedValue;
+        }
+        else {
+            this._internals.password = fixedValue;
+        }
+    }
+    /**
+    If set to `true`, Got will additionally accept HTTP2 requests.
+
+    It will choose either HTTP/1.1 or HTTP/2 depending on the ALPN protocol.
+
+    __Note__: This option requires Node.js 15.10.0 or newer as HTTP/2 support on older Node.js versions is very buggy.
+
+    __Note__: Overriding `options.request` will disable HTTP2 support.
+
+    @default false
+
+    @example
+    ```
+    import got from 'got';
+
+    const {headers} = await got('https://nghttp2.org/httpbin/anything', {http2: true});
+
+    console.log(headers.via);
+    //=> '2 nghttpx'
+    ```
+    */
+    get http2() {
+        return this._internals.http2;
+    }
+    set http2(value) {
+        assert.boolean(value);
+        this._internals.http2 = value;
+    }
+    /**
+    Set this to `true` to allow sending a body with the `GET` method.
+    However, the [HTTP/2 specification](https://tools.ietf.org/html/rfc7540#section-8.1.3) says that `An HTTP GET request includes request header fields and no payload body`, so this option has no effect when using the HTTP/2 protocol.
+    This option is only meant for interacting with non-compliant servers when you have no other choice.
+
+    __Note__: [RFC 7231](https://tools.ietf.org/html/rfc7231#section-4.3.1) doesn't specify any particular behavior for a GET request carrying a payload, which is why __it's considered an [anti-pattern](https://en.wikipedia.org/wiki/Anti-pattern)__.
+
+    @default false
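+
+    A minimal sketch for a non-compliant server that expects a `GET` body (the URL and payload are illustrative):
+
+    @example
+    ```
+    import got from 'got';
+
+    await got('https://example.com/search', {
+        allowGetBody: true,
+        json: {query: 'hello'}
+    });
+    ```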
+    */
+    get allowGetBody() {
+        return this._internals.allowGetBody;
+    }
+    set allowGetBody(value) {
+        assert.boolean(value);
+        this._internals.allowGetBody = value;
+    }
+    /**
+    Request headers.
+
+    Existing headers will be overwritten. Headers set to `undefined` will be omitted.
+
+    @default {}
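+
+    A minimal sketch (the header name and value are illustrative):
+
+    @example
+    ```
+    import got from 'got';
+
+    await got('https://example.com', {
+        headers: {
+            'x-api-key': 'secret'
+        }
+    });
+    ```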
+    */
+    get headers() {
+        return this._internals.headers;
+    }
+    set headers(value) {
+        assert.plainObject(value);
+        if (this._merging) {
+            Object.assign(this._internals.headers, lowercaseKeys(value));
+        }
+        else {
+            this._internals.headers = lowercaseKeys(value);
+        }
+    }
+    /**
+    Specifies if the HTTP request method should be [rewritten as `GET`](https://tools.ietf.org/html/rfc7231#section-6.4) on redirects.
+
+    As the [specification](https://tools.ietf.org/html/rfc7231#section-6.4) prefers to rewrite the HTTP method only on `303` responses, this is Got's default behavior.
+    Setting `methodRewriting` to `true` will also rewrite `301` and `302` responses, as allowed by the spec. This is the behavior followed by `curl` and browsers.
+
+    __Note__: Got never performs method rewriting on `307` and `308` responses, as this is [explicitly prohibited by the specification](https://www.rfc-editor.org/rfc/rfc7231#section-6.4.7).
+
+    @default false
+    */
+    get methodRewriting() {
+        return this._internals.methodRewriting;
+    }
+    set methodRewriting(value) {
+        assert.boolean(value);
+        this._internals.methodRewriting = value;
+    }
+    /**
+    Indicates which DNS record family to use.
+
+    Values:
+    - `undefined`: IPv4 (if present) or IPv6
+    - `4`: Only IPv4
+    - `6`: Only IPv6
+
+    @default undefined
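+
+    A minimal sketch forcing IPv6-only resolution (the hostname is illustrative and needs an AAAA record):
+
+    @example
+    ```
+    import got from 'got';
+
+    // Resolve the hostname using IPv6 records only.
+    await got('https://example.com', {dnsLookupIpVersion: 6});
+    ```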
+    */
+    get dnsLookupIpVersion() {
+        return this._internals.dnsLookupIpVersion;
+    }
+    set dnsLookupIpVersion(value) {
+        if (value !== undefined && value !== 4 && value !== 6) {
+            throw new TypeError(`Invalid DNS lookup IP version: ${value}`);
+        }
+        this._internals.dnsLookupIpVersion = value;
+    }
+    /**
+    A function used to parse JSON responses.
+
+    @example
+    ```
+    import got from 'got';
+    import Bourne from '@hapi/bourne';
+
+    const parsed = await got('https://example.com', {
+        parseJson: text => Bourne.parse(text)
+    }).json();
+
+    console.log(parsed);
+    ```
+    */
+    get parseJson() {
+        return this._internals.parseJson;
+    }
+    set parseJson(value) {
+        assert.function(value);
+        this._internals.parseJson = value;
+    }
+    /**
+    A function used to stringify the body of JSON requests.
+
+    @example
+    ```
+    import got from 'got';
+
+    await got.post('https://example.com', {
+        stringifyJson: object => JSON.stringify(object, (key, value) => {
+            if (key.startsWith('_')) {
+                return;
+            }
+
+            return value;
+        }),
+        json: {
+            some: 'payload',
+            _ignoreMe: 1234
+        }
+    });
+    ```
+
+    @example
+    ```
+    import got from 'got';
+
+    await got.post('https://example.com', {
+        stringifyJson: object => JSON.stringify(object, (key, value) => {
+            if (typeof value === 'number') {
+                return value.toString();
+            }
+
+            return value;
+        }),
+        json: {
+            some: 'payload',
+            number: 1
+        }
+    });
+    ```
+    */
+    get stringifyJson() {
+        return this._internals.stringifyJson;
+    }
+    set stringifyJson(value) {
+        assert.function(value);
+        this._internals.stringifyJson = value;
+    }
+    /**
+    An object representing `limit`, `calculateDelay`, `methods`, `statusCodes`, `maxRetryAfter` and `errorCodes` fields for maximum retry count, retry handler, allowed methods, allowed status codes, maximum [`Retry-After`](https://developer.mozilla.org/en-US/docs/Web/HTTP/Headers/Retry-After) time and allowed error codes.
+
+    The delay between retries is computed with the function `1000 * Math.pow(2, retry) + Math.random() * 100`, where `retry` is the attempt number (starting from 1).
+
+    The `calculateDelay` property is a `function` that receives an object with `attemptCount`, `retryOptions`, `error` and `computedValue` properties holding the current retry count, the retry options, the error and the default computed value.
+    The function must return a delay in milliseconds (or a Promise resolving with one); returning `0` cancels the retry.
+
+    By default, it retries *only* on the specified methods, status codes, and on these network errors:
+
+    - `ETIMEDOUT`: One of the [timeout](#timeout) limits was reached.
+    - `ECONNRESET`: Connection was forcibly closed by a peer.
+    - `EADDRINUSE`: Could not bind to any free port.
+    - `ECONNREFUSED`: Connection was refused by the server.
+    - `EPIPE`: The remote side of the stream being written has been closed.
+    - `ENOTFOUND`: Couldn't resolve the hostname to an IP address.
+    - `ENETUNREACH`: No internet connection.
+    - `EAI_AGAIN`: DNS lookup timed out.
+
+    __Note__: If `maxRetryAfter` is set to `undefined`, it will use `options.timeout`.
+    __Note__: If [`Retry-After`](https://developer.mozilla.org/en-US/docs/Web/HTTP/Headers/Retry-After) header is greater than `maxRetryAfter`, it will cancel the request.
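+
+    A minimal sketch of overriding the retry behavior (the values are illustrative):
+
+    @example
+    ```
+    import got from 'got';
+
+    await got('https://example.com', {
+        retry: {
+            limit: 5,
+            methods: ['GET'],
+            // Halve the default computed delay between attempts.
+            calculateDelay: ({computedValue}) => computedValue / 2
+        }
+    });
+    ```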
+    */
+    get retry() {
+        return this._internals.retry;
+    }
+    set retry(value) {
+        assert.plainObject(value);
+        assert.any([distribution.function, distribution.undefined], value.calculateDelay);
+        assert.any([distribution.number, distribution.undefined], value.maxRetryAfter);
+        assert.any([distribution.number, distribution.undefined], value.limit);
+        assert.any([distribution.array, distribution.undefined], value.methods);
+        assert.any([distribution.array, distribution.undefined], value.statusCodes);
+        assert.any([distribution.array, distribution.undefined], value.errorCodes);
+        assert.any([distribution.number, distribution.undefined], value.noise);
+        if (value.noise && Math.abs(value.noise) > 100) {
+            throw new Error(`The maximum acceptable retry noise is +/- 100ms, got ${value.noise}`);
+        }
+        for (const key in value) {
+            if (!(key in this._internals.retry)) {
+                throw new Error(`Unexpected retry option: ${key}`);
+            }
+        }
+        if (this._merging) {
+            Object.assign(this._internals.retry, value);
+        }
+        else {
+            this._internals.retry = { ...value };
+        }
+        const { retry } = this._internals;
+        retry.methods = [...new Set(retry.methods.map(method => method.toUpperCase()))];
+        retry.statusCodes = [...new Set(retry.statusCodes)];
+        retry.errorCodes = [...new Set(retry.errorCodes)];
+    }
+    /**
+    From `http.RequestOptions`.
+
+    The IP address used to send the request from.
+    */
+    get localAddress() {
+        return this._internals.localAddress;
+    }
+    set localAddress(value) {
+        assert.any([distribution.string, distribution.undefined], value);
+        this._internals.localAddress = value;
+    }
+    /**
+    The HTTP method used to make the request.
+
+    @default 'GET'
+    */
+    get method() {
+        return this._internals.method;
+    }
+    set method(value) {
+        assert.string(value);
+        this._internals.method = value.toUpperCase();
+    }
+    get createConnection() {
+        return this._internals.createConnection;
+    }
+    set createConnection(value) {
+        assert.any([distribution.function, distribution.undefined], value);
+        this._internals.createConnection = value;
+    }
+    /**
+    From `http-cache-semantics`
+
+    @default {}
+    */
+    get cacheOptions() {
+        return this._internals.cacheOptions;
+    }
+    set cacheOptions(value) {
+        assert.plainObject(value);
+        assert.any([distribution.boolean, distribution.undefined], value.shared);
+        assert.any([distribution.number, distribution.undefined], value.cacheHeuristic);
+        assert.any([distribution.number, distribution.undefined], value.immutableMinTimeToLive);
+        assert.any([distribution.boolean, distribution.undefined], value.ignoreCargoCult);
+        for (const key in value) {
+            if (!(key in this._internals.cacheOptions)) {
+                throw new Error(`Cache option \`${key}\` does not exist`);
+            }
+        }
+        if (this._merging) {
+            Object.assign(this._internals.cacheOptions, value);
+        }
+        else {
+            this._internals.cacheOptions = { ...value };
+        }
+    }
+    /**
+    Options for the advanced HTTPS API.
+    */
+    get https() {
+        return this._internals.https;
+    }
+    set https(value) {
+        assert.plainObject(value);
+        assert.any([distribution.boolean, distribution.undefined], value.rejectUnauthorized);
+        assert.any([distribution.function, distribution.undefined], value.checkServerIdentity);
+        assert.any([distribution.string, distribution.object, distribution.array, distribution.undefined], value.certificateAuthority);
+        assert.any([distribution.string, distribution.object, distribution.array, distribution.undefined], value.key);
+        assert.any([distribution.string, distribution.object, distribution.array, distribution.undefined], value.certificate);
+        assert.any([distribution.string, distribution.undefined], value.passphrase);
+        assert.any([distribution.string, distribution.buffer, distribution.array, distribution.undefined], value.pfx);
+        assert.any([distribution.array, distribution.undefined], value.alpnProtocols);
+        assert.any([distribution.string, distribution.undefined], value.ciphers);
+        assert.any([distribution.string, distribution.buffer, distribution.undefined], value.dhparam);
+        assert.any([distribution.string, distribution.undefined], value.signatureAlgorithms);
+        assert.any([distribution.string, distribution.undefined], value.minVersion);
+        assert.any([distribution.string, distribution.undefined], value.maxVersion);
+        assert.any([distribution.boolean, distribution.undefined], value.honorCipherOrder);
+        assert.any([distribution.number, distribution.undefined], value.tlsSessionLifetime);
+        assert.any([distribution.string, distribution.undefined], value.ecdhCurve);
+        assert.any([distribution.string, distribution.buffer, distribution.array, distribution.undefined], value.certificateRevocationLists);
+        for (const key in value) {
+            if (!(key in this._internals.https)) {
+                throw new Error(`HTTPS option \`${key}\` does not exist`);
+            }
+        }
+        if (this._merging) {
+            Object.assign(this._internals.https, value);
+        }
+        else {
+            this._internals.https = { ...value };
+        }
+    }
+    /**
+    [Encoding](https://nodejs.org/api/buffer.html#buffer_buffers_and_character_encodings) to be used on `setEncoding` of the response data.
+
+    To get a [`Buffer`](https://nodejs.org/api/buffer.html), you need to set `responseType` to `buffer` instead.
+    Don't set this option to `null`.
+
+    __Note__: This doesn't affect streams! Instead, you need to do `got.stream(...).setEncoding(encoding)`.
+
+    @default 'utf-8'
+    */
+    get encoding() {
+        return this._internals.encoding;
+    }
+    set encoding(value) {
+        if (value === null) {
+            throw new TypeError('To get a Buffer, set `options.responseType` to `buffer` instead');
+        }
+        assert.any([distribution.string, distribution.undefined], value);
+        this._internals.encoding = value;
+    }
+    /**
+    When set to `true` the promise will return the Response body instead of the Response object.
+
+    @default false
+    */
+    get resolveBodyOnly() {
+        return this._internals.resolveBodyOnly;
+    }
+    set resolveBodyOnly(value) {
+        assert.boolean(value);
+        this._internals.resolveBodyOnly = value;
+    }
+    /**
+    Returns a `Stream` instead of a `Promise`.
+    This is equivalent to calling `got.stream(url, options?)`.
+
+    @default false
+    */
+    get isStream() {
+        return this._internals.isStream;
+    }
+    set isStream(value) {
+        assert.boolean(value);
+        this._internals.isStream = value;
+    }
+    /**
+    The parsing method.
+
+    The promise also has `.text()`, `.json()` and `.buffer()` methods which return another Got promise for the parsed body.
+
+    It's like setting the options to `{responseType: 'json', resolveBodyOnly: true}` but without affecting the main Got promise.
+
+    __Note__: When using streams, this option is ignored.
+
+    @example
+    ```
+    const responsePromise = got(url);
+    const bufferPromise = responsePromise.buffer();
+    const jsonPromise = responsePromise.json();
+
+    const [response, buffer, json] = await Promise.all([responsePromise, bufferPromise, jsonPromise]);
+    // `response` is an instance of Got Response
+    // `buffer` is an instance of Buffer
+    // `json` is an object
+    ```
+
+    @example
+    ```
+    // This
+    const body = await got(url).json();
+
+    // is semantically the same as this
+    const body = await got(url, {responseType: 'json', resolveBodyOnly: true});
+    ```
+    */
+    get responseType() {
+        return this._internals.responseType;
+    }
+    set responseType(value) {
+        if (value === undefined) {
+            this._internals.responseType = 'text';
+            return;
+        }
+        if (value !== 'text' && value !== 'buffer' && value !== 'json') {
+            throw new Error(`Invalid \`responseType\` option: ${value}`);
+        }
+        this._internals.responseType = value;
+    }
+    get pagination() {
+        return this._internals.pagination;
+    }
+    set pagination(value) {
+        assert.object(value);
+        if (this._merging) {
+            Object.assign(this._internals.pagination, value);
+        }
+        else {
+            this._internals.pagination = value;
+        }
+    }
+    get auth() {
+        throw new Error('Parameter `auth` is deprecated. Use `username` / `password` instead.');
+    }
+    set auth(_value) {
+        throw new Error('Parameter `auth` is deprecated. Use `username` / `password` instead.');
+    }
+    get setHost() {
+        return this._internals.setHost;
+    }
+    set setHost(value) {
+        assert.boolean(value);
+        this._internals.setHost = value;
+    }
+    get maxHeaderSize() {
+        return this._internals.maxHeaderSize;
+    }
+    set maxHeaderSize(value) {
+        assert.any([distribution.number, distribution.undefined], value);
+        this._internals.maxHeaderSize = value;
+    }
+    get enableUnixSockets() {
+        return this._internals.enableUnixSockets;
+    }
+    set enableUnixSockets(value) {
+        assert.boolean(value);
+        this._internals.enableUnixSockets = value;
+    }
+    // eslint-disable-next-line @typescript-eslint/naming-convention
+    toJSON() {
+        return { ...this._internals };
+    }
+    [Symbol.for('nodejs.util.inspect.custom')](_depth, options) {
+        return (0,external_node_util_.inspect)(this._internals, options);
+    }
+    createNativeRequestOptions() {
+        const internals = this._internals;
+        const url = internals.url;
+        let agent;
+        if (url.protocol === 'https:') {
+            agent = internals.http2 ? internals.agent : internals.agent.https;
+        }
+        else {
+            agent = internals.agent.http;
+        }
+        const { https } = internals;
+        let { pfx } = https;
+        if (distribution.array(pfx) && distribution.plainObject(pfx[0])) {
+            pfx = pfx.map(object => ({
+                buf: object.buffer,
+                passphrase: object.passphrase,
+            }));
+        }
+        return {
+            ...internals.cacheOptions,
+            ...this._unixOptions,
+            // HTTPS options
+            // eslint-disable-next-line @typescript-eslint/naming-convention
+            ALPNProtocols: https.alpnProtocols,
+            ca: https.certificateAuthority,
+            cert: https.certificate,
+            key: https.key,
+            passphrase: https.passphrase,
+            pfx: https.pfx,
+            rejectUnauthorized: https.rejectUnauthorized,
+            checkServerIdentity: https.checkServerIdentity ?? external_node_tls_namespaceObject.checkServerIdentity,
+            ciphers: https.ciphers,
+            honorCipherOrder: https.honorCipherOrder,
+            minVersion: https.minVersion,
+            maxVersion: https.maxVersion,
+            sigalgs: https.signatureAlgorithms,
+            sessionTimeout: https.tlsSessionLifetime,
+            dhparam: https.dhparam,
+            ecdhCurve: https.ecdhCurve,
+            crl: https.certificateRevocationLists,
+            // HTTP options
+            lookup: internals.dnsLookup ?? internals.dnsCache?.lookup,
+            family: internals.dnsLookupIpVersion,
+            agent,
+            setHost: internals.setHost,
+            method: internals.method,
+            maxHeaderSize: internals.maxHeaderSize,
+            localAddress: internals.localAddress,
+            headers: internals.headers,
+            createConnection: internals.createConnection,
+            timeout: internals.http2 ? getHttp2TimeoutOption(internals) : undefined,
+            // HTTP/2 options
+            h2session: internals.h2session,
+        };
+    }
+    getRequestFunction() {
+        const url = this._internals.url;
+        const { request } = this._internals;
+        if (!request && url) {
+            return this.getFallbackRequestFunction();
+        }
+        return request;
+    }
+    getFallbackRequestFunction() {
+        const url = this._internals.url;
+        if (!url) {
+            return;
+        }
+        if (url.protocol === 'https:') {
+            if (this._internals.http2) {
+                if (major < 15 || (major === 15 && minor < 10)) {
+                    const error = new Error('To use the `http2` option, install Node.js 15.10.0 or above');
+                    error.code = 'EUNSUPPORTED';
+                    throw error;
+                }
+                return http2_wrapper_source.auto;
+            }
+            return external_node_https_.request;
+        }
+        return external_node_http_.request;
+    }
+    freeze() {
+        const options = this._internals;
+        Object.freeze(options);
+        Object.freeze(options.hooks);
+        Object.freeze(options.hooks.afterResponse);
+        Object.freeze(options.hooks.beforeError);
+        Object.freeze(options.hooks.beforeRedirect);
+        Object.freeze(options.hooks.beforeRequest);
+        Object.freeze(options.hooks.beforeRetry);
+        Object.freeze(options.hooks.init);
+        Object.freeze(options.https);
+        Object.freeze(options.cacheOptions);
+        Object.freeze(options.agent);
+        Object.freeze(options.headers);
+        Object.freeze(options.timeout);
+        Object.freeze(options.retry);
+        Object.freeze(options.retry.errorCodes);
+        Object.freeze(options.retry.methods);
+        Object.freeze(options.retry.statusCodes);
+    }
+}
+
+;// CONCATENATED MODULE: ./node_modules/.pnpm/got@14.4.4/node_modules/got/dist/source/core/response.js
+
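+// A response is considered OK when its status code is 2xx (or up to 3xx when
+// redirects are not followed automatically), or when it is a `304 Not Modified`.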
+const isResponseOk = (response) => {
+    const { statusCode } = response;
+    const { followRedirect } = response.request.options;
+    const shouldFollow = typeof followRedirect === 'function' ? followRedirect(response) : followRedirect;
+    const limitStatusCode = shouldFollow ? 299 : 399;
+    return (statusCode >= 200 && statusCode <= limitStatusCode) || statusCode === 304;
+};
+/**
+An error thrown when the server response code is 2xx but parsing the body fails.
+Includes a `response` property.
+*/
+class ParseError extends RequestError {
+    constructor(error, response) {
+        const { options } = response.request;
+        super(`${error.message} in "${options.url.toString()}"`, error, response.request);
+        this.name = 'ParseError';
+        this.code = 'ERR_BODY_PARSE_FAILURE';
+    }
+}
+const parseBody = (response, responseType, parseJson, encoding) => {
+    const { rawBody } = response;
+    try {
+        if (responseType === 'text') {
+            return rawBody.toString(encoding);
+        }
+        if (responseType === 'json') {
+            return rawBody.length === 0 ? '' : parseJson(rawBody.toString(encoding));
+        }
+        if (responseType === 'buffer') {
+            return rawBody;
+        }
+    }
+    catch (error) {
+        throw new ParseError(error, response);
+    }
+    throw new ParseError({
+        message: `Unknown body type '${responseType}'`,
+        name: 'Error',
+    }, response);
+};
+
+;// CONCATENATED MODULE: ./node_modules/.pnpm/got@14.4.4/node_modules/got/dist/source/core/utils/is-client-request.js
+function isClientRequest(clientRequest) {
+    return clientRequest.writable && !clientRequest.writableEnded;
+}
+/* harmony default export */ const is_client_request = (isClientRequest);
+
+;// CONCATENATED MODULE: ./node_modules/.pnpm/got@14.4.4/node_modules/got/dist/source/core/utils/is-unix-socket-url.js
+// eslint-disable-next-line @typescript-eslint/naming-convention
+function isUnixSocketURL(url) {
+    return url.protocol === 'unix:' || url.hostname === 'unix';
+}
+
+;// CONCATENATED MODULE: ./node_modules/.pnpm/got@14.4.4/node_modules/got/dist/source/core/index.js
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+const supportsBrotli = distribution.string(external_node_process_.versions.brotli);
+const methodsWithoutBody = new Set(['GET', 'HEAD']);
+const cacheableStore = new WeakableMap();
+const redirectCodes = new Set([300, 301, 302, 303, 304, 307, 308]);
+const proxiedRequestEvents = [
+    'socket',
+    'connect',
+    'continue',
+    'information',
+    'upgrade',
+];
+const core_noop = () => { };
+class Request extends external_node_stream_.Duplex {
+    // @ts-expect-error - Ignoring for now.
+    ['constructor'];
+    _noPipe;
+    // @ts-expect-error https://github.com/microsoft/TypeScript/issues/9568
+    options;
+    response;
+    requestUrl;
+    redirectUrls;
+    retryCount;
+    _stopRetry;
+    _downloadedSize;
+    _uploadedSize;
+    _stopReading;
+    _pipedServerResponses;
+    _request;
+    _responseSize;
+    _bodySize;
+    _unproxyEvents;
+    _isFromCache;
+    _cannotHaveBody;
+    _triggerRead;
+    _cancelTimeouts;
+    _removeListeners;
+    _nativeResponse;
+    _flushed;
+    _aborted;
+    // We need this because `this._request` is `undefined` when using cache
+    _requestInitialized;
+    constructor(url, options, defaults) {
+        super({
+            // Don't destroy immediately, as the error may be emitted on unsuccessful retry
+            autoDestroy: false,
+            // It needs to be zero because we're just proxying the data to another stream
+            highWaterMark: 0,
+        });
+        this._downloadedSize = 0;
+        this._uploadedSize = 0;
+        this._stopReading = false;
+        this._pipedServerResponses = new Set();
+        this._cannotHaveBody = false;
+        this._unproxyEvents = core_noop;
+        this._triggerRead = false;
+        this._cancelTimeouts = core_noop;
+        this._removeListeners = core_noop;
+        this._jobs = [];
+        this._flushed = false;
+        this._requestInitialized = false;
+        this._aborted = false;
+        this.redirectUrls = [];
+        this.retryCount = 0;
+        this._stopRetry = core_noop;
+        this.on('pipe', (source) => {
+            if (source?.headers) {
+                Object.assign(this.options.headers, source.headers);
+            }
+        });
+        this.on('newListener', event => {
+            if (event === 'retry' && this.listenerCount('retry') > 0) {
+                throw new Error('A retry listener has been attached already.');
+            }
+        });
+        try {
+            this.options = new Options(url, options, defaults);
+            if (!this.options.url) {
+                if (this.options.prefixUrl === '') {
+                    throw new TypeError('Missing `url` property');
+                }
+                this.options.url = '';
+            }
+            this.requestUrl = this.options.url;
+        }
+        catch (error) {
+            const { options } = error;
+            if (options) {
+                this.options = options;
+            }
+            this.flush = async () => {
+                this.flush = async () => { };
+                this.destroy(error);
+            };
+            return;
+        }
+        // Important! If you replace `body` in a handler with another stream, make sure it's readable first.
+        // The below is run only once.
+        const { body } = this.options;
+        if (distribution.nodeStream(body)) {
+            body.once('error', error => {
+                if (this._flushed) {
+                    this._beforeError(new UploadError(error, this));
+                }
+                else {
+                    this.flush = async () => {
+                        this.flush = async () => { };
+                        this._beforeError(new UploadError(error, this));
+                    };
+                }
+            });
+        }
+        if (this.options.signal) {
+            const abort = () => {
+                // See https://developer.mozilla.org/en-US/docs/Web/API/AbortSignal/timeout_static#return_value
+                if (this.options.signal?.reason?.name === 'TimeoutError') {
+                    this.destroy(new TimeoutError(this.options.signal.reason, this.timings, this));
+                }
+                else {
+                    this.destroy(new AbortError(this));
+                }
+            };
+            if (this.options.signal.aborted) {
+                abort();
+            }
+            else {
+                this.options.signal.addEventListener('abort', abort);
+                this._removeListeners = () => {
+                    this.options.signal?.removeEventListener('abort', abort);
+                };
+            }
+        }
+    }
+    async flush() {
+        if (this._flushed) {
+            return;
+        }
+        this._flushed = true;
+        try {
+            await this._finalizeBody();
+            if (this.destroyed) {
+                return;
+            }
+            await this._makeRequest();
+            if (this.destroyed) {
+                this._request?.destroy();
+                return;
+            }
+            // Queued writes etc.
+            for (const job of this._jobs) {
+                job();
+            }
+            // Prevent memory leak
+            this._jobs.length = 0;
+            this._requestInitialized = true;
+        }
+        catch (error) {
+            this._beforeError(error);
+        }
+    }
+    _beforeError(error) {
+        if (this._stopReading) {
+            return;
+        }
+        const { response, options } = this;
+        const attemptCount = this.retryCount + (error.name === 'RetryError' ? 0 : 1);
+        this._stopReading = true;
+        if (!(error instanceof RequestError)) {
+            error = new RequestError(error.message, error, this);
+        }
+        const typedError = error;
+        void (async () => {
+            // The Node.js parser is quirky: it emits post-request parse errors
+            // on the same instance as the previous request.
+            // Therefore, we also need to check whether it has been destroyed.
+            //
+            // Furthermore, Node.js 16 `response.destroy()` doesn't immediately destroy the socket,
+            // but makes the response unreadable. So we additionally need to check `response.readable`.
+            if (response?.readable && !response.rawBody && !this._request?.socket?.destroyed) {
+                // @types/node has incorrect typings. `setEncoding` accepts `null` as well.
+                response.setEncoding(this.readableEncoding);
+                const success = await this._setRawBody(response);
+                if (success) {
+                    response.body = response.rawBody.toString();
+                }
+            }
+            if (this.listenerCount('retry') !== 0) {
+                let backoff;
+                try {
+                    let retryAfter;
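+                    // `Retry-After` may be a number of seconds or an HTTP date; normalize both to milliseconds.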
+                    if (response && 'retry-after' in response.headers) {
+                        retryAfter = Number(response.headers['retry-after']);
+                        if (Number.isNaN(retryAfter)) {
+                            retryAfter = Date.parse(response.headers['retry-after']) - Date.now();
+                            if (retryAfter <= 0) {
+                                retryAfter = 1;
+                            }
+                        }
+                        else {
+                            retryAfter *= 1000;
+                        }
+                    }
+                    const retryOptions = options.retry;
+                    backoff = await retryOptions.calculateDelay({
+                        attemptCount,
+                        retryOptions,
+                        error: typedError,
+                        retryAfter,
+                        computedValue: calculate_retry_delay({
+                            attemptCount,
+                            retryOptions,
+                            error: typedError,
+                            retryAfter,
+                            computedValue: retryOptions.maxRetryAfter ?? options.timeout.request ?? Number.POSITIVE_INFINITY,
+                        }),
+                    });
+                }
+                catch (error_) {
+                    void this._error(new RequestError(error_.message, error_, this));
+                    return;
+                }
+                if (backoff) {
+                    await new Promise(resolve => {
+                        const timeout = setTimeout(resolve, backoff);
+                        this._stopRetry = () => {
+                            clearTimeout(timeout);
+                            resolve();
+                        };
+                    });
+                    // Something forced us to abort the retry
+                    if (this.destroyed) {
+                        return;
+                    }
+                    try {
+                        for (const hook of this.options.hooks.beforeRetry) {
+                            // eslint-disable-next-line no-await-in-loop
+                            await hook(typedError, this.retryCount + 1);
+                        }
+                    }
+                    catch (error_) {
+                        void this._error(new RequestError(error_.message, error, this));
+                        return;
+                    }
+                    // Something forced us to abort the retry
+                    if (this.destroyed) {
+                        return;
+                    }
+                    this.destroy();
+                    this.emit('retry', this.retryCount + 1, error, (updatedOptions) => {
+                        const request = new Request(options.url, updatedOptions, options);
+                        request.retryCount = this.retryCount + 1;
+                        external_node_process_.nextTick(() => {
+                            void request.flush();
+                        });
+                        return request;
+                    });
+                    return;
+                }
+            }
+            void this._error(typedError);
+        })();
+    }
+    _read() {
+        this._triggerRead = true;
+        const { response } = this;
+        if (response && !this._stopReading) {
+            // We cannot put this in the `if` above
+            // because `.read()` also triggers the `end` event
+            if (response.readableLength) {
+                this._triggerRead = false;
+            }
+            let data;
+            while ((data = response.read()) !== null) {
+                this._downloadedSize += data.length; // eslint-disable-line @typescript-eslint/restrict-plus-operands
+                const progress = this.downloadProgress;
+                if (progress.percent < 1) {
+                    this.emit('downloadProgress', progress);
+                }
+                this.push(data);
+            }
+        }
+    }
+    _write(chunk, encoding, callback) {
+        const write = () => {
+            this._writeRequest(chunk, encoding, callback);
+        };
+        if (this._requestInitialized) {
+            write();
+        }
+        else {
+            this._jobs.push(write);
+        }
+    }
+    _final(callback) {
+        const endRequest = () => {
+            // We need to check if `this._request` is present,
+            // because it isn't when we use cache.
+            if (!this._request || this._request.destroyed) {
+                callback();
+                return;
+            }
+            this._request.end((error) => {
+                // The request has been destroyed before `_final` finished.
+                // See https://github.com/nodejs/node/issues/39356
+                if (this._request._writableState?.errored) {
+                    return;
+                }
+                if (!error) {
+                    this._bodySize = this._uploadedSize;
+                    this.emit('uploadProgress', this.uploadProgress);
+                    this._request.emit('upload-complete');
+                }
+                callback(error);
+            });
+        };
+        if (this._requestInitialized) {
+            endRequest();
+        }
+        else {
+            this._jobs.push(endRequest);
+        }
+    }
+    _destroy(error, callback) {
+        this._stopReading = true;
+        this.flush = async () => { };
+        // Prevent further retries
+        this._stopRetry();
+        this._cancelTimeouts();
+        this._removeListeners();
+        if (this.options) {
+            const { body } = this.options;
+            if (distribution.nodeStream(body)) {
+                body.destroy();
+            }
+        }
+        if (this._request) {
+            this._request.destroy();
+        }
+        if (error !== null && !distribution.undefined(error) && !(error instanceof RequestError)) {
+            error = new RequestError(error.message, error, this);
+        }
+        callback(error);
+    }
+    pipe(destination, options) {
+        if (destination instanceof external_node_http_.ServerResponse) {
+            this._pipedServerResponses.add(destination);
+        }
+        return super.pipe(destination, options);
+    }
+    unpipe(destination) {
+        if (destination instanceof external_node_http_.ServerResponse) {
+            this._pipedServerResponses.delete(destination);
+        }
+        super.unpipe(destination);
+        return this;
+    }
+    async _finalizeBody() {
+        const { options } = this;
+        const { headers } = options;
+        const isForm = !distribution.undefined(options.form);
+        // eslint-disable-next-line @typescript-eslint/naming-convention
+        const isJSON = !distribution.undefined(options.json);
+        const isBody = !distribution.undefined(options.body);
+        const cannotHaveBody = methodsWithoutBody.has(options.method) && !(options.method === 'GET' && options.allowGetBody);
+        this._cannotHaveBody = cannotHaveBody;
+        if (isForm || isJSON || isBody) {
+            if (cannotHaveBody) {
+                throw new TypeError(`The \`${options.method}\` method cannot be used with a body`);
+            }
+            // Serialize body
+            const noContentType = !distribution.string(headers['content-type']);
+            if (isBody) {
+                // Body is spec-compliant FormData
+                if (lib_isFormData(options.body)) {
+                    const encoder = new FormDataEncoder(options.body);
+                    if (noContentType) {
+                        headers['content-type'] = encoder.headers['Content-Type'];
+                    }
+                    if ('Content-Length' in encoder.headers) {
+                        headers['content-length'] = encoder.headers['Content-Length'];
+                    }
+                    options.body = encoder.encode();
+                }
+                // Special case for https://github.com/form-data/form-data
+                if (is_form_data_isFormData(options.body) && noContentType) {
+                    headers['content-type'] = `multipart/form-data; boundary=${options.body.getBoundary()}`;
+                }
+            }
+            else if (isForm) {
+                if (noContentType) {
+                    headers['content-type'] = 'application/x-www-form-urlencoded';
+                }
+                const { form } = options;
+                options.form = undefined;
+                options.body = (new URLSearchParams(form)).toString();
+            }
+            else {
+                if (noContentType) {
+                    headers['content-type'] = 'application/json';
+                }
+                const { json } = options;
+                options.json = undefined;
+                options.body = options.stringifyJson(json);
+            }
+            const uploadBodySize = await getBodySize(options.body, options.headers);
+            // See https://tools.ietf.org/html/rfc7230#section-3.3.2
+            // A user agent SHOULD send a Content-Length in a request message when
+            // no Transfer-Encoding is sent and the request method defines a meaning
+            // for an enclosed payload body.  For example, a Content-Length header
+            // field is normally sent in a POST request even when the value is 0
+            // (indicating an empty payload body).  A user agent SHOULD NOT send a
+            // Content-Length header field when the request message does not contain
+            // a payload body and the method semantics do not anticipate such a
+            // body.
+            if (distribution.undefined(headers['content-length']) && distribution.undefined(headers['transfer-encoding']) && !cannotHaveBody && !distribution.undefined(uploadBodySize)) {
+                headers['content-length'] = String(uploadBodySize);
+            }
+        }
+        if (options.responseType === 'json' && !('accept' in options.headers)) {
+            options.headers.accept = 'application/json';
+        }
+        this._bodySize = Number(headers['content-length']) || undefined;
+    }
+    async _onResponseBase(response) {
+        // This will be called e.g. when using cache so we need to check if this request has been aborted.
+        if (this.isAborted) {
+            return;
+        }
+        const { options } = this;
+        const { url } = options;
+        this._nativeResponse = response;
+        if (options.decompress) {
+            response = decompress_response(response);
+        }
+        const statusCode = response.statusCode;
+        const typedResponse = response;
+        typedResponse.statusMessage = typedResponse.statusMessage ?? external_node_http_.STATUS_CODES[statusCode];
+        typedResponse.url = options.url.toString();
+        typedResponse.requestUrl = this.requestUrl;
+        typedResponse.redirectUrls = this.redirectUrls;
+        typedResponse.request = this;
+        typedResponse.isFromCache = this._nativeResponse.fromCache ?? false;
+        typedResponse.ip = this.ip;
+        typedResponse.retryCount = this.retryCount;
+        typedResponse.ok = isResponseOk(typedResponse);
+        this._isFromCache = typedResponse.isFromCache;
+        this._responseSize = Number(response.headers['content-length']) || undefined;
+        this.response = typedResponse;
+        response.once('end', () => {
+            this._responseSize = this._downloadedSize;
+            this.emit('downloadProgress', this.downloadProgress);
+        });
+        response.once('error', (error) => {
+            this._aborted = true;
+            // Force clean-up, because some packages don't do this.
+            // TODO: Fix decompress-response
+            response.destroy();
+            this._beforeError(new ReadError(error, this));
+        });
+        response.once('aborted', () => {
+            this._aborted = true;
+            this._beforeError(new ReadError({
+                name: 'Error',
+                message: 'The server aborted pending request',
+                code: 'ECONNRESET',
+            }, this));
+        });
+        this.emit('downloadProgress', this.downloadProgress);
+        const rawCookies = response.headers['set-cookie'];
+        if (distribution.object(options.cookieJar) && rawCookies) {
+            let promises = rawCookies.map(async (rawCookie) => options.cookieJar.setCookie(rawCookie, url.toString()));
+            if (options.ignoreInvalidCookies) {
+                // eslint-disable-next-line @typescript-eslint/no-floating-promises
+                promises = promises.map(async (promise) => {
+                    try {
+                        await promise;
+                    }
+                    catch { }
+                });
+            }
+            try {
+                await Promise.all(promises);
+            }
+            catch (error) {
+                this._beforeError(error);
+                return;
+            }
+        }
+        // The above awaits a promise, so we need to check once more whether this request has been aborted.
+        if (this.isAborted) {
+            return;
+        }
+        if (response.headers.location && redirectCodes.has(statusCode)) {
+            // We're being redirected, we don't care about the response.
+            // It'd be best to abort the request, but we can't because
+            // we would have to sacrifice the TCP connection. We don't want that.
+            const shouldFollow = typeof options.followRedirect === 'function' ? options.followRedirect(typedResponse) : options.followRedirect;
+            if (shouldFollow) {
+                response.resume();
+                this._cancelTimeouts();
+                this._unproxyEvents();
+                if (this.redirectUrls.length >= options.maxRedirects) {
+                    this._beforeError(new MaxRedirectsError(this));
+                    return;
+                }
+                this._request = undefined;
+                const updatedOptions = new Options(undefined, undefined, this.options);
+                const serverRequestedGet = statusCode === 303 && updatedOptions.method !== 'GET' && updatedOptions.method !== 'HEAD';
+                const canRewrite = statusCode !== 307 && statusCode !== 308;
+                const userRequestedGet = updatedOptions.methodRewriting && canRewrite;
+                if (serverRequestedGet || userRequestedGet) {
+                    updatedOptions.method = 'GET';
+                    updatedOptions.body = undefined;
+                    updatedOptions.json = undefined;
+                    updatedOptions.form = undefined;
+                    delete updatedOptions.headers['content-length'];
+                }
+                try {
+                    // We need this in order to support UTF-8
+                    const redirectBuffer = external_node_buffer_namespaceObject.Buffer.from(response.headers.location, 'binary').toString();
+                    const redirectUrl = new URL(redirectBuffer, url);
+                    if (!isUnixSocketURL(url) && isUnixSocketURL(redirectUrl)) {
+                        this._beforeError(new RequestError('Cannot redirect to UNIX socket', {}, this));
+                        return;
+                    }
+                    // Redirecting to a different site, clear sensitive data.
+                    if (redirectUrl.hostname !== url.hostname || redirectUrl.port !== url.port) {
+                        if ('host' in updatedOptions.headers) {
+                            delete updatedOptions.headers.host;
+                        }
+                        if ('cookie' in updatedOptions.headers) {
+                            delete updatedOptions.headers.cookie;
+                        }
+                        if ('authorization' in updatedOptions.headers) {
+                            delete updatedOptions.headers.authorization;
+                        }
+                        if (updatedOptions.username || updatedOptions.password) {
+                            updatedOptions.username = '';
+                            updatedOptions.password = '';
+                        }
+                    }
+                    else {
+                        redirectUrl.username = updatedOptions.username;
+                        redirectUrl.password = updatedOptions.password;
+                    }
+                    this.redirectUrls.push(redirectUrl);
+                    updatedOptions.prefixUrl = '';
+                    updatedOptions.url = redirectUrl;
+                    for (const hook of updatedOptions.hooks.beforeRedirect) {
+                        // eslint-disable-next-line no-await-in-loop
+                        await hook(updatedOptions, typedResponse);
+                    }
+                    this.emit('redirect', updatedOptions, typedResponse);
+                    this.options = updatedOptions;
+                    await this._makeRequest();
+                }
+                catch (error) {
+                    this._beforeError(error);
+                    return;
+                }
+                return;
+            }
+        }
+        // `HTTPError`s always have `error.response.body` defined.
+        // Therefore, we cannot retry if `options.throwHttpErrors` is false.
+        // On the last retry, if `options.throwHttpErrors` is false, we would need to return the body,
+        // but that wouldn't be possible since the body would already have been read into `error.response.body`.
+        if (options.isStream && options.throwHttpErrors && !isResponseOk(typedResponse)) {
+            this._beforeError(new HTTPError(typedResponse));
+            return;
+        }
+        response.on('readable', () => {
+            if (this._triggerRead) {
+                this._read();
+            }
+        });
+        this.on('resume', () => {
+            response.resume();
+        });
+        this.on('pause', () => {
+            response.pause();
+        });
+        response.once('end', () => {
+            this.push(null);
+        });
+        if (this._noPipe) {
+            const success = await this._setRawBody();
+            if (success) {
+                this.emit('response', response);
+            }
+            return;
+        }
+        this.emit('response', response);
+        for (const destination of this._pipedServerResponses) {
+            if (destination.headersSent) {
+                continue;
+            }
+            // eslint-disable-next-line guard-for-in
+            for (const key in response.headers) {
+                const isAllowed = options.decompress ? key !== 'content-encoding' : true;
+                const value = response.headers[key];
+                if (isAllowed) {
+                    destination.setHeader(key, value);
+                }
+            }
+            destination.statusCode = statusCode;
+        }
+    }
+    async _setRawBody(from = this) {
+        if (from.readableEnded) {
+            return false;
+        }
+        try {
+            // Errors are emitted via the `error` event
+            const fromArray = await from.toArray();
+            const rawBody = isBuffer(fromArray.at(0)) ? external_node_buffer_namespaceObject.Buffer.concat(fromArray) : external_node_buffer_namespaceObject.Buffer.from(fromArray.join(''));
+            // On retry the Request is destroyed with no error, so the above resolves successfully.
+            // To check whether it really succeeded, we also need to check that it has been properly ended.
+            if (!this.isAborted) {
+                this.response.rawBody = rawBody;
+                return true;
+            }
+        }
+        catch { }
+        return false;
+    }
+    async _onResponse(response) {
+        try {
+            await this._onResponseBase(response);
+        }
+        catch (error) {
+            /* istanbul ignore next: better safe than sorry */
+            this._beforeError(error);
+        }
+    }
+    _onRequest(request) {
+        const { options } = this;
+        const { timeout, url } = options;
+        dist_source(request);
+        if (this.options.http2) {
+            // Unset stream timeout, as the `timeout` option was used only for connection timeout.
+            request.setTimeout(0);
+        }
+        this._cancelTimeouts = timedOut(request, timeout, url);
+        const responseEventName = options.cache ? 'cacheableResponse' : 'response';
+        request.once(responseEventName, (response) => {
+            void this._onResponse(response);
+        });
+        request.once('error', (error) => {
+            this._aborted = true;
+            // Force clean-up, because some packages (e.g. nock) don't do this.
+            request.destroy();
+            error = error instanceof timed_out_TimeoutError ? new TimeoutError(error, this.timings, this) : new RequestError(error.message, error, this);
+            this._beforeError(error);
+        });
+        this._unproxyEvents = proxyEvents(request, this, proxiedRequestEvents);
+        this._request = request;
+        this.emit('uploadProgress', this.uploadProgress);
+        this._sendBody();
+        this.emit('request', request);
+    }
+    async _asyncWrite(chunk) {
+        return new Promise((resolve, reject) => {
+            super.write(chunk, error => {
+                if (error) {
+                    reject(error);
+                    return;
+                }
+                resolve();
+            });
+        });
+    }
+    _sendBody() {
+        // Send body
+        const { body } = this.options;
+        const currentRequest = this.redirectUrls.length === 0 ? this : this._request ?? this;
+        if (distribution.nodeStream(body)) {
+            body.pipe(currentRequest);
+        }
+        else if (distribution.generator(body) || distribution.asyncGenerator(body)) {
+            (async () => {
+                try {
+                    for await (const chunk of body) {
+                        await this._asyncWrite(chunk);
+                    }
+                    super.end();
+                }
+                catch (error) {
+                    this._beforeError(error);
+                }
+            })();
+        }
+        else if (!distribution.undefined(body)) {
+            this._writeRequest(body, undefined, () => { });
+            currentRequest.end();
+        }
+        else if (this._cannotHaveBody || this._noPipe) {
+            currentRequest.end();
+        }
+    }
+    _prepareCache(cache) {
+        if (!cacheableStore.has(cache)) {
+            const cacheableRequest = new dist(((requestOptions, handler) => {
+                const result = requestOptions._request(requestOptions, handler);
+                // TODO: remove this when `cacheable-request` supports async request functions.
+                if (distribution.promise(result)) {
+                    // We only need to implement the error handler in order to support HTTP2 caching.
+                    // The result will be a promise anyway.
+                    // @ts-expect-error ignore
+                    result.once = (event, handler) => {
+                        if (event === 'error') {
+                            (async () => {
+                                try {
+                                    await result;
+                                }
+                                catch (error) {
+                                    handler(error);
+                                }
+                            })();
+                        }
+                        else if (event === 'abort' || event === 'destroy') {
+                            // The empty catch is needed here in case it rejects
+                            // before it's `await`ed in `_makeRequest`.
+                            (async () => {
+                                try {
+                                    const request = (await result);
+                                    request.once(event, handler);
+                                }
+                                catch { }
+                            })();
+                        }
+                        else {
+                            /* istanbul ignore next: safety check */
+                            throw new Error(`Unknown HTTP2 promise event: ${event}`);
+                        }
+                        return result;
+                    };
+                }
+                return result;
+            }), cache);
+            cacheableStore.set(cache, cacheableRequest.request());
+        }
+    }
+    async _createCacheableRequest(url, options) {
+        return new Promise((resolve, reject) => {
+            // TODO: Remove `utils/url-to-options.ts` when `cacheable-request` is fixed
+            Object.assign(options, urlToOptions(url));
+            let request;
+            // TODO: Fix `cacheable-response`. This is ugly.
+            const cacheRequest = cacheableStore.get(options.cache)(options, async (response) => {
+                response._readableState.autoDestroy = false;
+                if (request) {
+                    const fix = () => {
+                        if (response.req) {
+                            response.complete = response.req.res.complete;
+                        }
+                    };
+                    response.prependOnceListener('end', fix);
+                    fix();
+                    (await request).emit('cacheableResponse', response);
+                }
+                resolve(response);
+            });
+            cacheRequest.once('error', reject);
+            cacheRequest.once('request', async (requestOrPromise) => {
+                request = requestOrPromise;
+                resolve(request);
+            });
+        });
+    }
+    async _makeRequest() {
+        const { options } = this;
+        const { headers, username, password } = options;
+        const cookieJar = options.cookieJar;
+        for (const key in headers) {
+            if (distribution.undefined(headers[key])) {
+                // eslint-disable-next-line @typescript-eslint/no-dynamic-delete
+                delete headers[key];
+            }
+            else if (distribution.null(headers[key])) {
+                throw new TypeError(`Use \`undefined\` instead of \`null\` to delete the \`${key}\` header`);
+            }
+        }
+        if (options.decompress && distribution.undefined(headers['accept-encoding'])) {
+            headers['accept-encoding'] = supportsBrotli ? 'gzip, deflate, br' : 'gzip, deflate';
+        }
+        if (username || password) {
+            const credentials = external_node_buffer_namespaceObject.Buffer.from(`${username}:${password}`).toString('base64');
+            headers.authorization = `Basic ${credentials}`;
+        }
+        // Set cookies
+        if (cookieJar) {
+            const cookieString = await cookieJar.getCookieString(options.url.toString());
+            if (distribution.nonEmptyString(cookieString)) {
+                headers.cookie = cookieString;
+            }
+        }
+        // Reset `prefixUrl`
+        options.prefixUrl = '';
+        let request;
+        for (const hook of options.hooks.beforeRequest) {
+            // eslint-disable-next-line no-await-in-loop
+            const result = await hook(options);
+            if (!distribution.undefined(result)) {
+                // @ts-expect-error Skip the type mismatch to support abstract responses
+                request = () => result;
+                break;
+            }
+        }
+        request ||= options.getRequestFunction();
+        const url = options.url;
+        this._requestOptions = options.createNativeRequestOptions();
+        if (options.cache) {
+            this._requestOptions._request = request;
+            this._requestOptions.cache = options.cache;
+            this._requestOptions.body = options.body;
+            this._prepareCache(options.cache);
+        }
+        // Cache support
+        const function_ = options.cache ? this._createCacheableRequest : request;
+        try {
+            // We can't do `await fn(...)`,
+            // because stream `error` event can be emitted before `Promise.resolve()`.
+            let requestOrResponse = function_(url, this._requestOptions);
+            if (distribution.promise(requestOrResponse)) {
+                requestOrResponse = await requestOrResponse;
+            }
+            // Fallback
+            if (distribution.undefined(requestOrResponse)) {
+                requestOrResponse = options.getFallbackRequestFunction()(url, this._requestOptions);
+                if (distribution.promise(requestOrResponse)) {
+                    requestOrResponse = await requestOrResponse;
+                }
+            }
+            if (is_client_request(requestOrResponse)) {
+                this._onRequest(requestOrResponse);
+            }
+            else if (this.writable) {
+                this.once('finish', () => {
+                    void this._onResponse(requestOrResponse);
+                });
+                this._sendBody();
+            }
+            else {
+                void this._onResponse(requestOrResponse);
+            }
+        }
+        catch (error) {
+            if (error instanceof types_CacheError) {
+                throw new CacheError(error, this);
+            }
+            throw error;
+        }
+    }
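+    // Illustrative sketch of the `beforeRequest` hooks consumed above (assuming a got
+    // instance created from the exported factory below; the header name is hypothetical):
+    // a hook can mutate the normalized options before the request is created, or return a
+    // response-like value to skip the network call entirely.
+    //
+    //   const client = got.extend({
+    //       hooks: { beforeRequest: [options => { options.headers['x-example'] = '1'; }] },
+    //   });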
+    async _error(error) {
+        try {
+            if (error instanceof HTTPError && !this.options.throwHttpErrors) {
+                // This branch can be reached only when using the Promise API
+                // Skip calling the hooks on purpose.
+                // See https://github.com/sindresorhus/got/issues/2103
+            }
+            else {
+                for (const hook of this.options.hooks.beforeError) {
+                    // eslint-disable-next-line no-await-in-loop
+                    error = await hook(error);
+                }
+            }
+        }
+        catch (error_) {
+            error = new RequestError(error_.message, error_, this);
+        }
+        this.destroy(error);
+    }
+    _writeRequest(chunk, encoding, callback) {
+        if (!this._request || this._request.destroyed) {
+            // The `ClientRequest` instance will probably throw
+            return;
+        }
+        this._request.write(chunk, encoding, (error) => {
+            // The `!destroyed` check is required to prevent `uploadProgress` being emitted after the stream was destroyed
+            if (!error && !this._request.destroyed) {
+                this._uploadedSize += external_node_buffer_namespaceObject.Buffer.byteLength(chunk, encoding);
+                const progress = this.uploadProgress;
+                if (progress.percent < 1) {
+                    this.emit('uploadProgress', progress);
+                }
+            }
+            callback(error);
+        });
+    }
+    /**
+    The remote IP address.
+    */
+    get ip() {
+        return this.socket?.remoteAddress;
+    }
+    /**
+    Indicates whether the request has been aborted or not.
+    */
+    get isAborted() {
+        return this._aborted;
+    }
+    get socket() {
+        return this._request?.socket ?? undefined;
+    }
+    /**
+    Progress event for downloading (receiving a response).
+    */
+    get downloadProgress() {
+        let percent;
+        if (this._responseSize) {
+            percent = this._downloadedSize / this._responseSize;
+        }
+        else if (this._responseSize === this._downloadedSize) {
+            percent = 1;
+        }
+        else {
+            percent = 0;
+        }
+        return {
+            percent,
+            transferred: this._downloadedSize,
+            total: this._responseSize,
+        };
+    }
+    /**
+    Progress event for uploading (sending a request).
+    */
+    get uploadProgress() {
+        let percent;
+        if (this._bodySize) {
+            percent = this._uploadedSize / this._bodySize;
+        }
+        else if (this._bodySize === this._uploadedSize) {
+            percent = 1;
+        }
+        else {
+            percent = 0;
+        }
+        return {
+            percent,
+            transferred: this._uploadedSize,
+            total: this._bodySize,
+        };
+    }
+    /**
+    The object contains the following properties:
+
+    - `start` - Time when the request started.
+    - `socket` - Time when a socket was assigned to the request.
+    - `lookup` - Time when the DNS lookup finished.
+    - `connect` - Time when the socket successfully connected.
+    - `secureConnect` - Time when the socket securely connected.
+    - `upload` - Time when the request finished uploading.
+    - `response` - Time when the request fired `response` event.
+    - `end` - Time when the response fired `end` event.
+    - `error` - Time when the request fired `error` event.
+    - `abort` - Time when the request fired `abort` event.
+    - `phases`
+        - `wait` - `timings.socket - timings.start`
+        - `dns` - `timings.lookup - timings.socket`
+        - `tcp` - `timings.connect - timings.lookup`
+        - `tls` - `timings.secureConnect - timings.connect`
+        - `request` - `timings.upload - (timings.secureConnect || timings.connect)`
+        - `firstByte` - `timings.response - timings.upload`
+        - `download` - `timings.end - timings.response`
+        - `total` - `(timings.end || timings.error || timings.abort) - timings.start`
+
+    If something has not been measured yet, it will be `undefined`.
+
+    __Note__: The time is a `number` representing the milliseconds elapsed since the UNIX epoch.
+    */
+    get timings() {
+        return this._request?.timings;
+    }
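+    // Illustrative sketch (assuming `response.timings` has been populated; the
+    // `dist_source(request)` call in `_onRequest` attaches the timer): reading the
+    // phases documented in the comment above from a finished request.
+    //
+    //   const { phases } = response.timings ?? {};
+    //   if (phases?.total !== undefined) {
+    //       console.log(`dns=${phases.dns}ms firstByte=${phases.firstByte}ms total=${phases.total}ms`);
+    //   }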
+    /**
+    Whether the response was retrieved from the cache.
+    */
+    get isFromCache() {
+        return this._isFromCache;
+    }
+    get reusedSocket() {
+        return this._request?.reusedSocket;
+    }
+}
+
+;// CONCATENATED MODULE: ./node_modules/.pnpm/got@14.4.4/node_modules/got/dist/source/as-promise/types.js
+
+/**
+An error to be thrown when the request is aborted with `.cancel()`.
+*/
+class types_CancelError extends RequestError {
+    constructor(request) {
+        super('Promise was canceled', {}, request);
+        this.name = 'CancelError';
+        this.code = 'ERR_CANCELED';
+    }
+    /**
+    Whether the promise is canceled.
+    */
+    get isCanceled() {
+        return true;
+    }
+}
+
+;// CONCATENATED MODULE: ./node_modules/.pnpm/got@14.4.4/node_modules/got/dist/source/as-promise/index.js
+
+
+
+
+
+
+
+
+const as_promise_proxiedRequestEvents = [
+    'request',
+    'response',
+    'redirect',
+    'uploadProgress',
+    'downloadProgress',
+];
+function asPromise(firstRequest) {
+    let globalRequest;
+    let globalResponse;
+    let normalizedOptions;
+    const emitter = new external_node_events_.EventEmitter();
+    const promise = new PCancelable((resolve, reject, onCancel) => {
+        onCancel(() => {
+            globalRequest.destroy();
+        });
+        onCancel.shouldReject = false;
+        onCancel(() => {
+            reject(new types_CancelError(globalRequest));
+        });
+        const makeRequest = (retryCount) => {
+            // Errors when a new request is made after the promise settles.
+            // Used to detect a race condition.
+            // See https://github.com/sindresorhus/got/issues/1489
+            onCancel(() => { });
+            const request = firstRequest ?? new Request(undefined, undefined, normalizedOptions);
+            request.retryCount = retryCount;
+            request._noPipe = true;
+            globalRequest = request;
+            request.once('response', async (response) => {
+                // Parse body
+                const contentEncoding = (response.headers['content-encoding'] ?? '').toLowerCase();
+                const isCompressed = contentEncoding === 'gzip' || contentEncoding === 'deflate' || contentEncoding === 'br';
+                const { options } = request;
+                if (isCompressed && !options.decompress) {
+                    response.body = response.rawBody;
+                }
+                else {
+                    try {
+                        response.body = parseBody(response, options.responseType, options.parseJson, options.encoding);
+                    }
+                    catch (error) {
+                        // Fall back to `utf8`
+                        try {
+                            response.body = response.rawBody.toString();
+                        }
+                        catch (error) {
+                            request._beforeError(new ParseError(error, response));
+                            return;
+                        }
+                        if (isResponseOk(response)) {
+                            request._beforeError(error);
+                            return;
+                        }
+                    }
+                }
+                try {
+                    const hooks = options.hooks.afterResponse;
+                    for (const [index, hook] of hooks.entries()) {
+                        // @ts-expect-error TS doesn't notice that CancelableRequest is a Promise
+                        // eslint-disable-next-line no-await-in-loop
+                        response = await hook(response, async (updatedOptions) => {
+                            options.merge(updatedOptions);
+                            options.prefixUrl = '';
+                            if (updatedOptions.url) {
+                                options.url = updatedOptions.url;
+                            }
+                            // Remove any further hooks for that request, because we'll call them anyway.
+                            // The loop continues. We don't want duplicates (asPromise recursion).
+                            options.hooks.afterResponse = options.hooks.afterResponse.slice(0, index);
+                            throw new RetryError(request);
+                        });
+                        if (!(distribution.object(response) && distribution.number(response.statusCode) && !distribution.nullOrUndefined(response.body))) {
+                            throw new TypeError('The `afterResponse` hook returned an invalid value');
+                        }
+                    }
+                }
+                catch (error) {
+                    request._beforeError(error);
+                    return;
+                }
+                globalResponse = response;
+                if (!isResponseOk(response)) {
+                    request._beforeError(new HTTPError(response));
+                    return;
+                }
+                request.destroy();
+                resolve(request.options.resolveBodyOnly ? response.body : response);
+            });
+            const onError = (error) => {
+                if (promise.isCanceled) {
+                    return;
+                }
+                const { options } = request;
+                if (error instanceof HTTPError && !options.throwHttpErrors) {
+                    const { response } = error;
+                    request.destroy();
+                    resolve(request.options.resolveBodyOnly ? response.body : response);
+                    return;
+                }
+                reject(error);
+            };
+            request.once('error', onError);
+            const previousBody = request.options?.body;
+            request.once('retry', (newRetryCount, error) => {
+                firstRequest = undefined;
+                const newBody = request.options.body;
+                if (previousBody === newBody && distribution.nodeStream(newBody)) {
+                    error.message = 'Cannot retry with consumed body stream';
+                    onError(error);
+                    return;
+                }
+                // This is needed! We need to reuse `request.options` because they can get modified!
+                // For example, by calling `promise.json()`.
+                normalizedOptions = request.options;
+                makeRequest(newRetryCount);
+            });
+            proxyEvents(request, emitter, as_promise_proxiedRequestEvents);
+            if (distribution.undefined(firstRequest)) {
+                void request.flush();
+            }
+        };
+        makeRequest(0);
+    });
+    promise.on = (event, function_) => {
+        emitter.on(event, function_);
+        return promise;
+    };
+    promise.off = (event, function_) => {
+        emitter.off(event, function_);
+        return promise;
+    };
+    const shortcut = (responseType) => {
+        const newPromise = (async () => {
+            // Wait until downloading has ended
+            await promise;
+            const { options } = globalResponse.request;
+            return parseBody(globalResponse, responseType, options.parseJson, options.encoding);
+        })();
+        // eslint-disable-next-line @typescript-eslint/no-floating-promises
+        Object.defineProperties(newPromise, Object.getOwnPropertyDescriptors(promise));
+        return newPromise;
+    };
+    promise.json = () => {
+        if (globalRequest.options) {
+            const { headers } = globalRequest.options;
+            if (!globalRequest.writableFinished && !('accept' in headers)) {
+                headers.accept = 'application/json';
+            }
+        }
+        return shortcut('json');
+    };
+    promise.buffer = () => shortcut('buffer');
+    promise.text = () => shortcut('text');
+    return promise;
+}
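+// Illustrative sketch of the promise API that `asPromise` assembles (assuming it is reached
+// through the default `got` instance exported below, inside an async context): the returned
+// promise is cancelable, proxies the events listed in `as_promise_proxiedRequestEvents`, and
+// exposes the `.json()` / `.text()` / `.buffer()` shortcuts defined above.
+//
+//   const promise = got('https://example.com/api');
+//   promise.on('downloadProgress', progress => console.log(progress.percent));
+//   const data = await promise.json();
+//   // promise.cancel() rejects with the CancelError defined in the previous module.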
+
+;// CONCATENATED MODULE: ./node_modules/.pnpm/got@14.4.4/node_modules/got/dist/source/create.js
+
+
+
+
+
+const isGotInstance = (value) => distribution.function(value);
+const aliases = [
+    'get',
+    'post',
+    'put',
+    'patch',
+    'head',
+    'delete',
+];
+const create = (defaults) => {
+    defaults = {
+        options: new Options(undefined, undefined, defaults.options),
+        handlers: [...defaults.handlers],
+        mutableDefaults: defaults.mutableDefaults,
+    };
+    Object.defineProperty(defaults, 'mutableDefaults', {
+        enumerable: true,
+        configurable: false,
+        writable: false,
+    });
+    // Got interface
+    const got = ((url, options, defaultOptions = defaults.options) => {
+        const request = new Request(url, options, defaultOptions);
+        let promise;
+        const lastHandler = (normalized) => {
+            // Note: `options` is `undefined` when `new Options(...)` fails
+            request.options = normalized;
+            request._noPipe = !normalized?.isStream;
+            void request.flush();
+            if (normalized?.isStream) {
+                return request;
+            }
+            promise ||= asPromise(request);
+            return promise;
+        };
+        let iteration = 0;
+        const iterateHandlers = (newOptions) => {
+            const handler = defaults.handlers[iteration++] ?? lastHandler;
+            const result = handler(newOptions, iterateHandlers);
+            if (distribution.promise(result) && !request.options?.isStream) {
+                promise ||= asPromise(request);
+                if (result !== promise) {
+                    const descriptors = Object.getOwnPropertyDescriptors(promise);
+                    for (const key in descriptors) {
+                        if (key in result) {
+                            // eslint-disable-next-line @typescript-eslint/no-dynamic-delete
+                            delete descriptors[key];
+                        }
+                    }
+                    // eslint-disable-next-line @typescript-eslint/no-floating-promises
+                    Object.defineProperties(result, descriptors);
+                    result.cancel = promise.cancel;
+                }
+            }
+            return result;
+        };
+        return iterateHandlers(request.options);
+    });
+    got.extend = (...instancesOrOptions) => {
+        const options = new Options(undefined, undefined, defaults.options);
+        const handlers = [...defaults.handlers];
+        let mutableDefaults;
+        for (const value of instancesOrOptions) {
+            if (isGotInstance(value)) {
+                options.merge(value.defaults.options);
+                handlers.push(...value.defaults.handlers);
+                mutableDefaults = value.defaults.mutableDefaults;
+            }
+            else {
+                options.merge(value);
+                if (value.handlers) {
+                    handlers.push(...value.handlers);
+                }
+                mutableDefaults = value.mutableDefaults;
+            }
+        }
+        return create({
+            options,
+            handlers,
+            mutableDefaults: Boolean(mutableDefaults),
+        });
+    };
+    // Pagination
+    const paginateEach = (async function* (url, options) {
+        let normalizedOptions = new Options(url, options, defaults.options);
+        normalizedOptions.resolveBodyOnly = false;
+        const { pagination } = normalizedOptions;
+        assert.function(pagination.transform);
+        assert.function(pagination.shouldContinue);
+        assert.function(pagination.filter);
+        assert.function(pagination.paginate);
+        assert.number(pagination.countLimit);
+        assert.number(pagination.requestLimit);
+        assert.number(pagination.backoff);
+        const allItems = [];
+        let { countLimit } = pagination;
+        let numberOfRequests = 0;
+        while (numberOfRequests < pagination.requestLimit) {
+            if (numberOfRequests !== 0) {
+                // eslint-disable-next-line no-await-in-loop
+                await (0,external_node_timers_promises_namespaceObject.setTimeout)(pagination.backoff);
+            }
+            // eslint-disable-next-line no-await-in-loop
+            const response = (await got(undefined, undefined, normalizedOptions));
+            // eslint-disable-next-line no-await-in-loop
+            const parsed = await pagination.transform(response);
+            const currentItems = [];
+            assert.array(parsed);
+            for (const item of parsed) {
+                if (pagination.filter({ item, currentItems, allItems })) {
+                    if (!pagination.shouldContinue({ item, currentItems, allItems })) {
+                        return;
+                    }
+                    yield item;
+                    if (pagination.stackAllItems) {
+                        allItems.push(item);
+                    }
+                    currentItems.push(item);
+                    if (--countLimit <= 0) {
+                        return;
+                    }
+                }
+            }
+            const optionsToMerge = pagination.paginate({
+                response,
+                currentItems,
+                allItems,
+            });
+            if (optionsToMerge === false) {
+                return;
+            }
+            if (optionsToMerge === response.request.options) {
+                normalizedOptions = response.request.options;
+            }
+            else {
+                normalizedOptions.merge(optionsToMerge);
+                assert.any([distribution.urlInstance, distribution.undefined], optionsToMerge.url);
+                if (optionsToMerge.url !== undefined) {
+                    normalizedOptions.prefixUrl = '';
+                    normalizedOptions.url = optionsToMerge.url;
+                }
+            }
+            numberOfRequests++;
+        }
+    });
+    got.paginate = paginateEach;
+    got.paginate.all = (async (url, options) => {
+        const results = [];
+        for await (const item of paginateEach(url, options)) {
+            results.push(item);
+        }
+        return results;
+    });
+    // For those who like very descriptive names
+    got.paginate.each = paginateEach;
+    // Stream API
+    got.stream = ((url, options) => got(url, { ...options, isStream: true }));
+    // Shortcuts
+    for (const method of aliases) {
+        got[method] = ((url, options) => got(url, { ...options, method }));
+        got.stream[method] = ((url, options) => got(url, { ...options, method, isStream: true }));
+    }
+    if (!defaults.mutableDefaults) {
+        Object.freeze(defaults.handlers);
+        defaults.options.freeze();
+    }
+    Object.defineProperty(got, 'defaults', {
+        value: defaults,
+        writable: false,
+        configurable: false,
+        enumerable: true,
+    });
+    return got;
+};
+/* harmony default export */ const source_create = (create);
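+// Illustrative sketch of the instance API produced by `create` (assuming the default `got`
+// instance defined below, inside an async context; the URL and option values are
+// placeholders): `extend` merges options and handlers into a new instance, and `paginate`
+// drives the `paginateEach` generator above.
+//
+//   const client = got.extend({ prefixUrl: 'https://example.com', retry: { limit: 2 } });
+//   for await (const item of client.paginate('items', { searchParams: { page: 1 } })) {
+//       console.log(item);
+//   }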
+
+;// CONCATENATED MODULE: ./node_modules/.pnpm/got@14.4.4/node_modules/got/dist/source/index.js
+
+
+const defaults = {
+    options: new Options(),
+    handlers: [],
+    mutableDefaults: false,
+};
+const got = source_create(defaults);
+/* harmony default export */ const got_dist_source = (got);
+// TODO: Remove this in the next major version.
+
+
+
+
+
+
+
+
+
+
+
+
+;// CONCATENATED MODULE: external "node:dns/promises"
+const external_node_dns_promises_namespaceObject = __WEBPACK_EXTERNAL_createRequire(import.meta.url)("node:dns/promises");
+// EXTERNAL MODULE: ./node_modules/.pnpm/@actions+cache@3.3.0/node_modules/@actions/cache/lib/cache.js
+var cache = __nccwpck_require__(5500);
+;// CONCATENATED MODULE: external "node:child_process"
+const external_node_child_process_namespaceObject = __WEBPACK_EXTERNAL_createRequire(import.meta.url)("node:child_process");
+;// CONCATENATED MODULE: external "node:path"
+const external_node_path_namespaceObject = __WEBPACK_EXTERNAL_createRequire(import.meta.url)("node:path");
+;// CONCATENATED MODULE: ./node_modules/.pnpm/github.com+DeterminateSystems+detsys-ts@eb87094f35072ac911526ad052c3437c9e0c42d6_jwilbi5lzaum4bdwlmwjqt7ufe/node_modules/detsys-ts/dist/index.js
+var __defProp = Object.defineProperty;
+var __export = (target, all) => {
+  for (var name in all)
+    __defProp(target, name, { get: all[name], enumerable: true });
+};
+
+// package.json
+var version = "1.0.0";
+
+// src/linux-release-info.ts
+
+
+
+var readFileAsync = (0,external_node_util_.promisify)(external_node_fs_namespaceObject.readFile);
+var linuxReleaseInfoOptionsDefaults = {
+  mode: "async",
+  customFile: null,
+  debug: false
+};
+function releaseInfo(infoOptions) {
+  const options = { ...linuxReleaseInfoOptionsDefaults, ...infoOptions };
+  const searchOsReleaseFileList = osReleaseFileList(
+    options.customFile
+  );
+  if (external_node_os_.type() !== "Linux") {
+    if (options.mode === "sync") {
+      return getOsInfo();
+    } else {
+      return Promise.resolve(getOsInfo());
+    }
+  }
+  if (options.mode === "sync") {
+    return readSyncOsreleaseFile(searchOsReleaseFileList, options);
+  } else {
+    return Promise.resolve(
+      readAsyncOsReleaseFile(searchOsReleaseFileList, options)
+    );
+  }
+}
+function formatFileData(sourceData, srcParseData) {
+  const lines = srcParseData.split("\n");
+  for (const line of lines) {
+    const lineData = line.split("=");
+    if (lineData.length === 2) {
+      lineData[1] = lineData[1].replace(/["'\r]/gi, "");
+      Object.defineProperty(sourceData, lineData[0].toLowerCase(), {
+        value: lineData[1],
+        writable: true,
+        enumerable: true,
+        configurable: true
+      });
+    }
+  }
+  return sourceData;
+}
+function osReleaseFileList(customFile) {
+  const DEFAULT_OS_RELEASE_FILES = ["/etc/os-release", "/usr/lib/os-release"];
+  if (!customFile) {
+    return DEFAULT_OS_RELEASE_FILES;
+  } else {
+    return Array(customFile);
+  }
+}
+function getOsInfo() {
+  return {
+    type: external_node_os_.type(),
+    platform: external_node_os_.platform(),
+    hostname: external_node_os_.hostname(),
+    arch: external_node_os_.arch(),
+    release: external_node_os_.release()
+  };
+}
+async function readAsyncOsReleaseFile(fileList, options) {
+  let fileData = null;
+  for (const osReleaseFile of fileList) {
+    try {
+      if (options.debug) {
+        console.log(`Trying to read '${osReleaseFile}'...`);
+      }
+      fileData = await readFileAsync(osReleaseFile, "binary");
+      if (options.debug) {
+        console.log(`Read data:
+${fileData}`);
+      }
+      break;
+    } catch (error3) {
+      if (options.debug) {
+        console.error(error3);
+      }
+    }
+  }
+  if (fileData === null) {
+    throw new Error("Cannot read os-release file!");
+  }
+  return formatFileData(getOsInfo(), fileData);
+}
+function readSyncOsreleaseFile(releaseFileList, options) {
+  let fileData = null;
+  for (const osReleaseFile of releaseFileList) {
+    try {
+      if (options.debug) {
+        console.log(`Trying to read '${osReleaseFile}'...`);
+      }
+      fileData = external_node_fs_namespaceObject.readFileSync(osReleaseFile, "binary");
+      if (options.debug) {
+        console.log(`Read data:
+${fileData}`);
+      }
+      break;
+    } catch (error3) {
+      if (options.debug) {
+        console.error(error3);
+      }
+    }
+  }
+  if (fileData === null) {
+    throw new Error("Cannot read os-release file!");
+  }
+  return formatFileData(getOsInfo(), fileData);
+}
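+// Illustrative sketch of the parsing done by `formatFileData` above (assuming a typical
+// /etc/os-release): each `KEY=value` line becomes a lower-cased property on the base OS
+// info object, with quotes and carriage returns stripped.
+//
+//   // ID=ubuntu           ->  info.id === "ubuntu"
+//   // VERSION_ID="22.04"  ->  info.version_id === "22.04"
+//   const info = releaseInfo({ mode: "sync" });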
+
+// src/actions-core-platform.ts
+
+
+
+var getWindowsInfo = async () => {
+  const { stdout: version2 } = await exec.getExecOutput(
+    'powershell -command "(Get-CimInstance -ClassName Win32_OperatingSystem).Version"',
+    void 0,
+    {
+      silent: true
+    }
+  );
+  const { stdout: name } = await exec.getExecOutput(
+    'powershell -command "(Get-CimInstance -ClassName Win32_OperatingSystem).Caption"',
+    void 0,
+    {
+      silent: true
+    }
+  );
+  return {
+    name: name.trim(),
+    version: version2.trim()
+  };
+};
+var getMacOsInfo = async () => {
+  const { stdout } = await exec.getExecOutput("sw_vers", void 0, {
+    silent: true
+  });
+  const version2 = stdout.match(/ProductVersion:\s*(.+)/)?.[1] ?? "";
+  const name = stdout.match(/ProductName:\s*(.+)/)?.[1] ?? "";
+  return {
+    name,
+    version: version2
+  };
+};
+var getLinuxInfo = async () => {
+  let data = {};
+  try {
+    data = releaseInfo({ mode: "sync" });
+    core.debug(`Identified release info: ${JSON.stringify(data)}`);
+  } catch (e) {
+    core.debug(`Error collecting release info: ${e}`);
+  }
+  return {
+    name: getPropertyViaWithDefault(
+      data,
+      ["id", "name", "pretty_name", "id_like"],
+      "unknown"
+    ),
+    version: getPropertyViaWithDefault(
+      data,
+      ["version_id", "version", "version_codename"],
+      "unknown"
+    )
+  };
+};
+function getPropertyViaWithDefault(data, names, defaultValue) {
+  for (const name of names) {
+    const ret = getPropertyWithDefault(data, name, defaultValue);
+    if (ret !== defaultValue) {
+      return ret;
+    }
+  }
+  return defaultValue;
+}
+function getPropertyWithDefault(data, name, defaultValue) {
+  if (!data.hasOwnProperty(name)) {
+    return defaultValue;
+  }
+  const value = data[name];
+  if (typeof value !== typeof defaultValue) {
+    return defaultValue;
+  }
+  return value;
+}
+var platform2 = external_os_.platform();
+var arch2 = external_os_.arch();
+var isWindows = platform2 === "win32";
+var isMacOS = platform2 === "darwin";
+var isLinux = platform2 === "linux";
+async function getDetails() {
+  return {
+    ...await (isWindows ? getWindowsInfo() : isMacOS ? getMacOsInfo() : getLinuxInfo()),
+    platform: platform2,
+    arch: arch2,
+    isWindows,
+    isMacOS,
+    isLinux
+  };
+}
+
+// src/errors.ts
+function stringifyError(e) {
+  if (e instanceof Error) {
+    return e.message;
+  } else if (typeof e === "string") {
+    return e;
+  } else {
+    return JSON.stringify(e);
+  }
+}
+
+// src/backtrace.ts
+
+
+
+
+
+async function collectBacktraces(prefixes, startTimestampMs) {
+  if (isMacOS) {
+    return await collectBacktracesMacOS(prefixes, startTimestampMs);
+  }
+  if (isLinux) {
+    return await collectBacktracesSystemd(prefixes, startTimestampMs);
+  }
+  return /* @__PURE__ */ new Map();
+}
+async function collectBacktracesMacOS(prefixes, startTimestampMs) {
+  const backtraces = /* @__PURE__ */ new Map();
+  try {
+    const { stdout: logJson } = await exec.getExecOutput(
+      "log",
+      [
+        "show",
+        "--style",
+        "json",
+        "--last",
+        // Note: we collect only the last 1m, because writing the crash log should take only a few seconds.
+        // Therefore, any crash older than 1m should be fully written by now.
+        "1m",
+        "--no-info",
+        "--predicate",
+        "sender = 'ReportCrash'"
+      ],
+      {
+        silent: true
+      }
+    );
+    const sussyArray = JSON.parse(logJson);
+    if (!Array.isArray(sussyArray)) {
+      throw new Error(`Log json isn't an array: ${logJson}`);
+    }
+    if (sussyArray.length > 0) {
+      core.info(`Collecting crash data...`);
+      const delay = async (ms) => new Promise((resolve) => setTimeout(resolve, ms));
+      await delay(5e3);
+    }
+  } catch (e) {
+    core.debug(
+      "Failed to check logs for in-progress crash dumps; now proceeding with the assumption that all crash dumps completed."
+    );
+  }
+  const dirs = [
+    ["system", "/Library/Logs/DiagnosticReports/"],
+    ["user", `${process.env["HOME"]}/Library/Logs/DiagnosticReports/`]
+  ];
+  for (const [source, dir] of dirs) {
+    const fileNames = (await (0,promises_namespaceObject.readdir)(dir)).filter((fileName) => {
+      return prefixes.some((prefix) => fileName.startsWith(prefix));
+    }).filter((fileName) => {
+      return !fileName.endsWith(".diag");
+    });
+    const doGzip = (0,external_node_util_.promisify)(external_node_zlib_.gzip);
+    for (const fileName of fileNames) {
+      try {
+        if ((await (0,promises_namespaceObject.stat)(`${dir}/${fileName}`)).ctimeMs >= startTimestampMs) {
+          const logText = await (0,promises_namespaceObject.readFile)(`${dir}/${fileName}`);
+          const buf = await doGzip(logText);
+          backtraces.set(
+            `backtrace_value_${source}_${fileName}`,
+            buf.toString("base64")
+          );
+        }
+      } catch (innerError) {
+        backtraces.set(
+          `backtrace_failure_${source}_${fileName}`,
+          stringifyError(innerError)
+        );
+      }
+    }
+  }
+  return backtraces;
+}
+async function collectBacktracesSystemd(prefixes, startTimestampMs) {
+  const sinceSeconds = Math.ceil((Date.now() - startTimestampMs) / 1e3);
+  const backtraces = /* @__PURE__ */ new Map();
+  const coredumps = [];
+  try {
+    const { stdout: coredumpjson } = await exec.getExecOutput(
+      "coredumpctl",
+      ["--json=pretty", "list", "--since", `${sinceSeconds} seconds ago`],
+      {
+        silent: true
+      }
+    );
+    const sussyArray = JSON.parse(coredumpjson);
+    if (!Array.isArray(sussyArray)) {
+      throw new Error(`Coredump isn't an array: ${coredumpjson}`);
+    }
+    for (const sussyObject of sussyArray) {
+      const keys = Object.keys(sussyObject);
+      if (keys.includes("exe") && keys.includes("pid")) {
+        if (typeof sussyObject.exe == "string" && typeof sussyObject.pid == "number") {
+          const execParts = sussyObject.exe.split("/");
+          const binaryName = execParts[execParts.length - 1];
+          if (prefixes.some((prefix) => binaryName.startsWith(prefix))) {
+            coredumps.push({
+              exe: sussyObject.exe,
+              pid: sussyObject.pid
+            });
+          }
+        } else {
+          core.debug(
+            `Mysterious coredump entry missing exe string and/or pid number: ${JSON.stringify(sussyObject)}`
+          );
+        }
+      } else {
+        core.debug(
+          `Mysterious coredump entry missing exe value and/or pid value: ${JSON.stringify(sussyObject)}`
+        );
+      }
+    }
+  } catch (innerError) {
+    core.debug(
+      `Cannot collect backtraces: ${stringifyError(innerError)}`
+    );
+    return backtraces;
+  }
+  const doGzip = (0,external_node_util_.promisify)(external_node_zlib_.gzip);
+  for (const coredump of coredumps) {
+    try {
+      const { stdout: logText } = await exec.getExecOutput(
+        "coredumpctl",
+        ["info", `${coredump.pid}`],
+        {
+          silent: true
+        }
+      );
+      const buf = await doGzip(logText);
+      backtraces.set(`backtrace_value_${coredump.pid}`, buf.toString("base64"));
+    } catch (innerError) {
+      backtraces.set(
+        `backtrace_failure_${coredump.pid}`,
+        stringifyError(innerError)
+      );
+    }
+  }
+  return backtraces;
+}
+
+// src/correlation.ts
+
+
+var OPTIONAL_VARIABLES = ["INVOCATION_ID"];
+function identify(projectName) {
+  const ident = {
+    correlation_source: "github-actions",
+    repository: hashEnvironmentVariables("GHR", [
+      "GITHUB_SERVER_URL",
+      "GITHUB_REPOSITORY_OWNER",
+      "GITHUB_REPOSITORY_OWNER_ID",
+      "GITHUB_REPOSITORY",
+      "GITHUB_REPOSITORY_ID"
+    ]),
+    workflow: hashEnvironmentVariables("GHW", [
+      "GITHUB_SERVER_URL",
+      "GITHUB_REPOSITORY_OWNER",
+      "GITHUB_REPOSITORY_OWNER_ID",
+      "GITHUB_REPOSITORY",
+      "GITHUB_REPOSITORY_ID",
+      "GITHUB_WORKFLOW"
+    ]),
+    job: hashEnvironmentVariables("GHWJ", [
+      "GITHUB_SERVER_URL",
+      "GITHUB_REPOSITORY_OWNER",
+      "GITHUB_REPOSITORY_OWNER_ID",
+      "GITHUB_REPOSITORY",
+      "GITHUB_REPOSITORY_ID",
+      "GITHUB_WORKFLOW",
+      "GITHUB_JOB"
+    ]),
+    run: hashEnvironmentVariables("GHWJR", [
+      "GITHUB_SERVER_URL",
+      "GITHUB_REPOSITORY_OWNER",
+      "GITHUB_REPOSITORY_OWNER_ID",
+      "GITHUB_REPOSITORY",
+      "GITHUB_REPOSITORY_ID",
+      "GITHUB_WORKFLOW",
+      "GITHUB_JOB",
+      "GITHUB_RUN_ID"
+    ]),
+    run_differentiator: hashEnvironmentVariables("GHWJA", [
+      "GITHUB_SERVER_URL",
+      "GITHUB_REPOSITORY_OWNER",
+      "GITHUB_REPOSITORY_OWNER_ID",
+      "GITHUB_REPOSITORY",
+      "GITHUB_REPOSITORY_ID",
+      "GITHUB_WORKFLOW",
+      "GITHUB_JOB",
+      "GITHUB_RUN_ID",
+      "GITHUB_RUN_NUMBER",
+      "GITHUB_RUN_ATTEMPT",
+      "INVOCATION_ID"
+    ]),
+    groups: {
+      ci: "github-actions",
+      project: projectName,
+      github_organization: hashEnvironmentVariables("GHO", [
+        "GITHUB_SERVER_URL",
+        "GITHUB_REPOSITORY_OWNER",
+        "GITHUB_REPOSITORY_OWNER_ID"
+      ])
+    }
+  };
+  core.debug("Correlation data:");
+  core.debug(JSON.stringify(ident, null, 2));
+  return ident;
+}
+function hashEnvironmentVariables(prefix, variables) {
+  const hash = (0,external_node_crypto_namespaceObject.createHash)("sha256");
+  for (const varName of variables) {
+    let value = process.env[varName];
+    if (value === void 0) {
+      if (OPTIONAL_VARIABLES.includes(varName)) {
+        core.debug(
+          `Optional environment variable not set: ${varName} -- substituting with the variable name`
+        );
+        value = varName;
+      } else {
+        core.debug(
+          `Environment variable not set: ${varName} -- can't generate the requested identity`
+        );
+        return void 0;
+      }
+    }
+    hash.update(value);
+    hash.update("\0");
+  }
+  return `${prefix}-${hash.digest("hex")}`;
+}
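+// Illustrative sketch of the hashing above (assuming the listed environment variables are
+// set): each value is appended with a NUL separator and SHA-256 hashed, so the same
+// repository/workflow/job combination always produces the same opaque identifier.
+//
+//   // hashEnvironmentVariables("GHO", ["GITHUB_SERVER_URL", "GITHUB_REPOSITORY_OWNER",
+//   //   "GITHUB_REPOSITORY_OWNER_ID"]) -> "GHO-<64 hex chars>",
+//   // or undefined when a required (non-optional) variable is unset.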
+
+// src/ids-host.ts
+
+
+
+var DEFAULT_LOOKUP = "_detsys_ids._tcp.install.determinate.systems.";
+var ALLOWED_SUFFIXES = [
+  ".install.determinate.systems",
+  ".install.detsys.dev"
+];
+var DEFAULT_IDS_HOST = "https://install.determinate.systems";
+var LOOKUP = process.env["IDS_LOOKUP"] ?? DEFAULT_LOOKUP;
+var DEFAULT_TIMEOUT = 1e4;
+var IdsHost = class {
+  constructor(idsProjectName, diagnosticsSuffix, runtimeDiagnosticsUrl) {
+    this.idsProjectName = idsProjectName;
+    this.diagnosticsSuffix = diagnosticsSuffix;
+    this.runtimeDiagnosticsUrl = runtimeDiagnosticsUrl;
+    this.client = void 0;
+  }
+  async getGot(recordFailoverCallback) {
+    if (this.client === void 0) {
+      this.client = got_dist_source.extend({
+        timeout: {
+          request: DEFAULT_TIMEOUT
+        },
+        retry: {
+          limit: Math.max((await this.getUrlsByPreference()).length, 3),
+          methods: ["GET", "HEAD"]
+        },
+        hooks: {
+          beforeRetry: [
+            async (error3, retryCount) => {
+              const prevUrl = await this.getRootUrl();
+              this.markCurrentHostBroken();
+              const nextUrl = await this.getRootUrl();
+              if (recordFailoverCallback !== void 0) {
+                recordFailoverCallback(error3, prevUrl, nextUrl);
+              }
+              core.info(
+                `Retrying after error ${error3.code}, retry #: ${retryCount}`
+              );
+            }
+          ],
+          beforeRequest: [
+            async (options) => {
+              const currentUrl = options.url;
+              if (this.isUrlSubjectToDynamicUrls(currentUrl)) {
+                const newUrl = new URL(currentUrl);
+                const url = await this.getRootUrl();
+                newUrl.host = url.host;
+                options.url = newUrl;
+                core.debug(`Transmuted ${currentUrl} into ${newUrl}`);
+              } else {
+                core.debug(`No transmutations on ${currentUrl}`);
+              }
+            }
+          ]
+        }
+      });
+    }
+    return this.client;
+  }
+  markCurrentHostBroken() {
+    this.prioritizedURLs?.shift();
+  }
+  setPrioritizedUrls(urls) {
+    this.prioritizedURLs = urls;
+  }
+  isUrlSubjectToDynamicUrls(url) {
+    if (url.origin === DEFAULT_IDS_HOST) {
+      return true;
+    }
+    for (const suffix of ALLOWED_SUFFIXES) {
+      if (url.host.endsWith(suffix)) {
+        return true;
+      }
+    }
+    return false;
+  }
+  async getDynamicRootUrl() {
+    const idsHost = process.env["IDS_HOST"];
+    if (idsHost !== void 0) {
+      try {
+        return new URL(idsHost);
+      } catch (err) {
+        core.error(
+          `IDS_HOST environment variable is not a valid URL. Ignoring. ${stringifyError(err)}`
+        );
+      }
+    }
+    let url = void 0;
+    try {
+      const urls = await this.getUrlsByPreference();
+      url = urls[0];
+    } catch (err) {
+      core.error(
+        `Error collecting IDS URLs by preference: ${stringifyError(err)}`
+      );
+    }
+    if (url === void 0) {
+      return void 0;
+    } else {
+      return new URL(url);
+    }
+  }
+  async getRootUrl() {
+    const url = await this.getDynamicRootUrl();
+    if (url === void 0) {
+      return new URL(DEFAULT_IDS_HOST);
+    }
+    return url;
+  }
+  async getDiagnosticsUrl() {
+    if (this.runtimeDiagnosticsUrl === "") {
+      return void 0;
+    }
+    if (this.runtimeDiagnosticsUrl !== "-" && this.runtimeDiagnosticsUrl !== void 0) {
+      try {
+        return new URL(this.runtimeDiagnosticsUrl);
+      } catch (err) {
+        core.info(
+          `User-provided diagnostic endpoint ignored: not a valid URL: ${stringifyError(err)}`
+        );
+      }
+    }
+    try {
+      const diagnosticUrl = await this.getRootUrl();
+      diagnosticUrl.pathname += this.idsProjectName;
+      diagnosticUrl.pathname += "/";
+      diagnosticUrl.pathname += this.diagnosticsSuffix || "diagnostics";
+      return diagnosticUrl;
+    } catch (err) {
+      core.info(
+        `Generated diagnostic endpoint ignored, and diagnostics are disabled: not a valid URL: ${stringifyError(err)}`
+      );
+      return void 0;
+    }
+  }
+  async getUrlsByPreference() {
+    if (this.prioritizedURLs === void 0) {
+      this.prioritizedURLs = orderRecordsByPriorityWeight(
+        await discoverServiceRecords()
+      ).flatMap((record) => recordToUrl(record) || []);
+    }
+    return this.prioritizedURLs;
+  }
+};
+function recordToUrl(record) {
+  const urlStr = `https://${record.name}:${record.port}`;
+  try {
+    return new URL(urlStr);
+  } catch (err) {
+    core.debug(
+      `Record ${JSON.stringify(record)} produced an invalid URL: ${urlStr} (${err})`
+    );
+    return void 0;
+  }
+}
+async function discoverServiceRecords() {
+  return await discoverServicesStub((0,external_node_dns_promises_namespaceObject.resolveSrv)(LOOKUP), 1e3);
+}
+async function discoverServicesStub(lookup, timeout) {
+  const defaultFallback = new Promise(
+    (resolve, _reject) => {
+      setTimeout(resolve, timeout, []);
+    }
+  );
+  let records;
+  try {
+    records = await Promise.race([lookup, defaultFallback]);
+  } catch (reason) {
+    core.debug(`Error resolving SRV records: ${stringifyError(reason)}`);
+    records = [];
+  }
+  const acceptableRecords = records.filter((record) => {
+    for (const suffix of ALLOWED_SUFFIXES) {
+      if (record.name.endsWith(suffix)) {
+        return true;
+      }
+    }
+    core.debug(
+      `Unacceptable domain due to an invalid suffix: ${record.name}`
+    );
+    return false;
+  });
+  if (acceptableRecords.length === 0) {
+    core.debug(`No records found for ${LOOKUP}`);
+  } else {
+    core.debug(
+      `Resolved ${LOOKUP} to ${JSON.stringify(acceptableRecords)}`
+    );
+  }
+  return acceptableRecords;
+}
+function orderRecordsByPriorityWeight(records) {
+  const byPriorityWeight = /* @__PURE__ */ new Map();
+  for (const record of records) {
+    const existing = byPriorityWeight.get(record.priority);
+    if (existing) {
+      existing.push(record);
+    } else {
+      byPriorityWeight.set(record.priority, [record]);
+    }
+  }
+  const prioritizedRecords = [];
+  const keys = Array.from(byPriorityWeight.keys()).sort(
+    (a, b) => a - b
+  );
+  for (const priority of keys) {
+    const recordsByPrio = byPriorityWeight.get(priority);
+    if (recordsByPrio === void 0) {
+      continue;
+    }
+    prioritizedRecords.push(...weightedRandom(recordsByPrio));
+  }
+  return prioritizedRecords;
+}
+function weightedRandom(records) {
+  const scratchRecords = records.slice();
+  const result = [];
+  while (scratchRecords.length > 0) {
+    const weights = [];
+    {
+      for (let i = 0; i < scratchRecords.length; i++) {
+        weights.push(
+          scratchRecords[i].weight + (i > 0 ? scratchRecords[i - 1].weight : 0)
+        );
+      }
+    }
+    const point = Math.random() * weights[weights.length - 1];
+    for (let selectedIndex = 0; selectedIndex < weights.length; selectedIndex++) {
+      if (weights[selectedIndex] > point) {
+        result.push(scratchRecords.splice(selectedIndex, 1)[0]);
+        break;
+      }
+    }
+  }
+  return result;
+}
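+// Illustrative sketch of the ordering above (assuming two SRV records that share a
+// priority): records are grouped by priority (ascending), then drawn by weighted random
+// selection within each group, so higher-weight hosts tend to be tried first.
+//
+//   // [{ name: "a", priority: 10, weight: 5 }, { name: "b", priority: 10, weight: 1 }]
+//   //   -> usually ["a", "b"], occasionally ["b", "a"]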
+
+// src/inputs.ts
+var inputs_exports = {};
+__export(inputs_exports, {
+  getArrayOfStrings: () => getArrayOfStrings,
+  getArrayOfStringsOrNull: () => getArrayOfStringsOrNull,
+  getBool: () => getBool,
+  getBoolOrUndefined: () => getBoolOrUndefined,
+  getMultilineStringOrNull: () => getMultilineStringOrNull,
+  getNumberOrNull: () => getNumberOrNull,
+  getString: () => getString,
+  getStringOrNull: () => getStringOrNull,
+  getStringOrUndefined: () => getStringOrUndefined,
+  handleString: () => handleString
+});
+
+var getBool = (name) => {
+  return core.getBooleanInput(name);
+};
+var getBoolOrUndefined = (name) => {
+  if (getStringOrUndefined(name) === void 0) {
+    return void 0;
+  }
+  return core.getBooleanInput(name);
+};
+var getArrayOfStrings = (name, separator) => {
+  const original = getString(name);
+  return handleString(original, separator);
+};
+var getArrayOfStringsOrNull = (name, separator) => {
+  const original = getStringOrNull(name);
+  if (original === null) {
+    return null;
+  } else {
+    return handleString(original, separator);
+  }
+};
+var handleString = (input, separator) => {
+  const sepChar = separator === "comma" ? "," : /\s+/;
+  const trimmed = input.trim();
+  if (trimmed === "") {
+    return [];
+  }
+  return trimmed.split(sepChar).map((s) => s.trim());
+};
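+// Illustrative sketch of `handleString` (input values are placeholders):
+//   handleString("a, b, c", "comma")  -> ["a", "b", "c"]
+//   handleString("a b\n c", "space")  -> ["a", "b", "c"]  // non-"comma" separators split on whitespace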
+var getMultilineStringOrNull = (name) => {
+  const value = core.getMultilineInput(name);
+  if (value.length === 0) {
+    return null;
+  } else {
+    return value;
+  }
+};
+var getNumberOrNull = (name) => {
+  const value = core.getInput(name);
+  if (value === "") {
+    return null;
+  } else {
+    return Number(value);
+  }
+};
+var getString = (name) => {
+  return core.getInput(name);
+};
+var getStringOrNull = (name) => {
+  const value = core.getInput(name);
+  if (value === "") {
+    return null;
+  } else {
+    return value;
+  }
+};
+var getStringOrUndefined = (name) => {
+  const value = core.getInput(name);
+  if (value === "") {
+    return void 0;
+  } else {
+    return value;
+  }
+};
+
+// src/platform.ts
+var platform_exports = {};
+__export(platform_exports, {
+  getArchOs: () => getArchOs,
+  getNixPlatform: () => getNixPlatform
+});
+
+function getArchOs() {
+  const envArch = process.env.RUNNER_ARCH;
+  const envOs = process.env.RUNNER_OS;
+  if (envArch && envOs) {
+    return `${envArch}-${envOs}`;
+  } else {
+    core.error(
+      `Can't identify the platform: RUNNER_ARCH or RUNNER_OS undefined (${envArch}-${envOs})`
+    );
+    throw new Error("RUNNER_ARCH and/or RUNNER_OS is not defined");
+  }
+}
+function getNixPlatform(archOs) {
+  const archOsMap = /* @__PURE__ */ new Map([
+    ["X64-macOS", "x86_64-darwin"],
+    ["ARM64-macOS", "aarch64-darwin"],
+    ["X64-Linux", "x86_64-linux"],
+    ["ARM64-Linux", "aarch64-linux"]
+  ]);
+  const mappedTo = archOsMap.get(archOs);
+  if (mappedTo) {
+    return mappedTo;
+  } else {
+    core.error(
+      `ArchOs (${archOs}) doesn't map to a supported Nix platform.`
+    );
+    throw new Error(
+      `Cannot convert ArchOs (${archOs}) to a supported Nix platform.`
+    );
+  }
+}
+
+// src/sourcedef.ts
+
+function constructSourceParameters(legacyPrefix) {
+  return {
+    path: noisilyGetInput("path", legacyPrefix),
+    url: noisilyGetInput("url", legacyPrefix),
+    tag: noisilyGetInput("tag", legacyPrefix),
+    pr: noisilyGetInput("pr", legacyPrefix),
+    branch: noisilyGetInput("branch", legacyPrefix),
+    revision: noisilyGetInput("revision", legacyPrefix)
+  };
+}
+function noisilyGetInput(suffix, legacyPrefix) {
+  const preferredInput = getStringOrUndefined(`source-${suffix}`);
+  if (!legacyPrefix) {
+    return preferredInput;
+  }
+  const legacyInput = getStringOrUndefined(`${legacyPrefix}-${suffix}`);
+  if (preferredInput && legacyInput) {
+    core.warning(
+      `The supported option source-${suffix} and the legacy option ${legacyPrefix}-${suffix} are both set. Preferring source-${suffix}. Please stop setting ${legacyPrefix}-${suffix}.`
+    );
+    return preferredInput;
+  } else if (legacyInput) {
+    core.warning(
+      `The legacy option ${legacyPrefix}-${suffix} is set. Please migrate to source-${suffix}.`
+    );
+    return legacyInput;
+  } else {
+    return preferredInput;
+  }
+}
+
+// src/index.ts
+
+
+
+
+
+
+
+
+
+
+
+
+
+var EVENT_BACKTRACES = "backtrace";
+var EVENT_EXCEPTION = "exception";
+var EVENT_ARTIFACT_CACHE_HIT = "artifact_cache_hit";
+var EVENT_ARTIFACT_CACHE_MISS = "artifact_cache_miss";
+var EVENT_ARTIFACT_CACHE_PERSIST = "artifact_cache_persist";
+var EVENT_PREFLIGHT_REQUIRE_NIX_DENIED = "preflight-require-nix-denied";
+var FACT_ARTIFACT_FETCHED_FROM_CACHE = "artifact_fetched_from_cache";
+var FACT_ENDED_WITH_EXCEPTION = "ended_with_exception";
+var FACT_FINAL_EXCEPTION = "final_exception";
+var FACT_OS = "$os";
+var FACT_OS_VERSION = "$os_version";
+var FACT_SOURCE_URL = "source_url";
+var FACT_SOURCE_URL_ETAG = "source_url_etag";
+var FACT_NIX_LOCATION = "nix_location";
+var FACT_NIX_STORE_TRUST = "nix_store_trusted";
+var FACT_NIX_STORE_VERSION = "nix_store_version";
+var FACT_NIX_STORE_CHECK_METHOD = "nix_store_check_method";
+var FACT_NIX_STORE_CHECK_ERROR = "nix_store_check_error";
+var STATE_KEY_EXECUTION_PHASE = "detsys_action_execution_phase";
+var STATE_KEY_NIX_NOT_FOUND = "detsys_action_nix_not_found";
+var STATE_NOT_FOUND = "not-found";
+var STATE_KEY_CROSS_PHASE_ID = "detsys_cross_phase_id";
+var STATE_BACKTRACE_START_TIMESTAMP = "detsys_backtrace_start_timestamp";
+var DIAGNOSTIC_ENDPOINT_TIMEOUT_MS = 1e4;
+var CHECK_IN_ENDPOINT_TIMEOUT_MS = 1e3;
+var DetSysAction = class {
+  determineExecutionPhase() {
+    const currentPhase = core.getState(STATE_KEY_EXECUTION_PHASE);
+    if (currentPhase === "") {
+      core.saveState(STATE_KEY_EXECUTION_PHASE, "post");
+      return "main";
+    } else {
+      return "post";
+    }
+  }
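+  // Illustrative walk-through of the phase detection above (assuming the standard
+  // GitHub Actions main/post lifecycle, where saved state persists between steps):
+  //
+  //   // main step:  core.getState(STATE_KEY_EXECUTION_PHASE) === ""     -> "main" (saves "post")
+  //   // post step:  core.getState(STATE_KEY_EXECUTION_PHASE) === "post" -> "post"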
+  constructor(actionOptions) {
+    this.actionOptions = makeOptionsConfident(actionOptions);
+    this.idsHost = new IdsHost(
+      this.actionOptions.idsProjectName,
+      actionOptions.diagnosticsSuffix,
+      // Note: we don't use actionsCore.getInput('diagnostic-endpoint') on purpose:
+      // getInput silently converts absent data to an empty string.
+      process.env["INPUT_DIAGNOSTIC-ENDPOINT"]
+    );
+    this.exceptionAttachments = /* @__PURE__ */ new Map();
+    this.nixStoreTrust = "unknown";
+    this.strictMode = getBool("_internal-strict-mode");
+    if (getBoolOrUndefined(
+      "_internal-obliterate-actions-id-token-request-variables"
+    ) === true) {
+      process.env["ACTIONS_ID_TOKEN_REQUEST_URL"] = void 0;
+      process.env["ACTIONS_ID_TOKEN_REQUEST_TOKEN"] = void 0;
+    }
+    this.features = {};
+    this.featureEventMetadata = {};
+    this.events = [];
+    this.getCrossPhaseId();
+    this.collectBacktraceSetup();
+    this.facts = {
+      $lib: "idslib",
+      $lib_version: version,
+      project: this.actionOptions.name,
+      ids_project: this.actionOptions.idsProjectName
+    };
+    const params = [
+      ["github_action_ref", "GITHUB_ACTION_REF"],
+      ["github_action_repository", "GITHUB_ACTION_REPOSITORY"],
+      ["github_event_name", "GITHUB_EVENT_NAME"],
+      ["$os", "RUNNER_OS"],
+      ["arch", "RUNNER_ARCH"]
+    ];
+    for (const [target, env] of params) {
+      const value = process.env[env];
+      if (value) {
+        this.facts[target] = value;
+      }
+    }
+    this.identity = identify(this.actionOptions.name);
+    this.archOs = getArchOs();
+    this.nixSystem = getNixPlatform(this.archOs);
+    this.facts.arch_os = this.archOs;
+    this.facts.nix_system = this.nixSystem;
+    {
+      getDetails().then((details) => {
+        if (details.name !== "unknown") {
+          this.addFact(FACT_OS, details.name);
+        }
+        if (details.version !== "unknown") {
+          this.addFact(FACT_OS_VERSION, details.version);
+        }
+      }).catch((e) => {
+        core.debug(
+          `Failure getting platform details: ${stringifyError2(e)}`
+        );
+      });
+    }
+    this.executionPhase = this.determineExecutionPhase();
+    this.facts.execution_phase = this.executionPhase;
+    if (this.actionOptions.fetchStyle === "gh-env-style") {
+      this.architectureFetchSuffix = this.archOs;
+    } else if (this.actionOptions.fetchStyle === "nix-style") {
+      this.architectureFetchSuffix = this.nixSystem;
+    } else if (this.actionOptions.fetchStyle === "universal") {
+      this.architectureFetchSuffix = "universal";
+    } else {
+      throw new Error(
+        `fetchStyle ${this.actionOptions.fetchStyle} is not a valid style`
+      );
+    }
+    this.sourceParameters = constructSourceParameters(
+      this.actionOptions.legacySourcePrefix
+    );
+    this.recordEvent(`begin_${this.executionPhase}`);
+  }
+  /**
+   * Attach a file to the diagnostics data in error conditions.
+   *
+   * The file at `location` doesn't need to exist when stapleFile is called.
+   *
+   * If the file doesn't exist or is unreadable when the attachment is stapled, the JS error will be stored in a context value at `staple_failure_{name}`.
+   * If the file is readable, its contents will be stored in a context value at `staple_value_{name}`.
+   */
+  stapleFile(name, location) {
+    this.exceptionAttachments.set(name, location);
+  }
+  /**
+   * Execute the Action as defined.
+   */
+  execute() {
+    this.executeAsync().catch((error3) => {
+      console.log(error3);
+      process.exitCode = 1;
+    });
+  }
+  getTemporaryName() {
+    const tmpDir = process.env["RUNNER_TEMP"] || (0,external_node_os_.tmpdir)();
+    return external_node_path_namespaceObject.join(tmpDir, `${this.actionOptions.name}-${(0,external_node_crypto_namespaceObject.randomUUID)()}`);
+  }
+  addFact(key, value) {
+    this.facts[key] = value;
+  }
+  async getDiagnosticsUrl() {
+    return await this.idsHost.getDiagnosticsUrl();
+  }
+  getUniqueId() {
+    return this.identity.run_differentiator || process.env.RUNNER_TRACKING_ID || (0,external_node_crypto_namespaceObject.randomUUID)();
+  }
+  // This ID is saved in the action's state so that it persists across the main and post phases
+  getCrossPhaseId() {
+    let crossPhaseId = core.getState(STATE_KEY_CROSS_PHASE_ID);
+    if (crossPhaseId === "") {
+      crossPhaseId = (0,external_node_crypto_namespaceObject.randomUUID)();
+      core.saveState(STATE_KEY_CROSS_PHASE_ID, crossPhaseId);
+    }
+    return crossPhaseId;
+  }
+  getCorrelationHashes() {
+    return this.identity;
+  }
+  recordEvent(eventName, context = {}) {
+    const prefixedName = eventName === "$feature_flag_called" ? eventName : `${this.actionOptions.eventPrefix}${eventName}`;
+    this.events.push({
+      event_name: prefixedName,
+      context,
+      correlation: this.identity,
+      facts: this.facts,
+      features: this.featureEventMetadata,
+      timestamp: /* @__PURE__ */ new Date(),
+      uuid: (0,external_node_crypto_namespaceObject.randomUUID)()
+    });
+  }
+  /**
+   * Unpacks the closure returned by `fetchArtifact()`, imports the
+   * contents into the Nix store, and returns the path of the executable at
+   * `/nix/store/STORE_PATH/bin/${bin}`.
+   */
+  async unpackClosure(bin) {
+    const artifact = await this.fetchArtifact();
+    const { stdout } = await (0,external_node_util_.promisify)(external_node_child_process_namespaceObject.exec)(
+      `cat "${artifact}" | xz -d | nix-store --import`
+    );
+    const paths = stdout.split(external_node_os_.EOL);
+    const lastPath = paths.at(-2);
+    return `${lastPath}/bin/${bin}`;
+  }
+  /**
+   * Fetches the executable at the URL determined by the `source-*` inputs and
+   * other facts, `chmod`s it, and returns the path to the executable on disk.
+   */
+  async fetchExecutable() {
+    const binaryPath = await this.fetchArtifact();
+    await (0,promises_namespaceObject.chmod)(binaryPath, promises_namespaceObject.constants.S_IXUSR | promises_namespaceObject.constants.S_IXGRP);
+    return binaryPath;
+  }
+  get isMain() {
+    return this.executionPhase === "main";
+  }
+  get isPost() {
+    return this.executionPhase === "post";
+  }
+  async executeAsync() {
+    try {
+      await this.checkIn();
+      process.env.DETSYS_CORRELATION = JSON.stringify(
+        this.getCorrelationHashes()
+      );
+      if (!await this.preflightRequireNix()) {
+        this.recordEvent(EVENT_PREFLIGHT_REQUIRE_NIX_DENIED);
+        return;
+      } else {
+        await this.preflightNixStoreInfo();
+        this.addFact(FACT_NIX_STORE_TRUST, this.nixStoreTrust);
+      }
+      if (this.isMain) {
+        await this.main();
+      } else if (this.isPost) {
+        await this.post();
+      }
+      this.addFact(FACT_ENDED_WITH_EXCEPTION, false);
+    } catch (e) {
+      this.addFact(FACT_ENDED_WITH_EXCEPTION, true);
+      const reportable = stringifyError2(e);
+      this.addFact(FACT_FINAL_EXCEPTION, reportable);
+      if (this.isPost) {
+        core.warning(reportable);
+      } else {
+        core.setFailed(reportable);
+      }
+      const doGzip = (0,external_node_util_.promisify)(external_node_zlib_.gzip);
+      const exceptionContext = /* @__PURE__ */ new Map();
+      for (const [attachmentLabel, filePath] of this.exceptionAttachments) {
+        try {
+          const logText = (0,external_node_fs_namespaceObject.readFileSync)(filePath);
+          const buf = await doGzip(logText);
+          exceptionContext.set(
+            `staple_value_${attachmentLabel}`,
+            buf.toString("base64")
+          );
+        } catch (innerError) {
+          exceptionContext.set(
+            `staple_failure_${attachmentLabel}`,
+            stringifyError2(innerError)
+          );
+        }
+      }
+      this.recordEvent(EVENT_EXCEPTION, Object.fromEntries(exceptionContext));
+    } finally {
+      if (this.isPost) {
+        await this.collectBacktraces();
+      }
+      await this.complete();
+    }
+  }
+  async getClient() {
+    return await this.idsHost.getGot(
+      (incitingError, prevUrl, nextUrl) => {
+        this.recordPlausibleTimeout(incitingError);
+        this.recordEvent("ids-failover", {
+          previousUrl: prevUrl.toString(),
+          nextUrl: nextUrl.toString()
+        });
+      }
+    );
+  }
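+  // Applies the check-in response: stores the returned feature flags for
+  // getFeature(), and prints any active incidents or scheduled maintenance
+  // from the status page into the job log.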
+  async checkIn() {
+    const checkin = await this.requestCheckIn();
+    if (checkin === void 0) {
+      return;
+    }
+    this.features = checkin.options;
+    for (const [key, feature] of Object.entries(this.features)) {
+      this.featureEventMetadata[key] = feature.variant;
+    }
+    const impactSymbol = /* @__PURE__ */ new Map([
+      ["none", "\u26AA"],
+      ["maintenance", "\u{1F6E0}\uFE0F"],
+      ["minor", "\u{1F7E1}"],
+      ["major", "\u{1F7E0}"],
+      ["critical", "\u{1F534}"]
+    ]);
+    const defaultImpactSymbol = "\u{1F535}";
+    if (checkin.status !== null) {
+      const summaries = [];
+      for (const incident of checkin.status.incidents) {
+        summaries.push(
+          `${impactSymbol.get(incident.impact) || defaultImpactSymbol} ${incident.status.replace("_", " ")}: ${incident.name} (${incident.shortlink})`
+        );
+      }
+      for (const maintenance of checkin.status.scheduled_maintenances) {
+        summaries.push(
+          `${impactSymbol.get(maintenance.impact) || defaultImpactSymbol} ${maintenance.status.replace("_", " ")}: ${maintenance.name} (${maintenance.shortlink})`
+        );
+      }
+      if (summaries.length > 0) {
+        core.info(
+          // Bright red, Bold, Underline
+          `${"\x1B[0;31m"}${"\x1B[1m"}${"\x1B[4m"}${checkin.status.page.name} Status`
+        );
+        for (const notice of summaries) {
+          core.info(notice);
+        }
+        core.info(`See: ${checkin.status.page.url}`);
+        core.info(``);
+      }
+    }
+  }
+  getFeature(name) {
+    if (!this.features.hasOwnProperty(name)) {
+      return void 0;
+    }
+    const result = this.features[name];
+    if (result === void 0) {
+      return void 0;
+    }
+    this.recordEvent("$feature_flag_called", {
+      $feature_flag: name,
+      $feature_flag_response: result.variant
+    });
+    return result;
+  }
+  /**
+   * Check in to install.determinate.systems, to accomplish three things:
+   *
+   * 1. Preflight the server selected from IdsHost, to increase the chances of success.
+   * 2. Fetch any incidents and maintenance events to let users know in case things are weird.
+   * 3. Get feature flag data so we can gently roll out new features.
+   */
+  async requestCheckIn() {
+    for (let attemptsRemaining = 5; attemptsRemaining > 0; attemptsRemaining--) {
+      const checkInUrl = await this.getCheckInUrl();
+      if (checkInUrl === void 0) {
+        return void 0;
+      }
+      try {
+        core.debug(`Preflighting via ${checkInUrl}`);
+        checkInUrl.searchParams.set("ci", "github");
+        checkInUrl.searchParams.set(
+          "correlation",
+          JSON.stringify(this.identity)
+        );
+        return (await this.getClient()).get(checkInUrl, {
+          timeout: {
+            request: CHECK_IN_ENDPOINT_TIMEOUT_MS
+          }
+        }).json();
+      } catch (e) {
+        this.recordPlausibleTimeout(e);
+        core.debug(`Error checking in: ${stringifyError2(e)}`);
+        this.idsHost.markCurrentHostBroken();
+      }
+    }
+    return void 0;
+  }
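+  // When the error is a TimeoutError, records a "timeout" event carrying the
+  // request URL, retry count, and the finite per-phase request timings.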
+  recordPlausibleTimeout(e) {
+    if (e instanceof TimeoutError && "timings" in e && "request" in e) {
+      const reportContext = {
+        url: e.request.requestUrl?.toString(),
+        retry_count: e.request.retryCount
+      };
+      for (const [key, value] of Object.entries(e.timings.phases)) {
+        if (Number.isFinite(value)) {
+          reportContext[`timing_phase_${key}`] = value;
+        }
+      }
+      this.recordEvent("timeout", reportContext);
+    }
+  }
+  /**
+   * Fetch an artifact, such as a tarball, from the location determined by the
+   * `source-*` inputs. If `source-binary` is specified, this will return a path
+   * to a binary on disk; otherwise, the artifact will be downloaded from the
+   * URL determined by the other `source-*` inputs (`source-url`, `source-pr`,
+   * etc.).
+   */
+  async fetchArtifact() {
+    const sourceBinary = getStringOrNull("source-binary");
+    if (sourceBinary !== null && sourceBinary !== "") {
+      core.debug(`Using the provided source binary at ${sourceBinary}`);
+      return sourceBinary;
+    }
+    core.startGroup(
+      `Downloading ${this.actionOptions.name} for ${this.architectureFetchSuffix}`
+    );
+    try {
+      core.info(`Fetching from ${await this.getSourceUrl()}`);
+      const correlatedUrl = await this.getSourceUrl();
+      correlatedUrl.searchParams.set("ci", "github");
+      correlatedUrl.searchParams.set(
+        "correlation",
+        JSON.stringify(this.identity)
+      );
+      const versionCheckup = await (await this.getClient()).head(correlatedUrl);
+      if (versionCheckup.headers.etag) {
+        const v = versionCheckup.headers.etag;
+        this.addFact(FACT_SOURCE_URL_ETAG, v);
+        core.debug(
+          `Checking the tool cache for ${await this.getSourceUrl()} at ${v}`
+        );
+        const cached = await this.getCachedVersion(v);
+        if (cached) {
+          this.facts[FACT_ARTIFACT_FETCHED_FROM_CACHE] = true;
+          core.debug(`Tool cache hit.`);
+          return cached;
+        }
+      }
+      this.facts[FACT_ARTIFACT_FETCHED_FROM_CACHE] = false;
+      core.debug(
+        `No match from the cache, re-fetching from the redirect: ${versionCheckup.url}`
+      );
+      const destFile = this.getTemporaryName();
+      const fetchStream = await this.downloadFile(
+        new URL(versionCheckup.url),
+        destFile
+      );
+      if (fetchStream.response?.headers.etag) {
+        const v = fetchStream.response.headers.etag;
+        try {
+          await this.saveCachedVersion(v, destFile);
+        } catch (e) {
+          core.debug(`Error caching the artifact: ${stringifyError2(e)}`);
+        }
+      }
+      return destFile;
+    } catch (e) {
+      this.recordPlausibleTimeout(e);
+      throw e;
+    } finally {
+      core.endGroup();
+    }
+  }
+  /**
+   * A helper for failing the run on an error only when strict mode is enabled.
+   * This is intended only for CI environments testing Actions themselves.
+   */
+  failOnError(msg) {
+    if (this.strictMode) {
+      core.setFailed(`strict mode failure: ${msg}`);
+    }
+  }
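+  // Streams `url` into `destination`; whenever the HTTP client emits a `retry`
+  // event, the partially written file is discarded by opening a fresh write
+  // stream and the download restarts on the newly created stream.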
+  async downloadFile(url, destination) {
+    const client = await this.getClient();
+    return new Promise((resolve, reject) => {
+      let writeStream;
+      let failed = false;
+      const retry = (stream) => {
+        if (writeStream) {
+          writeStream.destroy();
+        }
+        writeStream = (0,external_node_fs_namespaceObject.createWriteStream)(destination, {
+          encoding: "binary",
+          mode: 493
+        });
+        writeStream.once("error", (error3) => {
+          failed = true;
+          reject(error3);
+        });
+        writeStream.on("finish", () => {
+          if (!failed) {
+            resolve(stream);
+          }
+        });
+        stream.once("retry", (_count, _error, createRetryStream) => {
+          retry(createRetryStream());
+        });
+        stream.pipe(writeStream);
+      };
+      retry(client.stream(url));
+    });
+  }
+  async complete() {
+    this.recordEvent(`complete_${this.executionPhase}`);
+    await this.submitEvents();
+  }
+  async getCheckInUrl() {
+    const checkInUrl = await this.idsHost.getDynamicRootUrl();
+    if (checkInUrl === void 0) {
+      return void 0;
+    }
+    checkInUrl.pathname += "check-in";
+    return checkInUrl;
+  }
+  async getSourceUrl() {
+    const p = this.sourceParameters;
+    if (p.url) {
+      this.addFact(FACT_SOURCE_URL, p.url);
+      return new URL(p.url);
+    }
+    const fetchUrl = await this.idsHost.getRootUrl();
+    fetchUrl.pathname += this.actionOptions.idsProjectName;
+    if (p.tag) {
+      fetchUrl.pathname += `/tag/${p.tag}`;
+    } else if (p.pr) {
+      fetchUrl.pathname += `/pr/${p.pr}`;
+    } else if (p.branch) {
+      fetchUrl.pathname += `/branch/${p.branch}`;
+    } else if (p.revision) {
+      fetchUrl.pathname += `/rev/${p.revision}`;
+    } else {
+      fetchUrl.pathname += `/stable`;
+    }
+    fetchUrl.pathname += `/${this.architectureFetchSuffix}`;
+    this.addFact(FACT_SOURCE_URL, fetchUrl.toString());
+    return fetchUrl;
+  }
+  cacheKey(version2) {
+    const cleanedVersion = version2.replace(/[^a-zA-Z0-9-+.]/g, "");
+    return `determinatesystem-${this.actionOptions.name}-${this.architectureFetchSuffix}-${cleanedVersion}`;
+  }
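+  // Both cache helpers below chdir into a scratch directory and temporarily
+  // clear GITHUB_WORKSPACE (restoring it afterwards) so that the cache client
+  // resolves the relative artifact path against that directory instead of the
+  // checked-out workspace.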
+  async getCachedVersion(version2) {
+    const startCwd = process.cwd();
+    try {
+      const tempDir = this.getTemporaryName();
+      await (0,promises_namespaceObject.mkdir)(tempDir);
+      process.chdir(tempDir);
+      process.env.GITHUB_WORKSPACE_BACKUP = process.env.GITHUB_WORKSPACE;
+      delete process.env.GITHUB_WORKSPACE;
+      if (await cache.restoreCache(
+        [this.actionOptions.name],
+        this.cacheKey(version2),
+        [],
+        void 0,
+        true
+      )) {
+        this.recordEvent(EVENT_ARTIFACT_CACHE_HIT);
+        return `${tempDir}/${this.actionOptions.name}`;
+      }
+      this.recordEvent(EVENT_ARTIFACT_CACHE_MISS);
+      return void 0;
+    } finally {
+      process.env.GITHUB_WORKSPACE = process.env.GITHUB_WORKSPACE_BACKUP;
+      delete process.env.GITHUB_WORKSPACE_BACKUP;
+      process.chdir(startCwd);
+    }
+  }
+  async saveCachedVersion(version2, toolPath) {
+    const startCwd = process.cwd();
+    try {
+      const tempDir = this.getTemporaryName();
+      await (0,promises_namespaceObject.mkdir)(tempDir);
+      process.chdir(tempDir);
+      await (0,promises_namespaceObject.copyFile)(toolPath, `${tempDir}/${this.actionOptions.name}`);
+      process.env.GITHUB_WORKSPACE_BACKUP = process.env.GITHUB_WORKSPACE;
+      delete process.env.GITHUB_WORKSPACE;
+      await cache.saveCache(
+        [this.actionOptions.name],
+        this.cacheKey(version2),
+        void 0,
+        true
+      );
+      this.recordEvent(EVENT_ARTIFACT_CACHE_PERSIST);
+    } finally {
+      process.env.GITHUB_WORKSPACE = process.env.GITHUB_WORKSPACE_BACKUP;
+      delete process.env.GITHUB_WORKSPACE_BACKUP;
+      process.chdir(startCwd);
+    }
+  }
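+  // If DETSYS_BACKTRACE_COLLECTOR is present but empty, this instance claims
+  // backtrace collection by exporting its cross-phase ID into the variable and
+  // recording a start timestamp; collectBacktraces() below only reports when
+  // the variable still matches that ID.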
+  collectBacktraceSetup() {
+    if (process.env.DETSYS_BACKTRACE_COLLECTOR === "") {
+      core.exportVariable(
+        "DETSYS_BACKTRACE_COLLECTOR",
+        this.getCrossPhaseId()
+      );
+      core.saveState(STATE_BACKTRACE_START_TIMESTAMP, Date.now());
+    }
+  }
+  async collectBacktraces() {
+    try {
+      if (process.env.DETSYS_BACKTRACE_COLLECTOR !== this.getCrossPhaseId()) {
+        return;
+      }
+      const backtraces = await collectBacktraces(
+        this.actionOptions.binaryNamePrefixes,
+        parseInt(core.getState(STATE_BACKTRACE_START_TIMESTAMP))
+      );
+      core.debug(`Backtraces identified: ${backtraces.size}`);
+      if (backtraces.size > 0) {
+        this.recordEvent(EVENT_BACKTRACES, Object.fromEntries(backtraces));
+      }
+    } catch (innerError) {
+      core.debug(
+        `Error collecting backtraces: ${stringifyError2(innerError)}`
+      );
+    }
+  }
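+  // Walks PATH for an executable `nix` and records where (or whether) it was
+  // found, then applies the configured requireNix policy ("ignore", "warn", or
+  // "fail") when Nix is missing. The not-found result is saved to state so the
+  // post phase doesn't repeat the warning or failure.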
+  async preflightRequireNix() {
+    let nixLocation;
+    const pathParts = (process.env["PATH"] || "").split(":");
+    for (const location of pathParts) {
+      const candidateNix = external_node_path_namespaceObject.join(location, "nix");
+      try {
+        await promises_namespaceObject.access(candidateNix, promises_namespaceObject.constants.X_OK);
+        core.debug(`Found Nix at ${candidateNix}`);
+        nixLocation = candidateNix;
+        break;
+      } catch {
+        core.debug(`Nix not at ${candidateNix}`);
+      }
+    }
+    this.addFact(FACT_NIX_LOCATION, nixLocation || "");
+    if (this.actionOptions.requireNix === "ignore") {
+      return true;
+    }
+    const currentNotFoundState = core.getState(STATE_KEY_NIX_NOT_FOUND);
+    if (currentNotFoundState === STATE_NOT_FOUND) {
+      return false;
+    }
+    if (nixLocation !== void 0) {
+      return true;
+    }
+    core.saveState(STATE_KEY_NIX_NOT_FOUND, STATE_NOT_FOUND);
+    switch (this.actionOptions.requireNix) {
+      case "fail":
+        core.setFailed(
+          [
+            "This action can only be used when Nix is installed.",
+            "Add `- uses: DeterminateSystems/nix-installer-action@main` earlier in your workflow."
+          ].join(" ")
+        );
+        break;
+      case "warn":
+        core.warning(
+          [
+            "This action is in no-op mode because Nix is not installed.",
+            "Add `- uses: DeterminateSystems/nix-installer-action@main` earlier in your workflow."
+          ].join(" ")
+        );
+        break;
+    }
+    return false;
+  }
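+  // Probes the local Nix store with `nix store info --json`, falling back to
+  // the older `nix store ping --json`, and records the reported `trusted`
+  // field as the store trust level along with the store version.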
+  async preflightNixStoreInfo() {
+    let output = "";
+    const options = {};
+    options.silent = true;
+    options.listeners = {
+      stdout: (data) => {
+        output += data.toString();
+      }
+    };
+    try {
+      output = "";
+      await exec.exec("nix", ["store", "info", "--json"], options);
+      this.addFact(FACT_NIX_STORE_CHECK_METHOD, "info");
+    } catch {
+      try {
+        output = "";
+        await exec.exec("nix", ["store", "ping", "--json"], options);
+        this.addFact(FACT_NIX_STORE_CHECK_METHOD, "ping");
+      } catch {
+        this.addFact(FACT_NIX_STORE_CHECK_METHOD, "none");
+        return;
+      }
+    }
+    try {
+      const parsed = JSON.parse(output);
+      if (parsed.trusted === 1) {
+        this.nixStoreTrust = "trusted";
+      } else if (parsed.trusted === 0) {
+        this.nixStoreTrust = "untrusted";
+      } else if (parsed.trusted !== void 0) {
+        this.addFact(
+          FACT_NIX_STORE_CHECK_ERROR,
+          `Mysterious trusted value: ${JSON.stringify(parsed.trusted)}`
+        );
+      }
+      this.addFact(FACT_NIX_STORE_VERSION, JSON.stringify(parsed.version));
+    } catch (e) {
+      this.addFact(FACT_NIX_STORE_CHECK_ERROR, stringifyError2(e));
+    }
+  }
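+  // When a diagnostics endpoint is configured, posts the queued events to it
+  // and then clears the queue; send failures are logged at debug level rather
+  // than failing the job.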
+  async submitEvents() {
+    const diagnosticsUrl = await this.idsHost.getDiagnosticsUrl();
+    if (diagnosticsUrl === void 0) {
+      core.debug(
+        "Diagnostics are disabled. Not sending the following events:"
+      );
+      core.debug(JSON.stringify(this.events, void 0, 2));
+      return;
+    }
+    const batch = {
+      type: "eventlog",
+      sent_at: /* @__PURE__ */ new Date(),
+      events: this.events
+    };
+    try {
+      await (await this.getClient()).post(diagnosticsUrl, {
+        json: batch,
+        timeout: {
+          request: DIAGNOSTIC_ENDPOINT_TIMEOUT_MS
+        }
+      });
+    } catch (err) {
+      this.recordPlausibleTimeout(err);
+      core.debug(
+        `Error submitting diagnostics event to ${diagnosticsUrl}: ${stringifyError2(err)}`
+      );
+    }
+    this.events = [];
+  }
+};
+function stringifyError2(error3) {
+  return error3 instanceof Error || typeof error3 == "string" ? error3.toString() : JSON.stringify(error3);
+}
+function makeOptionsConfident(actionOptions) {
+  const idsProjectName = actionOptions.idsProjectName ?? actionOptions.name;
+  const finalOpts = {
+    name: actionOptions.name,
+    idsProjectName,
+    eventPrefix: actionOptions.eventPrefix || "action:",
+    fetchStyle: actionOptions.fetchStyle,
+    legacySourcePrefix: actionOptions.legacySourcePrefix,
+    requireNix: actionOptions.requireNix,
+    binaryNamePrefixes: actionOptions.binaryNamePrefixes ?? [
+      "nix",
+      "determinate-nixd",
+      actionOptions.name
+    ]
+  };
+  core.debug("idslib options:");
+  core.debug(JSON.stringify(finalOpts, void 0, 2));
+  return finalOpts;
+}
+
+/*!
+ * linux-release-info
+ * Get Linux release info (distribution name, version, arch, release, etc.)
+ * from '/etc/os-release' or '/usr/lib/os-release' files and from native os
+ * module. On Windows and Darwin platforms it only returns common node os module
+ * info (platform, hostname, release, and arch)
+ *
+ * Licensed under MIT
+ * Copyright (c) 2018-2020 [Samuel Carreira]
+ */
+//# sourceMappingURL=index.js.map
+;// CONCATENATED MODULE: ./dist/index.js
+// src/nix.ts
+function makeNixCommandArgs(nixOptions, flakeInputs, commitMessage) {
+  const flakeInputFlags = flakeInputs.flatMap((input) => [
+    "--update-input",
+    input
+  ]);
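+  // NOTE(cole-h): In Nix 2.23.0 and later, `commit-lockfile-summary` became an alias for the
+  // setting `commit-lock-file-summary` (https://github.com/NixOS/nix/pull/10691), and Nix does
+  // not treat aliases the same as their "real" setting: aliases must be configured via
+  // `--option <alias name> <option value>` (https://github.com/NixOS/nix/issues/10989).
+  // Going the long way via `--option` supports versions both before and after Nix 2.23.0.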
+  const lockfileSummaryFlags = [
+    "--option",
+    "commit-lockfile-summary",
+    commitMessage
+  ];
+  const updateLockMechanism = flakeInputFlags.length === 0 ? "update" : "lock";
+  return nixOptions.concat(["flake", updateLockMechanism]).concat(flakeInputFlags).concat(["--commit-lock-file"]).concat(lockfileSummaryFlags);
+}
+
+// src/index.ts
+
+
+
+var EVENT_EXECUTION_FAILURE = "execution_failure";
+var UpdateFlakeLockAction = class extends DetSysAction {
+  constructor() {
+    super({
+      name: "update-flake-lock",
+      fetchStyle: "universal",
+      requireNix: "fail"
+    });
+    this.commitMessage = inputs_exports.getString("commit-msg");
+    this.flakeInputs = inputs_exports.getArrayOfStrings("inputs", "space");
+    this.nixOptions = inputs_exports.getArrayOfStrings("nix-options", "space");
+    this.pathToFlakeDir = inputs_exports.getStringOrNull("path-to-flake-dir");
+  }
+  async main() {
+    await this.update();
+  }
+  // No post phase
+  async post() {
+  }
+  async update() {
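+    // Builds a Nix command of this form:
+    //   nix ${nix options} flake ${"update" or "lock"} ${--update-input flags}
+    //     --commit-lock-file --option commit-lockfile-summary ${commit message}
+    // For example:
+    //   nix flake lock --update-input nixpkgs --commit-lock-file --option commit-lockfile-summary "updated flake.lock"
+    //   nix flake update --commit-lock-file --option commit-lockfile-summary "updated flake.lock"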
+    const nixCommandArgs = makeNixCommandArgs(
+      this.nixOptions,
+      this.flakeInputs,
+      this.commitMessage
+    );
+    core.debug(
+      JSON.stringify({
+        options: this.nixOptions,
+        inputs: this.flakeInputs,
+        message: this.commitMessage,
+        args: nixCommandArgs
+      })
+    );
+    const execOptions = {
+      cwd: this.pathToFlakeDir !== null ? this.pathToFlakeDir : void 0
+    };
+    const exitCode = await exec.exec("nix", nixCommandArgs, execOptions);
+    if (exitCode !== 0) {
+      this.recordEvent(EVENT_EXECUTION_FAILURE, {
+        exitCode
+      });
+      core.setFailed(`non-zero exit code of ${exitCode} detected`);
+    } else {
+      core.info(`flake.lock file was successfully updated`);
+    }
+  }
+};
+function main() {
+  new UpdateFlakeLockAction().execute();
+}
+main();
+//# sourceMappingURL=index.js.map
diff --git a/dist/index.js.map b/dist/index.js.map
new file mode 100644
index 0000000..faa1733
--- /dev/null
+++ b/dist/index.js.map
@@ -0,0 +1 @@
+{"version":3,"sources":["../src/nix.ts","../src/index.ts"],"sourcesContent":["// Build the Nix args out of inputs from the Actions environment\nexport function makeNixCommandArgs(\n  nixOptions: string[],\n  flakeInputs: string[],\n  commitMessage: string,\n): string[] {\n  const flakeInputFlags = flakeInputs.flatMap((input) => [\n    \"--update-input\",\n    input,\n  ]);\n\n  // NOTE(cole-h): In Nix versions 2.23.0 and later, `commit-lockfile-summary` became an alias to\n  // the setting `commit-lock-file-summary` (https://github.com/NixOS/nix/pull/10691), and Nix does\n  // not treat aliases the same as their \"real\" setting by requiring setting aliases to be\n  // configured via `--option <alias name> <option value>`\n  // (https://github.com/NixOS/nix/issues/10989).\n  // So, we go the long way so that we can support versions both before and after Nix 2.23.0.\n  const lockfileSummaryFlags = [\n    \"--option\",\n    \"commit-lockfile-summary\",\n    commitMessage,\n  ];\n\n  const updateLockMechanism = flakeInputFlags.length === 0 ? \"update\" : \"lock\";\n\n  return nixOptions\n    .concat([\"flake\", updateLockMechanism])\n    .concat(flakeInputFlags)\n    .concat([\"--commit-lock-file\"])\n    .concat(lockfileSummaryFlags);\n}\n","import { makeNixCommandArgs } from \"./nix.js\";\nimport * as actionsCore from \"@actions/core\";\nimport * as actionsExec from \"@actions/exec\";\nimport { DetSysAction, inputs } from \"detsys-ts\";\n\nconst EVENT_EXECUTION_FAILURE = \"execution_failure\";\n\nclass UpdateFlakeLockAction extends DetSysAction {\n  private commitMessage: string;\n  private nixOptions: string[];\n  private flakeInputs: string[];\n  private pathToFlakeDir: string | null;\n\n  constructor() {\n    super({\n      name: \"update-flake-lock\",\n      fetchStyle: \"universal\",\n      requireNix: \"fail\",\n    });\n\n    this.commitMessage = inputs.getString(\"commit-msg\");\n    this.flakeInputs = inputs.getArrayOfStrings(\"inputs\", \"space\");\n    this.nixOptions = inputs.getArrayOfStrings(\"nix-options\", \"space\");\n    this.pathToFlakeDir = inputs.getStringOrNull(\"path-to-flake-dir\");\n  }\n\n  async main(): Promise<void> {\n    await this.update();\n  }\n\n  // No post phase\n  async post(): Promise<void> {}\n\n  async update(): Promise<void> {\n    // Nix command of this form:\n    // nix ${maybe nix options} flake ${\"update\" or \"lock\"} ${maybe --update-input flags} --commit-lock-file --commit-lockfile-summary ${commit message}\n    // Example commands:\n    // nix --extra-substituters https://example.com flake lock --update-input nixpkgs --commit-lock-file --commit-lockfile-summary \"updated flake.lock\"\n    // nix flake update --commit-lock-file --commit-lockfile-summary \"updated flake.lock\"\n    const nixCommandArgs: string[] = makeNixCommandArgs(\n      this.nixOptions,\n      this.flakeInputs,\n      this.commitMessage,\n    );\n\n    actionsCore.debug(\n      JSON.stringify({\n        options: this.nixOptions,\n        inputs: this.flakeInputs,\n        message: this.commitMessage,\n        args: nixCommandArgs,\n      }),\n    );\n\n    const execOptions: actionsExec.ExecOptions = {\n      cwd: this.pathToFlakeDir !== null ? 
this.pathToFlakeDir : undefined,\n    };\n\n    const exitCode = await actionsExec.exec(\"nix\", nixCommandArgs, execOptions);\n\n    if (exitCode !== 0) {\n      this.recordEvent(EVENT_EXECUTION_FAILURE, {\n        exitCode,\n      });\n      actionsCore.setFailed(`non-zero exit code of ${exitCode} detected`);\n    } else {\n      actionsCore.info(`flake.lock file was successfully updated`);\n    }\n  }\n}\n\nfunction main(): void {\n  new UpdateFlakeLockAction().execute();\n}\n\nmain();\n"],"mappings":";AACO,SAAS,mBACd,YACA,aACA,eACU;AACV,QAAM,kBAAkB,YAAY,QAAQ,CAAC,UAAU;AAAA,IACrD;AAAA,IACA;AAAA,EACF,CAAC;AAQD,QAAM,uBAAuB;AAAA,IAC3B;AAAA,IACA;AAAA,IACA;AAAA,EACF;AAEA,QAAM,sBAAsB,gBAAgB,WAAW,IAAI,WAAW;AAEtE,SAAO,WACJ,OAAO,CAAC,SAAS,mBAAmB,CAAC,EACrC,OAAO,eAAe,EACtB,OAAO,CAAC,oBAAoB,CAAC,EAC7B,OAAO,oBAAoB;AAChC;;;AC7BA,YAAY,iBAAiB;AAC7B,YAAY,iBAAiB;AAC7B,SAAS,cAAc,cAAc;AAErC,IAAM,0BAA0B;AAEhC,IAAM,wBAAN,cAAoC,aAAa;AAAA,EAM/C,cAAc;AACZ,UAAM;AAAA,MACJ,MAAM;AAAA,MACN,YAAY;AAAA,MACZ,YAAY;AAAA,IACd,CAAC;AAED,SAAK,gBAAgB,OAAO,UAAU,YAAY;AAClD,SAAK,cAAc,OAAO,kBAAkB,UAAU,OAAO;AAC7D,SAAK,aAAa,OAAO,kBAAkB,eAAe,OAAO;AACjE,SAAK,iBAAiB,OAAO,gBAAgB,mBAAmB;AAAA,EAClE;AAAA,EAEA,MAAM,OAAsB;AAC1B,UAAM,KAAK,OAAO;AAAA,EACpB;AAAA;AAAA,EAGA,MAAM,OAAsB;AAAA,EAAC;AAAA,EAE7B,MAAM,SAAwB;AAM5B,UAAM,iBAA2B;AAAA,MAC/B,KAAK;AAAA,MACL,KAAK;AAAA,MACL,KAAK;AAAA,IACP;AAEA,IAAY;AAAA,MACV,KAAK,UAAU;AAAA,QACb,SAAS,KAAK;AAAA,QACd,QAAQ,KAAK;AAAA,QACb,SAAS,KAAK;AAAA,QACd,MAAM;AAAA,MACR,CAAC;AAAA,IACH;AAEA,UAAM,cAAuC;AAAA,MAC3C,KAAK,KAAK,mBAAmB,OAAO,KAAK,iBAAiB;AAAA,IAC5D;AAEA,UAAM,WAAW,MAAkB,iBAAK,OAAO,gBAAgB,WAAW;AAE1E,QAAI,aAAa,GAAG;AAClB,WAAK,YAAY,yBAAyB;AAAA,QACxC;AAAA,MACF,CAAC;AACD,MAAY,sBAAU,yBAAyB,QAAQ,WAAW;AAAA,IACpE,OAAO;AACL,MAAY,iBAAK,0CAA0C;AAAA,IAC7D;AAAA,EACF;AACF;AAEA,SAAS,OAAa;AACpB,MAAI,sBAAsB,EAAE,QAAQ;AACtC;AAEA,KAAK;","names":[]}
\ No newline at end of file
diff --git a/dist/package.json b/dist/package.json
new file mode 100644
index 0000000..3dbc1ca
--- /dev/null
+++ b/dist/package.json
@@ -0,0 +1,3 @@
+{
+  "type": "module"
+}
diff --git a/flake.lock b/flake.lock
new file mode 100644
index 0000000..8e667b9
--- /dev/null
+++ b/flake.lock
@@ -0,0 +1,25 @@
+{
+  "nodes": {
+    "nixpkgs": {
+      "locked": {
+        "lastModified": 1713537308,
+        "narHash": "sha256-XtTSSIB2DA6tOv+l0FhvfDMiyCmhoRbNB+0SeInZkbk=",
+        "rev": "5c24cf2f0a12ad855f444c30b2421d044120c66f",
+        "revCount": 614481,
+        "type": "tarball",
+        "url": "https://api.flakehub.com/f/pinned/NixOS/nixpkgs/0.1.614481%2Brev-5c24cf2f0a12ad855f444c30b2421d044120c66f/018efa00-a443-7f41-b371-ce568b5c7e9f/source.tar.gz"
+      },
+      "original": {
+        "type": "tarball",
+        "url": "https://flakehub.com/f/NixOS/nixpkgs/0.1.%2A.tar.gz"
+      }
+    },
+    "root": {
+      "inputs": {
+        "nixpkgs": "nixpkgs"
+      }
+    }
+  },
+  "root": "root",
+  "version": 7
+}
diff --git a/flake.nix b/flake.nix
new file mode 100644
index 0000000..d968aa3
--- /dev/null
+++ b/flake.nix
@@ -0,0 +1,23 @@
+{
+  description = "update-flake-lock";
+
+  inputs.nixpkgs.url = "https://flakehub.com/f/NixOS/nixpkgs/0.1.*.tar.gz";
+
+  outputs = { self, nixpkgs }:
+    let
+      supportedSystems = [ "x86_64-linux" "aarch64-darwin" "aarch64-linux" "x86_64-darwin" ];
+      forEachSupportedSystem = f: nixpkgs.lib.genAttrs supportedSystems (system: f {
+        pkgs = import nixpkgs { inherit system; };
+      });
+    in
+    {
+      devShells = forEachSupportedSystem ({ pkgs }: {
+        default = pkgs.mkShell {
+          packages = with pkgs; [
+            nodejs_latest
+            nodePackages_latest.pnpm
+          ];
+        };
+      });
+    };
+}
diff --git a/package.json b/package.json
new file mode 100644
index 0000000..5218593
--- /dev/null
+++ b/package.json
@@ -0,0 +1,47 @@
+{
+  "name": "update-flake-lock",
+  "version": "1.0.0",
+  "description": "",
+  "main": "./dist/index.js",
+  "types": "./dist/index.d.ts",
+  "type": "module",
+  "scripts": {
+    "build": "tsup",
+    "format": "prettier --write .",
+    "check-fmt": "prettier --check .",
+    "lint": "eslint src/**/*.ts --ignore-pattern *.test.ts",
+    "package": "ncc build",
+    "test": "vitest --watch false",
+    "all": "pnpm run format && pnpm run lint && pnpm run build && pnpm run package"
+  },
+  "repository": {
+    "type": "git",
+    "url": "git+https://github.com/DeterminateSystems/update-flake-lock.git"
+  },
+  "keywords": [],
+  "author": "",
+  "license": "MIT",
+  "bugs": {
+    "url": "https://github.com/DeterminateSystems/update-flake-lock/issues"
+  },
+  "homepage": "https://github.com/DeterminateSystems/update-flake-lock#readme",
+  "dependencies": {
+    "@actions/core": "^1.11.1",
+    "@actions/exec": "^1.1.1",
+    "detsys-ts": "github:DeterminateSystems/detsys-ts"
+  },
+  "devDependencies": {
+    "@trivago/prettier-plugin-sort-imports": "^4.3.0",
+    "@typescript-eslint/eslint-plugin": "^7.18.0",
+    "@vercel/ncc": "^0.38.3",
+    "eslint": "^8.57.1",
+    "eslint-import-resolver-typescript": "^3.6.3",
+    "eslint-plugin-github": "^4.10.2",
+    "eslint-plugin-import": "^2.31.0",
+    "eslint-plugin-prettier": "^5.2.1",
+    "prettier": "^3.3.3",
+    "tsup": "^8.3.5",
+    "typescript": "^5.6.3",
+    "vitest": "^1.6.0"
+  }
+}
diff --git a/pnpm-lock.yaml b/pnpm-lock.yaml
new file mode 100644
index 0000000..32c9b7a
--- /dev/null
+++ b/pnpm-lock.yaml
@@ -0,0 +1,4340 @@
+lockfileVersion: '6.0'
+
+settings:
+  autoInstallPeers: true
+  excludeLinksFromLockfile: false
+
+dependencies:
+  '@actions/core':
+    specifier: ^1.11.1
+    version: 1.11.1
+  '@actions/exec':
+    specifier: ^1.1.1
+    version: 1.1.1
+  detsys-ts:
+    specifier: github:DeterminateSystems/detsys-ts
+    version: github.com/DeterminateSystems/detsys-ts/eb87094f35072ac911526ad052c3437c9e0c42d6
+
+devDependencies:
+  '@trivago/prettier-plugin-sort-imports':
+    specifier: ^4.3.0
+    version: 4.3.0(prettier@3.3.3)
+  '@typescript-eslint/eslint-plugin':
+    specifier: ^7.18.0
+    version: 7.18.0(@typescript-eslint/parser@7.18.0)(eslint@8.57.1)(typescript@5.6.3)
+  '@vercel/ncc':
+    specifier: ^0.38.3
+    version: 0.38.3
+  eslint:
+    specifier: ^8.57.1
+    version: 8.57.1
+  eslint-import-resolver-typescript:
+    specifier: ^3.6.3
+    version: 3.6.3(@typescript-eslint/parser@7.18.0)(eslint-plugin-import@2.31.0)(eslint@8.57.1)
+  eslint-plugin-github:
+    specifier: ^4.10.2
+    version: 4.10.2(eslint-import-resolver-typescript@3.6.3)(eslint@8.57.1)(typescript@5.6.3)
+  eslint-plugin-import:
+    specifier: ^2.31.0
+    version: 2.31.0(@typescript-eslint/parser@7.18.0)(eslint-import-resolver-typescript@3.6.3)(eslint@8.57.1)
+  eslint-plugin-prettier:
+    specifier: ^5.2.1
+    version: 5.2.1(eslint-config-prettier@9.1.0)(eslint@8.57.1)(prettier@3.3.3)
+  prettier:
+    specifier: ^3.3.3
+    version: 3.3.3
+  tsup:
+    specifier: ^8.3.5
+    version: 8.3.5(typescript@5.6.3)
+  typescript:
+    specifier: ^5.6.3
+    version: 5.6.3
+  vitest:
+    specifier: ^1.6.0
+    version: 1.6.0
+
+packages:
+
+  /@actions/cache@3.3.0:
+    resolution: {integrity: sha512-+eCsMTIZUEm+QA9GqjollOhCdvRrZ1JV8d9Rp34zVNizBkYITO8dhKczP5Xps1dFzc5n59p7vYVtZrGt18bb5Q==}
+    dependencies:
+      '@actions/core': 1.11.1
+      '@actions/exec': 1.1.1
+      '@actions/glob': 0.1.2
+      '@actions/http-client': 2.2.3
+      '@actions/io': 1.1.3
+      '@azure/abort-controller': 1.1.0
+      '@azure/ms-rest-js': 2.7.0
+      '@azure/storage-blob': 12.26.0
+      semver: 6.3.1
+    transitivePeerDependencies:
+      - encoding
+      - supports-color
+    dev: false
+
+  /@actions/core@1.11.1:
+    resolution: {integrity: sha512-hXJCSrkwfA46Vd9Z3q4cpEpHB1rL5NG04+/rbqW9d3+CSvtB1tYe8UTpAlixa1vj0m/ULglfEK2UKxMGxCxv5A==}
+    dependencies:
+      '@actions/exec': 1.1.1
+      '@actions/http-client': 2.2.3
+    dev: false
+
+  /@actions/exec@1.1.1:
+    resolution: {integrity: sha512-+sCcHHbVdk93a0XT19ECtO/gIXoxvdsgQLzb2fE2/5sIZmWQuluYyjPQtrtTHdU1YzTZ7bAPN4sITq2xi1679w==}
+    dependencies:
+      '@actions/io': 1.1.3
+    dev: false
+
+  /@actions/glob@0.1.2:
+    resolution: {integrity: sha512-SclLR7Ia5sEqjkJTPs7Sd86maMDw43p769YxBOxvPvEWuPEhpAnBsQfENOpXjFYMmhCqd127bmf+YdvJqVqR4A==}
+    dependencies:
+      '@actions/core': 1.11.1
+      minimatch: 3.1.2
+    dev: false
+
+  /@actions/http-client@2.2.3:
+    resolution: {integrity: sha512-mx8hyJi/hjFvbPokCg4uRd4ZX78t+YyRPtnKWwIl+RzNaVuFpQHfmlGVfsKEJN8LwTCvL+DfVgAM04XaHkm6bA==}
+    dependencies:
+      tunnel: 0.0.6
+      undici: 5.28.4
+    dev: false
+
+  /@actions/io@1.1.3:
+    resolution: {integrity: sha512-wi9JjgKLYS7U/z8PPbco+PvTb/nRWjeoFlJ1Qer83k/3C5PHQi28hiVdeE2kHXmIL99mQFawx8qt/JPjZilJ8Q==}
+    dev: false
+
+  /@azure/abort-controller@1.1.0:
+    resolution: {integrity: sha512-TrRLIoSQVzfAJX9H1JeFjzAoDGcoK1IYX1UImfceTZpsyYfWr09Ss1aHW1y5TrrR3iq6RZLBwJ3E24uwPhwahw==}
+    engines: {node: '>=12.0.0'}
+    dependencies:
+      tslib: 2.8.1
+    dev: false
+
+  /@azure/abort-controller@2.1.2:
+    resolution: {integrity: sha512-nBrLsEWm4J2u5LpAPjxADTlq3trDgVZZXHNKabeXZtpq3d3AbN/KGO82R87rdDz5/lYB024rtEf10/q0urNgsA==}
+    engines: {node: '>=18.0.0'}
+    dependencies:
+      tslib: 2.8.1
+    dev: false
+
+  /@azure/core-auth@1.9.0:
+    resolution: {integrity: sha512-FPwHpZywuyasDSLMqJ6fhbOK3TqUdviZNF8OqRGA4W5Ewib2lEEZ+pBsYcBa88B2NGO/SEnYPGhyBqNlE8ilSw==}
+    engines: {node: '>=18.0.0'}
+    dependencies:
+      '@azure/abort-controller': 2.1.2
+      '@azure/core-util': 1.11.0
+      tslib: 2.8.1
+    dev: false
+
+  /@azure/core-client@1.9.2:
+    resolution: {integrity: sha512-kRdry/rav3fUKHl/aDLd/pDLcB+4pOFwPPTVEExuMyaI5r+JBbMWqRbCY1pn5BniDaU3lRxO9eaQ1AmSMehl/w==}
+    engines: {node: '>=18.0.0'}
+    dependencies:
+      '@azure/abort-controller': 2.1.2
+      '@azure/core-auth': 1.9.0
+      '@azure/core-rest-pipeline': 1.18.0
+      '@azure/core-tracing': 1.2.0
+      '@azure/core-util': 1.11.0
+      '@azure/logger': 1.1.4
+      tslib: 2.8.1
+    transitivePeerDependencies:
+      - supports-color
+    dev: false
+
+  /@azure/core-http-compat@2.1.2:
+    resolution: {integrity: sha512-5MnV1yqzZwgNLLjlizsU3QqOeQChkIXw781Fwh1xdAqJR5AA32IUaq6xv1BICJvfbHoa+JYcaij2HFkhLbNTJQ==}
+    engines: {node: '>=18.0.0'}
+    dependencies:
+      '@azure/abort-controller': 2.1.2
+      '@azure/core-client': 1.9.2
+      '@azure/core-rest-pipeline': 1.18.0
+    transitivePeerDependencies:
+      - supports-color
+    dev: false
+
+  /@azure/core-lro@2.7.2:
+    resolution: {integrity: sha512-0YIpccoX8m/k00O7mDDMdJpbr6mf1yWo2dfmxt5A8XVZVVMz2SSKaEbMCeJRvgQ0IaSlqhjT47p4hVIRRy90xw==}
+    engines: {node: '>=18.0.0'}
+    dependencies:
+      '@azure/abort-controller': 2.1.2
+      '@azure/core-util': 1.11.0
+      '@azure/logger': 1.1.4
+      tslib: 2.8.1
+    dev: false
+
+  /@azure/core-paging@1.6.2:
+    resolution: {integrity: sha512-YKWi9YuCU04B55h25cnOYZHxXYtEvQEbKST5vqRga7hWY9ydd3FZHdeQF8pyh+acWZvppw13M/LMGx0LABUVMA==}
+    engines: {node: '>=18.0.0'}
+    dependencies:
+      tslib: 2.8.1
+    dev: false
+
+  /@azure/core-rest-pipeline@1.18.0:
+    resolution: {integrity: sha512-QSoGUp4Eq/gohEFNJaUOwTN7BCc2nHTjjbm75JT0aD7W65PWM1H/tItz0GsABn22uaKyGxiMhWQLt2r+FGU89Q==}
+    engines: {node: '>=18.0.0'}
+    dependencies:
+      '@azure/abort-controller': 2.1.2
+      '@azure/core-auth': 1.9.0
+      '@azure/core-tracing': 1.2.0
+      '@azure/core-util': 1.11.0
+      '@azure/logger': 1.1.4
+      http-proxy-agent: 7.0.2
+      https-proxy-agent: 7.0.5
+      tslib: 2.8.1
+    transitivePeerDependencies:
+      - supports-color
+    dev: false
+
+  /@azure/core-tracing@1.2.0:
+    resolution: {integrity: sha512-UKTiEJPkWcESPYJz3X5uKRYyOcJD+4nYph+KpfdPRnQJVrZfk0KJgdnaAWKfhsBBtAf/D58Az4AvCJEmWgIBAg==}
+    engines: {node: '>=18.0.0'}
+    dependencies:
+      tslib: 2.8.1
+    dev: false
+
+  /@azure/core-util@1.11.0:
+    resolution: {integrity: sha512-DxOSLua+NdpWoSqULhjDyAZTXFdP/LKkqtYuxxz1SCN289zk3OG8UOpnCQAz/tygyACBtWp/BoO72ptK7msY8g==}
+    engines: {node: '>=18.0.0'}
+    dependencies:
+      '@azure/abort-controller': 2.1.2
+      tslib: 2.8.1
+    dev: false
+
+  /@azure/core-xml@1.4.4:
+    resolution: {integrity: sha512-J4FYAqakGXcbfeZjwjMzjNcpcH4E+JtEBv+xcV1yL0Ydn/6wbQfeFKTCHh9wttAi0lmajHw7yBbHPRG+YHckZQ==}
+    engines: {node: '>=18.0.0'}
+    dependencies:
+      fast-xml-parser: 4.5.0
+      tslib: 2.8.1
+    dev: false
+
+  /@azure/logger@1.1.4:
+    resolution: {integrity: sha512-4IXXzcCdLdlXuCG+8UKEwLA1T1NHqUfanhXYHiQTn+6sfWCZXduqbtXDGceg3Ce5QxTGo7EqmbV6Bi+aqKuClQ==}
+    engines: {node: '>=18.0.0'}
+    dependencies:
+      tslib: 2.8.1
+    dev: false
+
+  /@azure/ms-rest-js@2.7.0:
+    resolution: {integrity: sha512-ngbzWbqF+NmztDOpLBVDxYM+XLcUj7nKhxGbSU9WtIsXfRB//cf2ZbAG5HkOrhU9/wd/ORRB6lM/d69RKVjiyA==}
+    dependencies:
+      '@azure/core-auth': 1.9.0
+      abort-controller: 3.0.0
+      form-data: 2.5.2
+      node-fetch: 2.7.0
+      tslib: 1.14.1
+      tunnel: 0.0.6
+      uuid: 8.3.2
+      xml2js: 0.5.0
+    transitivePeerDependencies:
+      - encoding
+    dev: false
+
+  /@azure/storage-blob@12.26.0:
+    resolution: {integrity: sha512-SriLPKezypIsiZ+TtlFfE46uuBIap2HeaQVS78e1P7rz5OSbq0rsd52WE1mC5f7vAeLiXqv7I7oRhL3WFZEw3Q==}
+    engines: {node: '>=18.0.0'}
+    dependencies:
+      '@azure/abort-controller': 2.1.2
+      '@azure/core-auth': 1.9.0
+      '@azure/core-client': 1.9.2
+      '@azure/core-http-compat': 2.1.2
+      '@azure/core-lro': 2.7.2
+      '@azure/core-paging': 1.6.2
+      '@azure/core-rest-pipeline': 1.18.0
+      '@azure/core-tracing': 1.2.0
+      '@azure/core-util': 1.11.0
+      '@azure/core-xml': 1.4.4
+      '@azure/logger': 1.1.4
+      events: 3.3.0
+      tslib: 2.8.1
+    transitivePeerDependencies:
+      - supports-color
+    dev: false
+
+  /@babel/code-frame@7.26.2:
+    resolution: {integrity: sha512-RJlIHRueQgwWitWgF8OdFYGZX328Ax5BCemNGlqHfplnRT9ESi8JkFlvaVYbS+UubVY6dpv87Fs2u5M29iNFVQ==}
+    engines: {node: '>=6.9.0'}
+    dependencies:
+      '@babel/helper-validator-identifier': 7.25.9
+      js-tokens: 4.0.0
+      picocolors: 1.1.1
+    dev: true
+
+  /@babel/generator@7.17.7:
+    resolution: {integrity: sha512-oLcVCTeIFadUoArDTwpluncplrYBmTCCZZgXCbgNGvOBBiSDDK3eWO4b/+eOTli5tKv1lg+a5/NAXg+nTcei1w==}
+    engines: {node: '>=6.9.0'}
+    dependencies:
+      '@babel/types': 7.17.0
+      jsesc: 2.5.2
+      source-map: 0.5.7
+    dev: true
+
+  /@babel/generator@7.26.2:
+    resolution: {integrity: sha512-zevQbhbau95nkoxSq3f/DC/SC+EEOUZd3DYqfSkMhY2/wfSeaHV1Ew4vk8e+x8lja31IbyuUa2uQ3JONqKbysw==}
+    engines: {node: '>=6.9.0'}
+    dependencies:
+      '@babel/parser': 7.26.2
+      '@babel/types': 7.26.0
+      '@jridgewell/gen-mapping': 0.3.5
+      '@jridgewell/trace-mapping': 0.3.25
+      jsesc: 3.0.2
+    dev: true
+
+  /@babel/helper-environment-visitor@7.24.7:
+    resolution: {integrity: sha512-DoiN84+4Gnd0ncbBOM9AZENV4a5ZiL39HYMyZJGZ/AZEykHYdJw0wW3kdcsh9/Kn+BRXHLkkklZ51ecPKmI1CQ==}
+    engines: {node: '>=6.9.0'}
+    dependencies:
+      '@babel/types': 7.26.0
+    dev: true
+
+  /@babel/helper-function-name@7.24.7:
+    resolution: {integrity: sha512-FyoJTsj/PEUWu1/TYRiXTIHc8lbw+TDYkZuoE43opPS5TrI7MyONBE1oNvfguEXAD9yhQRrVBnXdXzSLQl9XnA==}
+    engines: {node: '>=6.9.0'}
+    dependencies:
+      '@babel/template': 7.25.9
+      '@babel/types': 7.26.0
+    dev: true
+
+  /@babel/helper-hoist-variables@7.24.7:
+    resolution: {integrity: sha512-MJJwhkoGy5c4ehfoRyrJ/owKeMl19U54h27YYftT0o2teQ3FJ3nQUf/I3LlJsX4l3qlw7WRXUmiyajvHXoTubQ==}
+    engines: {node: '>=6.9.0'}
+    dependencies:
+      '@babel/types': 7.26.0
+    dev: true
+
+  /@babel/helper-split-export-declaration@7.24.7:
+    resolution: {integrity: sha512-oy5V7pD+UvfkEATUKvIjvIAH/xCzfsFVw7ygW2SI6NClZzquT+mwdTfgfdbUiceh6iQO0CHtCPsyze/MZ2YbAA==}
+    engines: {node: '>=6.9.0'}
+    dependencies:
+      '@babel/types': 7.26.0
+    dev: true
+
+  /@babel/helper-string-parser@7.25.9:
+    resolution: {integrity: sha512-4A/SCr/2KLd5jrtOMFzaKjVtAei3+2r/NChoBNoZ3EyP/+GlhoaEGoWOZUmFmoITP7zOJyHIMm+DYRd8o3PvHA==}
+    engines: {node: '>=6.9.0'}
+    dev: true
+
+  /@babel/helper-validator-identifier@7.25.9:
+    resolution: {integrity: sha512-Ed61U6XJc3CVRfkERJWDz4dJwKe7iLmmJsbOGu9wSloNSFttHV0I8g6UAgb7qnK5ly5bGLPd4oXZlxCdANBOWQ==}
+    engines: {node: '>=6.9.0'}
+    dev: true
+
+  /@babel/parser@7.26.2:
+    resolution: {integrity: sha512-DWMCZH9WA4Maitz2q21SRKHo9QXZxkDsbNZoVD62gusNtNBBqDg9i7uOhASfTfIGNzW+O+r7+jAlM8dwphcJKQ==}
+    engines: {node: '>=6.0.0'}
+    hasBin: true
+    dependencies:
+      '@babel/types': 7.26.0
+    dev: true
+
+  /@babel/template@7.25.9:
+    resolution: {integrity: sha512-9DGttpmPvIxBb/2uwpVo3dqJ+O6RooAFOS+lB+xDqoE2PVCE8nfoHMdZLpfCQRLwvohzXISPZcgxt80xLfsuwg==}
+    engines: {node: '>=6.9.0'}
+    dependencies:
+      '@babel/code-frame': 7.26.2
+      '@babel/parser': 7.26.2
+      '@babel/types': 7.26.0
+    dev: true
+
+  /@babel/traverse@7.23.2:
+    resolution: {integrity: sha512-azpe59SQ48qG6nu2CzcMLbxUudtN+dOM9kDbUqGq3HXUJRlo7i8fvPoxQUzYgLZ4cMVmuZgm8vvBpNeRhd6XSw==}
+    engines: {node: '>=6.9.0'}
+    dependencies:
+      '@babel/code-frame': 7.26.2
+      '@babel/generator': 7.26.2
+      '@babel/helper-environment-visitor': 7.24.7
+      '@babel/helper-function-name': 7.24.7
+      '@babel/helper-hoist-variables': 7.24.7
+      '@babel/helper-split-export-declaration': 7.24.7
+      '@babel/parser': 7.26.2
+      '@babel/types': 7.26.0
+      debug: 4.3.7
+      globals: 11.12.0
+    transitivePeerDependencies:
+      - supports-color
+    dev: true
+
+  /@babel/types@7.17.0:
+    resolution: {integrity: sha512-TmKSNO4D5rzhL5bjWFcVHHLETzfQ/AmbKpKPOSjlP0WoHZ6L911fgoOKY4Alp/emzG4cHJdyN49zpgkbXFEHHw==}
+    engines: {node: '>=6.9.0'}
+    dependencies:
+      '@babel/helper-validator-identifier': 7.25.9
+      to-fast-properties: 2.0.0
+    dev: true
+
+  /@babel/types@7.26.0:
+    resolution: {integrity: sha512-Z/yiTPj+lDVnF7lWeKCIJzaIkI0vYO87dMpZ4bg4TDrFe4XXLFWL1TbXU27gBP3QccxV9mZICCrnjnYlJjXHOA==}
+    engines: {node: '>=6.9.0'}
+    dependencies:
+      '@babel/helper-string-parser': 7.25.9
+      '@babel/helper-validator-identifier': 7.25.9
+    dev: true
+
+  /@esbuild/aix-ppc64@0.21.5:
+    resolution: {integrity: sha512-1SDgH6ZSPTlggy1yI6+Dbkiz8xzpHJEVAlF/AM1tHPLsf5STom9rwtjE4hKAF20FfXXNTFqEYXyJNWh1GiZedQ==}
+    engines: {node: '>=12'}
+    cpu: [ppc64]
+    os: [aix]
+    requiresBuild: true
+    dev: true
+    optional: true
+
+  /@esbuild/aix-ppc64@0.24.0:
+    resolution: {integrity: sha512-WtKdFM7ls47zkKHFVzMz8opM7LkcsIp9amDUBIAWirg70RM71WRSjdILPsY5Uv1D42ZpUfaPILDlfactHgsRkw==}
+    engines: {node: '>=18'}
+    cpu: [ppc64]
+    os: [aix]
+    requiresBuild: true
+    dev: true
+    optional: true
+
+  /@esbuild/android-arm64@0.21.5:
+    resolution: {integrity: sha512-c0uX9VAUBQ7dTDCjq+wdyGLowMdtR/GoC2U5IYk/7D1H1JYC0qseD7+11iMP2mRLN9RcCMRcjC4YMclCzGwS/A==}
+    engines: {node: '>=12'}
+    cpu: [arm64]
+    os: [android]
+    requiresBuild: true
+    dev: true
+    optional: true
+
+  /@esbuild/android-arm64@0.24.0:
+    resolution: {integrity: sha512-Vsm497xFM7tTIPYK9bNTYJyF/lsP590Qc1WxJdlB6ljCbdZKU9SY8i7+Iin4kyhV/KV5J2rOKsBQbB77Ab7L/w==}
+    engines: {node: '>=18'}
+    cpu: [arm64]
+    os: [android]
+    requiresBuild: true
+    dev: true
+    optional: true
+
+  /@esbuild/android-arm@0.21.5:
+    resolution: {integrity: sha512-vCPvzSjpPHEi1siZdlvAlsPxXl7WbOVUBBAowWug4rJHb68Ox8KualB+1ocNvT5fjv6wpkX6o/iEpbDrf68zcg==}
+    engines: {node: '>=12'}
+    cpu: [arm]
+    os: [android]
+    requiresBuild: true
+    dev: true
+    optional: true
+
+  /@esbuild/android-arm@0.24.0:
+    resolution: {integrity: sha512-arAtTPo76fJ/ICkXWetLCc9EwEHKaeya4vMrReVlEIUCAUncH7M4bhMQ+M9Vf+FFOZJdTNMXNBrWwW+OXWpSew==}
+    engines: {node: '>=18'}
+    cpu: [arm]
+    os: [android]
+    requiresBuild: true
+    dev: true
+    optional: true
+
+  /@esbuild/android-x64@0.21.5:
+    resolution: {integrity: sha512-D7aPRUUNHRBwHxzxRvp856rjUHRFW1SdQATKXH2hqA0kAZb1hKmi02OpYRacl0TxIGz/ZmXWlbZgjwWYaCakTA==}
+    engines: {node: '>=12'}
+    cpu: [x64]
+    os: [android]
+    requiresBuild: true
+    dev: true
+    optional: true
+
+  /@esbuild/android-x64@0.24.0:
+    resolution: {integrity: sha512-t8GrvnFkiIY7pa7mMgJd7p8p8qqYIz1NYiAoKc75Zyv73L3DZW++oYMSHPRarcotTKuSs6m3hTOa5CKHaS02TQ==}
+    engines: {node: '>=18'}
+    cpu: [x64]
+    os: [android]
+    requiresBuild: true
+    dev: true
+    optional: true
+
+  /@esbuild/darwin-arm64@0.21.5:
+    resolution: {integrity: sha512-DwqXqZyuk5AiWWf3UfLiRDJ5EDd49zg6O9wclZ7kUMv2WRFr4HKjXp/5t8JZ11QbQfUS6/cRCKGwYhtNAY88kQ==}
+    engines: {node: '>=12'}
+    cpu: [arm64]
+    os: [darwin]
+    requiresBuild: true
+    dev: true
+    optional: true
+
+  /@esbuild/darwin-arm64@0.24.0:
+    resolution: {integrity: sha512-CKyDpRbK1hXwv79soeTJNHb5EiG6ct3efd/FTPdzOWdbZZfGhpbcqIpiD0+vwmpu0wTIL97ZRPZu8vUt46nBSw==}
+    engines: {node: '>=18'}
+    cpu: [arm64]
+    os: [darwin]
+    requiresBuild: true
+    dev: true
+    optional: true
+
+  /@esbuild/darwin-x64@0.21.5:
+    resolution: {integrity: sha512-se/JjF8NlmKVG4kNIuyWMV/22ZaerB+qaSi5MdrXtd6R08kvs2qCN4C09miupktDitvh8jRFflwGFBQcxZRjbw==}
+    engines: {node: '>=12'}
+    cpu: [x64]
+    os: [darwin]
+    requiresBuild: true
+    dev: true
+    optional: true
+
+  /@esbuild/darwin-x64@0.24.0:
+    resolution: {integrity: sha512-rgtz6flkVkh58od4PwTRqxbKH9cOjaXCMZgWD905JOzjFKW+7EiUObfd/Kav+A6Gyud6WZk9w+xu6QLytdi2OA==}
+    engines: {node: '>=18'}
+    cpu: [x64]
+    os: [darwin]
+    requiresBuild: true
+    dev: true
+    optional: true
+
+  /@esbuild/freebsd-arm64@0.21.5:
+    resolution: {integrity: sha512-5JcRxxRDUJLX8JXp/wcBCy3pENnCgBR9bN6JsY4OmhfUtIHe3ZW0mawA7+RDAcMLrMIZaf03NlQiX9DGyB8h4g==}
+    engines: {node: '>=12'}
+    cpu: [arm64]
+    os: [freebsd]
+    requiresBuild: true
+    dev: true
+    optional: true
+
+  /@esbuild/freebsd-arm64@0.24.0:
+    resolution: {integrity: sha512-6Mtdq5nHggwfDNLAHkPlyLBpE5L6hwsuXZX8XNmHno9JuL2+bg2BX5tRkwjyfn6sKbxZTq68suOjgWqCicvPXA==}
+    engines: {node: '>=18'}
+    cpu: [arm64]
+    os: [freebsd]
+    requiresBuild: true
+    dev: true
+    optional: true
+
+  /@esbuild/freebsd-x64@0.21.5:
+    resolution: {integrity: sha512-J95kNBj1zkbMXtHVH29bBriQygMXqoVQOQYA+ISs0/2l3T9/kj42ow2mpqerRBxDJnmkUDCaQT/dfNXWX/ZZCQ==}
+    engines: {node: '>=12'}
+    cpu: [x64]
+    os: [freebsd]
+    requiresBuild: true
+    dev: true
+    optional: true
+
+  /@esbuild/freebsd-x64@0.24.0:
+    resolution: {integrity: sha512-D3H+xh3/zphoX8ck4S2RxKR6gHlHDXXzOf6f/9dbFt/NRBDIE33+cVa49Kil4WUjxMGW0ZIYBYtaGCa2+OsQwQ==}
+    engines: {node: '>=18'}
+    cpu: [x64]
+    os: [freebsd]
+    requiresBuild: true
+    dev: true
+    optional: true
+
+  /@esbuild/linux-arm64@0.21.5:
+    resolution: {integrity: sha512-ibKvmyYzKsBeX8d8I7MH/TMfWDXBF3db4qM6sy+7re0YXya+K1cem3on9XgdT2EQGMu4hQyZhan7TeQ8XkGp4Q==}
+    engines: {node: '>=12'}
+    cpu: [arm64]
+    os: [linux]
+    requiresBuild: true
+    dev: true
+    optional: true
+
+  /@esbuild/linux-arm64@0.24.0:
+    resolution: {integrity: sha512-TDijPXTOeE3eaMkRYpcy3LarIg13dS9wWHRdwYRnzlwlA370rNdZqbcp0WTyyV/k2zSxfko52+C7jU5F9Tfj1g==}
+    engines: {node: '>=18'}
+    cpu: [arm64]
+    os: [linux]
+    requiresBuild: true
+    dev: true
+    optional: true
+
+  /@esbuild/linux-arm@0.21.5:
+    resolution: {integrity: sha512-bPb5AHZtbeNGjCKVZ9UGqGwo8EUu4cLq68E95A53KlxAPRmUyYv2D6F0uUI65XisGOL1hBP5mTronbgo+0bFcA==}
+    engines: {node: '>=12'}
+    cpu: [arm]
+    os: [linux]
+    requiresBuild: true
+    dev: true
+    optional: true
+
+  /@esbuild/linux-arm@0.24.0:
+    resolution: {integrity: sha512-gJKIi2IjRo5G6Glxb8d3DzYXlxdEj2NlkixPsqePSZMhLudqPhtZ4BUrpIuTjJYXxvF9njql+vRjB2oaC9XpBw==}
+    engines: {node: '>=18'}
+    cpu: [arm]
+    os: [linux]
+    requiresBuild: true
+    dev: true
+    optional: true
+
+  /@esbuild/linux-ia32@0.21.5:
+    resolution: {integrity: sha512-YvjXDqLRqPDl2dvRODYmmhz4rPeVKYvppfGYKSNGdyZkA01046pLWyRKKI3ax8fbJoK5QbxblURkwK/MWY18Tg==}
+    engines: {node: '>=12'}
+    cpu: [ia32]
+    os: [linux]
+    requiresBuild: true
+    dev: true
+    optional: true
+
+  /@esbuild/linux-ia32@0.24.0:
+    resolution: {integrity: sha512-K40ip1LAcA0byL05TbCQ4yJ4swvnbzHscRmUilrmP9Am7//0UjPreh4lpYzvThT2Quw66MhjG//20mrufm40mA==}
+    engines: {node: '>=18'}
+    cpu: [ia32]
+    os: [linux]
+    requiresBuild: true
+    dev: true
+    optional: true
+
+  /@esbuild/linux-loong64@0.21.5:
+    resolution: {integrity: sha512-uHf1BmMG8qEvzdrzAqg2SIG/02+4/DHB6a9Kbya0XDvwDEKCoC8ZRWI5JJvNdUjtciBGFQ5PuBlpEOXQj+JQSg==}
+    engines: {node: '>=12'}
+    cpu: [loong64]
+    os: [linux]
+    requiresBuild: true
+    dev: true
+    optional: true
+
+  /@esbuild/linux-loong64@0.24.0:
+    resolution: {integrity: sha512-0mswrYP/9ai+CU0BzBfPMZ8RVm3RGAN/lmOMgW4aFUSOQBjA31UP8Mr6DDhWSuMwj7jaWOT0p0WoZ6jeHhrD7g==}
+    engines: {node: '>=18'}
+    cpu: [loong64]
+    os: [linux]
+    requiresBuild: true
+    dev: true
+    optional: true
+
+  /@esbuild/linux-mips64el@0.21.5:
+    resolution: {integrity: sha512-IajOmO+KJK23bj52dFSNCMsz1QP1DqM6cwLUv3W1QwyxkyIWecfafnI555fvSGqEKwjMXVLokcV5ygHW5b3Jbg==}
+    engines: {node: '>=12'}
+    cpu: [mips64el]
+    os: [linux]
+    requiresBuild: true
+    dev: true
+    optional: true
+
+  /@esbuild/linux-mips64el@0.24.0:
+    resolution: {integrity: sha512-hIKvXm0/3w/5+RDtCJeXqMZGkI2s4oMUGj3/jM0QzhgIASWrGO5/RlzAzm5nNh/awHE0A19h/CvHQe6FaBNrRA==}
+    engines: {node: '>=18'}
+    cpu: [mips64el]
+    os: [linux]
+    requiresBuild: true
+    dev: true
+    optional: true
+
+  /@esbuild/linux-ppc64@0.21.5:
+    resolution: {integrity: sha512-1hHV/Z4OEfMwpLO8rp7CvlhBDnjsC3CttJXIhBi+5Aj5r+MBvy4egg7wCbe//hSsT+RvDAG7s81tAvpL2XAE4w==}
+    engines: {node: '>=12'}
+    cpu: [ppc64]
+    os: [linux]
+    requiresBuild: true
+    dev: true
+    optional: true
+
+  /@esbuild/linux-ppc64@0.24.0:
+    resolution: {integrity: sha512-HcZh5BNq0aC52UoocJxaKORfFODWXZxtBaaZNuN3PUX3MoDsChsZqopzi5UupRhPHSEHotoiptqikjN/B77mYQ==}
+    engines: {node: '>=18'}
+    cpu: [ppc64]
+    os: [linux]
+    requiresBuild: true
+    dev: true
+    optional: true
+
+  /@esbuild/linux-riscv64@0.21.5:
+    resolution: {integrity: sha512-2HdXDMd9GMgTGrPWnJzP2ALSokE/0O5HhTUvWIbD3YdjME8JwvSCnNGBnTThKGEB91OZhzrJ4qIIxk/SBmyDDA==}
+    engines: {node: '>=12'}
+    cpu: [riscv64]
+    os: [linux]
+    requiresBuild: true
+    dev: true
+    optional: true
+
+  /@esbuild/linux-riscv64@0.24.0:
+    resolution: {integrity: sha512-bEh7dMn/h3QxeR2KTy1DUszQjUrIHPZKyO6aN1X4BCnhfYhuQqedHaa5MxSQA/06j3GpiIlFGSsy1c7Gf9padw==}
+    engines: {node: '>=18'}
+    cpu: [riscv64]
+    os: [linux]
+    requiresBuild: true
+    dev: true
+    optional: true
+
+  /@esbuild/linux-s390x@0.21.5:
+    resolution: {integrity: sha512-zus5sxzqBJD3eXxwvjN1yQkRepANgxE9lgOW2qLnmr8ikMTphkjgXu1HR01K4FJg8h1kEEDAqDcZQtbrRnB41A==}
+    engines: {node: '>=12'}
+    cpu: [s390x]
+    os: [linux]
+    requiresBuild: true
+    dev: true
+    optional: true
+
+  /@esbuild/linux-s390x@0.24.0:
+    resolution: {integrity: sha512-ZcQ6+qRkw1UcZGPyrCiHHkmBaj9SiCD8Oqd556HldP+QlpUIe2Wgn3ehQGVoPOvZvtHm8HPx+bH20c9pvbkX3g==}
+    engines: {node: '>=18'}
+    cpu: [s390x]
+    os: [linux]
+    requiresBuild: true
+    dev: true
+    optional: true
+
+  /@esbuild/linux-x64@0.21.5:
+    resolution: {integrity: sha512-1rYdTpyv03iycF1+BhzrzQJCdOuAOtaqHTWJZCWvijKD2N5Xu0TtVC8/+1faWqcP9iBCWOmjmhoH94dH82BxPQ==}
+    engines: {node: '>=12'}
+    cpu: [x64]
+    os: [linux]
+    requiresBuild: true
+    dev: true
+    optional: true
+
+  /@esbuild/linux-x64@0.24.0:
+    resolution: {integrity: sha512-vbutsFqQ+foy3wSSbmjBXXIJ6PL3scghJoM8zCL142cGaZKAdCZHyf+Bpu/MmX9zT9Q0zFBVKb36Ma5Fzfa8xA==}
+    engines: {node: '>=18'}
+    cpu: [x64]
+    os: [linux]
+    requiresBuild: true
+    dev: true
+    optional: true
+
+  /@esbuild/netbsd-x64@0.21.5:
+    resolution: {integrity: sha512-Woi2MXzXjMULccIwMnLciyZH4nCIMpWQAs049KEeMvOcNADVxo0UBIQPfSmxB3CWKedngg7sWZdLvLczpe0tLg==}
+    engines: {node: '>=12'}
+    cpu: [x64]
+    os: [netbsd]
+    requiresBuild: true
+    dev: true
+    optional: true
+
+  /@esbuild/netbsd-x64@0.24.0:
+    resolution: {integrity: sha512-hjQ0R/ulkO8fCYFsG0FZoH+pWgTTDreqpqY7UnQntnaKv95uP5iW3+dChxnx7C3trQQU40S+OgWhUVwCjVFLvg==}
+    engines: {node: '>=18'}
+    cpu: [x64]
+    os: [netbsd]
+    requiresBuild: true
+    dev: true
+    optional: true
+
+  /@esbuild/openbsd-arm64@0.24.0:
+    resolution: {integrity: sha512-MD9uzzkPQbYehwcN583yx3Tu5M8EIoTD+tUgKF982WYL9Pf5rKy9ltgD0eUgs8pvKnmizxjXZyLt0z6DC3rRXg==}
+    engines: {node: '>=18'}
+    cpu: [arm64]
+    os: [openbsd]
+    requiresBuild: true
+    dev: true
+    optional: true
+
+  /@esbuild/openbsd-x64@0.21.5:
+    resolution: {integrity: sha512-HLNNw99xsvx12lFBUwoT8EVCsSvRNDVxNpjZ7bPn947b8gJPzeHWyNVhFsaerc0n3TsbOINvRP2byTZ5LKezow==}
+    engines: {node: '>=12'}
+    cpu: [x64]
+    os: [openbsd]
+    requiresBuild: true
+    dev: true
+    optional: true
+
+  /@esbuild/openbsd-x64@0.24.0:
+    resolution: {integrity: sha512-4ir0aY1NGUhIC1hdoCzr1+5b43mw99uNwVzhIq1OY3QcEwPDO3B7WNXBzaKY5Nsf1+N11i1eOfFcq+D/gOS15Q==}
+    engines: {node: '>=18'}
+    cpu: [x64]
+    os: [openbsd]
+    requiresBuild: true
+    dev: true
+    optional: true
+
+  /@esbuild/sunos-x64@0.21.5:
+    resolution: {integrity: sha512-6+gjmFpfy0BHU5Tpptkuh8+uw3mnrvgs+dSPQXQOv3ekbordwnzTVEb4qnIvQcYXq6gzkyTnoZ9dZG+D4garKg==}
+    engines: {node: '>=12'}
+    cpu: [x64]
+    os: [sunos]
+    requiresBuild: true
+    dev: true
+    optional: true
+
+  /@esbuild/sunos-x64@0.24.0:
+    resolution: {integrity: sha512-jVzdzsbM5xrotH+W5f1s+JtUy1UWgjU0Cf4wMvffTB8m6wP5/kx0KiaLHlbJO+dMgtxKV8RQ/JvtlFcdZ1zCPA==}
+    engines: {node: '>=18'}
+    cpu: [x64]
+    os: [sunos]
+    requiresBuild: true
+    dev: true
+    optional: true
+
+  /@esbuild/win32-arm64@0.21.5:
+    resolution: {integrity: sha512-Z0gOTd75VvXqyq7nsl93zwahcTROgqvuAcYDUr+vOv8uHhNSKROyU961kgtCD1e95IqPKSQKH7tBTslnS3tA8A==}
+    engines: {node: '>=12'}
+    cpu: [arm64]
+    os: [win32]
+    requiresBuild: true
+    dev: true
+    optional: true
+
+  /@esbuild/win32-arm64@0.24.0:
+    resolution: {integrity: sha512-iKc8GAslzRpBytO2/aN3d2yb2z8XTVfNV0PjGlCxKo5SgWmNXx82I/Q3aG1tFfS+A2igVCY97TJ8tnYwpUWLCA==}
+    engines: {node: '>=18'}
+    cpu: [arm64]
+    os: [win32]
+    requiresBuild: true
+    dev: true
+    optional: true
+
+  /@esbuild/win32-ia32@0.21.5:
+    resolution: {integrity: sha512-SWXFF1CL2RVNMaVs+BBClwtfZSvDgtL//G/smwAc5oVK/UPu2Gu9tIaRgFmYFFKrmg3SyAjSrElf0TiJ1v8fYA==}
+    engines: {node: '>=12'}
+    cpu: [ia32]
+    os: [win32]
+    requiresBuild: true
+    dev: true
+    optional: true
+
+  /@esbuild/win32-ia32@0.24.0:
+    resolution: {integrity: sha512-vQW36KZolfIudCcTnaTpmLQ24Ha1RjygBo39/aLkM2kmjkWmZGEJ5Gn9l5/7tzXA42QGIoWbICfg6KLLkIw6yw==}
+    engines: {node: '>=18'}
+    cpu: [ia32]
+    os: [win32]
+    requiresBuild: true
+    dev: true
+    optional: true
+
+  /@esbuild/win32-x64@0.21.5:
+    resolution: {integrity: sha512-tQd/1efJuzPC6rCFwEvLtci/xNFcTZknmXs98FYDfGE4wP9ClFV98nyKrzJKVPMhdDnjzLhdUyMX4PsQAPjwIw==}
+    engines: {node: '>=12'}
+    cpu: [x64]
+    os: [win32]
+    requiresBuild: true
+    dev: true
+    optional: true
+
+  /@esbuild/win32-x64@0.24.0:
+    resolution: {integrity: sha512-7IAFPrjSQIJrGsK6flwg7NFmwBoSTyF3rl7If0hNUFQU4ilTsEPL6GuMuU9BfIWVVGuRnuIidkSMC+c0Otu8IA==}
+    engines: {node: '>=18'}
+    cpu: [x64]
+    os: [win32]
+    requiresBuild: true
+    dev: true
+    optional: true
+
+  /@eslint-community/eslint-utils@4.4.1(eslint@8.57.1):
+    resolution: {integrity: sha512-s3O3waFUrMV8P/XaF/+ZTp1X9XBZW1a4B97ZnjQF2KYWaFD2A8KyFBsrsfSjEmjn3RGWAIuvlneuZm3CUK3jbA==}
+    engines: {node: ^12.22.0 || ^14.17.0 || >=16.0.0}
+    peerDependencies:
+      eslint: ^6.0.0 || ^7.0.0 || >=8.0.0
+    dependencies:
+      eslint: 8.57.1
+      eslint-visitor-keys: 3.4.3
+    dev: true
+
+  /@eslint-community/regexpp@4.12.1:
+    resolution: {integrity: sha512-CCZCDJuduB9OUkFkY2IgppNZMi2lBQgD2qzwXkEia16cge2pijY/aXi96CJMquDMn3nJdlPV1A5KrJEXwfLNzQ==}
+    engines: {node: ^12.0.0 || ^14.0.0 || >=16.0.0}
+    dev: true
+
+  /@eslint/eslintrc@2.1.4:
+    resolution: {integrity: sha512-269Z39MS6wVJtsoUl10L60WdkhJVdPG24Q4eZTH3nnF6lpvSShEK3wQjDX9JRWAUPvPh7COouPpU9IrqaZFvtQ==}
+    engines: {node: ^12.22.0 || ^14.17.0 || >=16.0.0}
+    dependencies:
+      ajv: 6.12.6
+      debug: 4.3.7
+      espree: 9.6.1
+      globals: 13.24.0
+      ignore: 5.3.2
+      import-fresh: 3.3.0
+      js-yaml: 4.1.0
+      minimatch: 3.1.2
+      strip-json-comments: 3.1.1
+    transitivePeerDependencies:
+      - supports-color
+    dev: true
+
+  /@eslint/js@8.57.1:
+    resolution: {integrity: sha512-d9zaMRSTIKDLhctzH12MtXvJKSSUhaHcjV+2Z+GK+EEY7XKpP5yR4x+N3TAcHTcu963nIr+TMcCb4DBCYX1z6Q==}
+    engines: {node: ^12.22.0 || ^14.17.0 || >=16.0.0}
+    dev: true
+
+  /@fastify/busboy@2.1.1:
+    resolution: {integrity: sha512-vBZP4NlzfOlerQTnba4aqZoMhE/a9HY7HRqoOPaETQcSQuWEIyZMHGfVu6w9wGtGK5fED5qRs2DteVCjOH60sA==}
+    engines: {node: '>=14'}
+    dev: false
+
+  /@github/browserslist-config@1.0.0:
+    resolution: {integrity: sha512-gIhjdJp/c2beaIWWIlsXdqXVRUz3r2BxBCpfz/F3JXHvSAQ1paMYjLH+maEATtENg+k5eLV7gA+9yPp762ieuw==}
+    dev: true
+
+  /@humanwhocodes/config-array@0.13.0:
+    resolution: {integrity: sha512-DZLEEqFWQFiyK6h5YIeynKx7JlvCYWL0cImfSRXZ9l4Sg2efkFGTuFf6vzXjK1cq6IYkU+Eg/JizXw+TD2vRNw==}
+    engines: {node: '>=10.10.0'}
+    deprecated: Use @eslint/config-array instead
+    dependencies:
+      '@humanwhocodes/object-schema': 2.0.3
+      debug: 4.3.7
+      minimatch: 3.1.2
+    transitivePeerDependencies:
+      - supports-color
+    dev: true
+
+  /@humanwhocodes/module-importer@1.0.1:
+    resolution: {integrity: sha512-bxveV4V8v5Yb4ncFTT3rPSgZBOpCkjfK0y4oVVVJwIuDVBRMDXrPyXRL988i5ap9m9bnyEEjWfm5WkBmtffLfA==}
+    engines: {node: '>=12.22'}
+    dev: true
+
+  /@humanwhocodes/object-schema@2.0.3:
+    resolution: {integrity: sha512-93zYdMES/c1D69yZiKDBj0V24vqNzB/koF26KPaagAfd3P/4gUlh3Dys5ogAK+Exi9QyzlD8x/08Zt7wIKcDcA==}
+    deprecated: Use @eslint/object-schema instead
+    dev: true
+
+  /@isaacs/cliui@8.0.2:
+    resolution: {integrity: sha512-O8jcjabXaleOG9DQ0+ARXWZBTfnP4WNAqzuiJK7ll44AmxGKv/J2M4TPjxjY3znBCfvBXFzucm1twdyFybFqEA==}
+    engines: {node: '>=12'}
+    dependencies:
+      string-width: 5.1.2
+      string-width-cjs: /string-width@4.2.3
+      strip-ansi: 7.1.0
+      strip-ansi-cjs: /strip-ansi@6.0.1
+      wrap-ansi: 8.1.0
+      wrap-ansi-cjs: /wrap-ansi@7.0.0
+    dev: true
+
+  /@jest/schemas@29.6.3:
+    resolution: {integrity: sha512-mo5j5X+jIZmJQveBKeS/clAueipV7KgiX1vMgCxam1RNYiqE1w62n0/tJJnHtjW8ZHcQco5gY85jA3mi0L+nSA==}
+    engines: {node: ^14.15.0 || ^16.10.0 || >=18.0.0}
+    dependencies:
+      '@sinclair/typebox': 0.27.8
+    dev: true
+
+  /@jridgewell/gen-mapping@0.3.5:
+    resolution: {integrity: sha512-IzL8ZoEDIBRWEzlCcRhOaCupYyN5gdIK+Q6fbFdPDg6HqX6jpkItn7DFIpW9LQzXG6Df9sA7+OKnq0qlz/GaQg==}
+    engines: {node: '>=6.0.0'}
+    dependencies:
+      '@jridgewell/set-array': 1.2.1
+      '@jridgewell/sourcemap-codec': 1.5.0
+      '@jridgewell/trace-mapping': 0.3.25
+    dev: true
+
+  /@jridgewell/resolve-uri@3.1.2:
+    resolution: {integrity: sha512-bRISgCIjP20/tbWSPWMEi54QVPRZExkuD9lJL+UIxUKtwVJA8wW1Trb1jMs1RFXo1CBTNZ/5hpC9QvmKWdopKw==}
+    engines: {node: '>=6.0.0'}
+    dev: true
+
+  /@jridgewell/set-array@1.2.1:
+    resolution: {integrity: sha512-R8gLRTZeyp03ymzP/6Lil/28tGeGEzhx1q2k703KGWRAI1VdvPIXdG70VJc2pAMw3NA6JKL5hhFu1sJX0Mnn/A==}
+    engines: {node: '>=6.0.0'}
+    dev: true
+
+  /@jridgewell/sourcemap-codec@1.5.0:
+    resolution: {integrity: sha512-gv3ZRaISU3fjPAgNsriBRqGWQL6quFx04YMPW/zD8XMLsU32mhCCbfbO6KZFLjvYpCZ8zyDEgqsgf+PwPaM7GQ==}
+    dev: true
+
+  /@jridgewell/trace-mapping@0.3.25:
+    resolution: {integrity: sha512-vNk6aEwybGtawWmy/PzwnGDOjCkLWSD2wqvjGGAgOAwCGWySYXfYoxt00IJkTF+8Lb57DwOb3Aa0o9CApepiYQ==}
+    dependencies:
+      '@jridgewell/resolve-uri': 3.1.2
+      '@jridgewell/sourcemap-codec': 1.5.0
+    dev: true
+
+  /@nodelib/fs.scandir@2.1.5:
+    resolution: {integrity: sha512-vq24Bq3ym5HEQm2NKCr3yXDwjc7vTsEThRDnkp2DK9p1uqLR+DHurm/NOTo0KG7HYHU7eppKZj3MyqYuMBf62g==}
+    engines: {node: '>= 8'}
+    dependencies:
+      '@nodelib/fs.stat': 2.0.5
+      run-parallel: 1.2.0
+    dev: true
+
+  /@nodelib/fs.stat@2.0.5:
+    resolution: {integrity: sha512-RkhPPp2zrqDAQA/2jNhnztcPAlv64XdhIp7a7454A5ovI7Bukxgt7MX7udwAu3zg1DcpPU0rz3VV1SeaqvY4+A==}
+    engines: {node: '>= 8'}
+    dev: true
+
+  /@nodelib/fs.walk@1.2.8:
+    resolution: {integrity: sha512-oGB+UxlgWcgQkgwo8GcEGwemoTFt3FIO9ababBmaGwXIoBKZ+GTy0pP185beGg7Llih/NSHSV2XAs1lnznocSg==}
+    engines: {node: '>= 8'}
+    dependencies:
+      '@nodelib/fs.scandir': 2.1.5
+      fastq: 1.17.1
+    dev: true
+
+  /@nolyfill/is-core-module@1.0.39:
+    resolution: {integrity: sha512-nn5ozdjYQpUCZlWGuxcJY/KpxkWQs4DcbMCmKojjyrYDEAGy4Ce19NN4v5MduafTwJlbKc99UA8YhSVqq9yPZA==}
+    engines: {node: '>=12.4.0'}
+    dev: true
+
+  /@pkgjs/parseargs@0.11.0:
+    resolution: {integrity: sha512-+1VkjdD0QBLPodGrJUeqarH8VAIvQODIbwh9XpP5Syisf7YoQgsJKPNFoqqLQlu+VQ/tVSshMR6loPMn8U+dPg==}
+    engines: {node: '>=14'}
+    requiresBuild: true
+    dev: true
+    optional: true
+
+  /@pkgr/core@0.1.1:
+    resolution: {integrity: sha512-cq8o4cWH0ibXh9VGi5P20Tu9XF/0fFXl9EUinr9QfTM7a7p0oTA4iJRCQWppXR1Pg8dSM0UCItCkPwsk9qWWYA==}
+    engines: {node: ^12.20.0 || ^14.18.0 || >=16.0.0}
+    dev: true
+
+  /@rollup/rollup-android-arm-eabi@4.27.3:
+    resolution: {integrity: sha512-EzxVSkIvCFxUd4Mgm4xR9YXrcp976qVaHnqom/Tgm+vU79k4vV4eYTjmRvGfeoW8m9LVcsAy/lGjcgVegKEhLQ==}
+    cpu: [arm]
+    os: [android]
+    requiresBuild: true
+    dev: true
+    optional: true
+
+  /@rollup/rollup-android-arm64@4.27.3:
+    resolution: {integrity: sha512-LJc5pDf1wjlt9o/Giaw9Ofl+k/vLUaYsE2zeQGH85giX2F+wn/Cg8b3c5CDP3qmVmeO5NzwVUzQQxwZvC2eQKw==}
+    cpu: [arm64]
+    os: [android]
+    requiresBuild: true
+    dev: true
+    optional: true
+
+  /@rollup/rollup-darwin-arm64@4.27.3:
+    resolution: {integrity: sha512-OuRysZ1Mt7wpWJ+aYKblVbJWtVn3Cy52h8nLuNSzTqSesYw1EuN6wKp5NW/4eSre3mp12gqFRXOKTcN3AI3LqA==}
+    cpu: [arm64]
+    os: [darwin]
+    requiresBuild: true
+    dev: true
+    optional: true
+
+  /@rollup/rollup-darwin-x64@4.27.3:
+    resolution: {integrity: sha512-xW//zjJMlJs2sOrCmXdB4d0uiilZsOdlGQIC/jjmMWT47lkLLoB1nsNhPUcnoqyi5YR6I4h+FjBpILxbEy8JRg==}
+    cpu: [x64]
+    os: [darwin]
+    requiresBuild: true
+    dev: true
+    optional: true
+
+  /@rollup/rollup-freebsd-arm64@4.27.3:
+    resolution: {integrity: sha512-58E0tIcwZ+12nK1WiLzHOD8I0d0kdrY/+o7yFVPRHuVGY3twBwzwDdTIBGRxLmyjciMYl1B/U515GJy+yn46qw==}
+    cpu: [arm64]
+    os: [freebsd]
+    requiresBuild: true
+    dev: true
+    optional: true
+
+  /@rollup/rollup-freebsd-x64@4.27.3:
+    resolution: {integrity: sha512-78fohrpcVwTLxg1ZzBMlwEimoAJmY6B+5TsyAZ3Vok7YabRBUvjYTsRXPTjGEvv/mfgVBepbW28OlMEz4w8wGA==}
+    cpu: [x64]
+    os: [freebsd]
+    requiresBuild: true
+    dev: true
+    optional: true
+
+  /@rollup/rollup-linux-arm-gnueabihf@4.27.3:
+    resolution: {integrity: sha512-h2Ay79YFXyQi+QZKo3ISZDyKaVD7uUvukEHTOft7kh00WF9mxAaxZsNs3o/eukbeKuH35jBvQqrT61fzKfAB/Q==}
+    cpu: [arm]
+    os: [linux]
+    requiresBuild: true
+    dev: true
+    optional: true
+
+  /@rollup/rollup-linux-arm-musleabihf@4.27.3:
+    resolution: {integrity: sha512-Sv2GWmrJfRY57urktVLQ0VKZjNZGogVtASAgosDZ1aUB+ykPxSi3X1nWORL5Jk0sTIIwQiPH7iE3BMi9zGWfkg==}
+    cpu: [arm]
+    os: [linux]
+    requiresBuild: true
+    dev: true
+    optional: true
+
+  /@rollup/rollup-linux-arm64-gnu@4.27.3:
+    resolution: {integrity: sha512-FPoJBLsPW2bDNWjSrwNuTPUt30VnfM8GPGRoLCYKZpPx0xiIEdFip3dH6CqgoT0RnoGXptaNziM0WlKgBc+OWQ==}
+    cpu: [arm64]
+    os: [linux]
+    requiresBuild: true
+    dev: true
+    optional: true
+
+  /@rollup/rollup-linux-arm64-musl@4.27.3:
+    resolution: {integrity: sha512-TKxiOvBorYq4sUpA0JT+Fkh+l+G9DScnG5Dqx7wiiqVMiRSkzTclP35pE6eQQYjP4Gc8yEkJGea6rz4qyWhp3g==}
+    cpu: [arm64]
+    os: [linux]
+    requiresBuild: true
+    dev: true
+    optional: true
+
+  /@rollup/rollup-linux-powerpc64le-gnu@4.27.3:
+    resolution: {integrity: sha512-v2M/mPvVUKVOKITa0oCFksnQQ/TqGrT+yD0184/cWHIu0LoIuYHwox0Pm3ccXEz8cEQDLk6FPKd1CCm+PlsISw==}
+    cpu: [ppc64]
+    os: [linux]
+    requiresBuild: true
+    dev: true
+    optional: true
+
+  /@rollup/rollup-linux-riscv64-gnu@4.27.3:
+    resolution: {integrity: sha512-LdrI4Yocb1a/tFVkzmOE5WyYRgEBOyEhWYJe4gsDWDiwnjYKjNs7PS6SGlTDB7maOHF4kxevsuNBl2iOcj3b4A==}
+    cpu: [riscv64]
+    os: [linux]
+    requiresBuild: true
+    dev: true
+    optional: true
+
+  /@rollup/rollup-linux-s390x-gnu@4.27.3:
+    resolution: {integrity: sha512-d4wVu6SXij/jyiwPvI6C4KxdGzuZOvJ6y9VfrcleHTwo68fl8vZC5ZYHsCVPUi4tndCfMlFniWgwonQ5CUpQcA==}
+    cpu: [s390x]
+    os: [linux]
+    requiresBuild: true
+    dev: true
+    optional: true
+
+  /@rollup/rollup-linux-x64-gnu@4.27.3:
+    resolution: {integrity: sha512-/6bn6pp1fsCGEY5n3yajmzZQAh+mW4QPItbiWxs69zskBzJuheb3tNynEjL+mKOsUSFK11X4LYF2BwwXnzWleA==}
+    cpu: [x64]
+    os: [linux]
+    requiresBuild: true
+    dev: true
+    optional: true
+
+  /@rollup/rollup-linux-x64-musl@4.27.3:
+    resolution: {integrity: sha512-nBXOfJds8OzUT1qUreT/en3eyOXd2EH5b0wr2bVB5999qHdGKkzGzIyKYaKj02lXk6wpN71ltLIaQpu58YFBoQ==}
+    cpu: [x64]
+    os: [linux]
+    requiresBuild: true
+    dev: true
+    optional: true
+
+  /@rollup/rollup-win32-arm64-msvc@4.27.3:
+    resolution: {integrity: sha512-ogfbEVQgIZOz5WPWXF2HVb6En+kWzScuxJo/WdQTqEgeyGkaa2ui5sQav9Zkr7bnNCLK48uxmmK0TySm22eiuw==}
+    cpu: [arm64]
+    os: [win32]
+    requiresBuild: true
+    dev: true
+    optional: true
+
+  /@rollup/rollup-win32-ia32-msvc@4.27.3:
+    resolution: {integrity: sha512-ecE36ZBMLINqiTtSNQ1vzWc5pXLQHlf/oqGp/bSbi7iedcjcNb6QbCBNG73Euyy2C+l/fn8qKWEwxr+0SSfs3w==}
+    cpu: [ia32]
+    os: [win32]
+    requiresBuild: true
+    dev: true
+    optional: true
+
+  /@rollup/rollup-win32-x64-msvc@4.27.3:
+    resolution: {integrity: sha512-vliZLrDmYKyaUoMzEbMTg2JkerfBjn03KmAw9CykO0Zzkzoyd7o3iZNam/TpyWNjNT+Cz2iO3P9Smv2wgrR+Eg==}
+    cpu: [x64]
+    os: [win32]
+    requiresBuild: true
+    dev: true
+    optional: true
+
+  /@rtsao/scc@1.1.0:
+    resolution: {integrity: sha512-zt6OdqaDoOnJ1ZYsCYGt9YmWzDXl4vQdKTyJev62gFhRGKdx7mcT54V9KIjg+d2wi9EXsPvAPKe7i7WjfVWB8g==}
+    dev: true
+
+  /@sec-ant/readable-stream@0.4.1:
+    resolution: {integrity: sha512-831qok9r2t8AlxLko40y2ebgSDhenenCatLVeW/uBtnHPyhHOvG0C7TvfgecV+wHzIm5KUICgzmVpWS+IMEAeg==}
+    dev: false
+
+  /@sinclair/typebox@0.27.8:
+    resolution: {integrity: sha512-+Fj43pSMwJs4KRrH/938Uf+uAELIgVBmQzg/q1YG10djyfA3TnrU8N8XzqCh/okZdszqBQTZf96idMfE5lnwTA==}
+    dev: true
+
+  /@sindresorhus/is@7.0.1:
+    resolution: {integrity: sha512-QWLl2P+rsCJeofkDNIT3WFmb6NrRud1SUYW8dIhXK/46XFV8Q/g7Bsvib0Askb0reRLe+WYPeeE+l5cH7SlkuQ==}
+    engines: {node: '>=18'}
+    dev: false
+
+  /@szmarczak/http-timer@5.0.1:
+    resolution: {integrity: sha512-+PmQX0PiAYPMeVYe237LJAYvOMYW1j2rH5YROyS3b4CTVJum34HfRvKvAzozHAQG0TnHNdUfY9nCeUyRAs//cw==}
+    engines: {node: '>=14.16'}
+    dependencies:
+      defer-to-connect: 2.0.1
+    dev: false
+
+  /@trivago/prettier-plugin-sort-imports@4.3.0(prettier@3.3.3):
+    resolution: {integrity: sha512-r3n0onD3BTOVUNPhR4lhVK4/pABGpbA7bW3eumZnYdKaHkf1qEC+Mag6DPbGNuuh0eG8AaYj+YqmVHSiGslaTQ==}
+    peerDependencies:
+      '@vue/compiler-sfc': 3.x
+      prettier: 2.x - 3.x
+    peerDependenciesMeta:
+      '@vue/compiler-sfc':
+        optional: true
+    dependencies:
+      '@babel/generator': 7.17.7
+      '@babel/parser': 7.26.2
+      '@babel/traverse': 7.23.2
+      '@babel/types': 7.17.0
+      javascript-natural-sort: 0.7.1
+      lodash: 4.17.21
+      prettier: 3.3.3
+    transitivePeerDependencies:
+      - supports-color
+    dev: true
+
+  /@types/estree@1.0.6:
+    resolution: {integrity: sha512-AYnb1nQyY49te+VRAVgmzfcgjYS91mY5P0TKUDCLEM+gNnA+3T6rWITXRLYCpahpqSQbN5cE+gHpnPyXjHWxcw==}
+    dev: true
+
+  /@types/http-cache-semantics@4.0.4:
+    resolution: {integrity: sha512-1m0bIFVc7eJWyve9S0RnuRgcQqF/Xd5QsUZAZeQFr1Q3/p9JWoQQEqmVy+DPTNpGXwhgIetAoYF8JSc33q29QA==}
+    dev: false
+
+  /@types/json5@0.0.29:
+    resolution: {integrity: sha512-dRLjCWHYg4oaA77cxO64oO+7JwCwnIzkZPdrrC71jQmQtlhM556pwKo5bUzqvZndkVbeFLIIi+9TC40JNF5hNQ==}
+    dev: true
+
+  /@typescript-eslint/eslint-plugin@7.18.0(@typescript-eslint/parser@7.18.0)(eslint@8.57.1)(typescript@5.6.3):
+    resolution: {integrity: sha512-94EQTWZ40mzBc42ATNIBimBEDltSJ9RQHCC8vc/PDbxi4k8dVwUAv4o98dk50M1zB+JGFxp43FP7f8+FP8R6Sw==}
+    engines: {node: ^18.18.0 || >=20.0.0}
+    peerDependencies:
+      '@typescript-eslint/parser': ^7.0.0
+      eslint: ^8.56.0
+      typescript: '*'
+    peerDependenciesMeta:
+      typescript:
+        optional: true
+    dependencies:
+      '@eslint-community/regexpp': 4.12.1
+      '@typescript-eslint/parser': 7.18.0(eslint@8.57.1)(typescript@5.6.3)
+      '@typescript-eslint/scope-manager': 7.18.0
+      '@typescript-eslint/type-utils': 7.18.0(eslint@8.57.1)(typescript@5.6.3)
+      '@typescript-eslint/utils': 7.18.0(eslint@8.57.1)(typescript@5.6.3)
+      '@typescript-eslint/visitor-keys': 7.18.0
+      eslint: 8.57.1
+      graphemer: 1.4.0
+      ignore: 5.3.2
+      natural-compare: 1.4.0
+      ts-api-utils: 1.4.0(typescript@5.6.3)
+      typescript: 5.6.3
+    transitivePeerDependencies:
+      - supports-color
+    dev: true
+
+  /@typescript-eslint/parser@7.18.0(eslint@8.57.1)(typescript@5.6.3):
+    resolution: {integrity: sha512-4Z+L8I2OqhZV8qA132M4wNL30ypZGYOQVBfMgxDH/K5UX0PNqTu1c6za9ST5r9+tavvHiTWmBnKzpCJ/GlVFtg==}
+    engines: {node: ^18.18.0 || >=20.0.0}
+    peerDependencies:
+      eslint: ^8.56.0
+      typescript: '*'
+    peerDependenciesMeta:
+      typescript:
+        optional: true
+    dependencies:
+      '@typescript-eslint/scope-manager': 7.18.0
+      '@typescript-eslint/types': 7.18.0
+      '@typescript-eslint/typescript-estree': 7.18.0(typescript@5.6.3)
+      '@typescript-eslint/visitor-keys': 7.18.0
+      debug: 4.3.7
+      eslint: 8.57.1
+      typescript: 5.6.3
+    transitivePeerDependencies:
+      - supports-color
+    dev: true
+
+  /@typescript-eslint/scope-manager@7.18.0:
+    resolution: {integrity: sha512-jjhdIE/FPF2B7Z1uzc6i3oWKbGcHb87Qw7AWj6jmEqNOfDFbJWtjt/XfwCpvNkpGWlcJaog5vTR+VV8+w9JflA==}
+    engines: {node: ^18.18.0 || >=20.0.0}
+    dependencies:
+      '@typescript-eslint/types': 7.18.0
+      '@typescript-eslint/visitor-keys': 7.18.0
+    dev: true
+
+  /@typescript-eslint/type-utils@7.18.0(eslint@8.57.1)(typescript@5.6.3):
+    resolution: {integrity: sha512-XL0FJXuCLaDuX2sYqZUUSOJ2sG5/i1AAze+axqmLnSkNEVMVYLF+cbwlB2w8D1tinFuSikHmFta+P+HOofrLeA==}
+    engines: {node: ^18.18.0 || >=20.0.0}
+    peerDependencies:
+      eslint: ^8.56.0
+      typescript: '*'
+    peerDependenciesMeta:
+      typescript:
+        optional: true
+    dependencies:
+      '@typescript-eslint/typescript-estree': 7.18.0(typescript@5.6.3)
+      '@typescript-eslint/utils': 7.18.0(eslint@8.57.1)(typescript@5.6.3)
+      debug: 4.3.7
+      eslint: 8.57.1
+      ts-api-utils: 1.4.0(typescript@5.6.3)
+      typescript: 5.6.3
+    transitivePeerDependencies:
+      - supports-color
+    dev: true
+
+  /@typescript-eslint/types@7.18.0:
+    resolution: {integrity: sha512-iZqi+Ds1y4EDYUtlOOC+aUmxnE9xS/yCigkjA7XpTKV6nCBd3Hp/PRGGmdwnfkV2ThMyYldP1wRpm/id99spTQ==}
+    engines: {node: ^18.18.0 || >=20.0.0}
+    dev: true
+
+  /@typescript-eslint/typescript-estree@7.18.0(typescript@5.6.3):
+    resolution: {integrity: sha512-aP1v/BSPnnyhMHts8cf1qQ6Q1IFwwRvAQGRvBFkWlo3/lH29OXA3Pts+c10nxRxIBrDnoMqzhgdwVe5f2D6OzA==}
+    engines: {node: ^18.18.0 || >=20.0.0}
+    peerDependencies:
+      typescript: '*'
+    peerDependenciesMeta:
+      typescript:
+        optional: true
+    dependencies:
+      '@typescript-eslint/types': 7.18.0
+      '@typescript-eslint/visitor-keys': 7.18.0
+      debug: 4.3.7
+      globby: 11.1.0
+      is-glob: 4.0.3
+      minimatch: 9.0.5
+      semver: 7.6.3
+      ts-api-utils: 1.4.0(typescript@5.6.3)
+      typescript: 5.6.3
+    transitivePeerDependencies:
+      - supports-color
+    dev: true
+
+  /@typescript-eslint/utils@7.18.0(eslint@8.57.1)(typescript@5.6.3):
+    resolution: {integrity: sha512-kK0/rNa2j74XuHVcoCZxdFBMF+aq/vH83CXAOHieC+2Gis4mF8jJXT5eAfyD3K0sAxtPuwxaIOIOvhwzVDt/kw==}
+    engines: {node: ^18.18.0 || >=20.0.0}
+    peerDependencies:
+      eslint: ^8.56.0
+    dependencies:
+      '@eslint-community/eslint-utils': 4.4.1(eslint@8.57.1)
+      '@typescript-eslint/scope-manager': 7.18.0
+      '@typescript-eslint/types': 7.18.0
+      '@typescript-eslint/typescript-estree': 7.18.0(typescript@5.6.3)
+      eslint: 8.57.1
+    transitivePeerDependencies:
+      - supports-color
+      - typescript
+    dev: true
+
+  /@typescript-eslint/visitor-keys@7.18.0:
+    resolution: {integrity: sha512-cDF0/Gf81QpY3xYyJKDV14Zwdmid5+uuENhjH2EqFaF0ni+yAyq/LzMaIJdhNJXZI7uLzwIlA+V7oWoyn6Curg==}
+    engines: {node: ^18.18.0 || >=20.0.0}
+    dependencies:
+      '@typescript-eslint/types': 7.18.0
+      eslint-visitor-keys: 3.4.3
+    dev: true
+
+  /@ungap/structured-clone@1.2.0:
+    resolution: {integrity: sha512-zuVdFrMJiuCDQUMCzQaD6KL28MjnqqN8XnAqiEq9PNm/hCPTSGfrXCOfwj1ow4LFb/tNymJPwsNbVePc1xFqrQ==}
+    dev: true
+
+  /@vercel/ncc@0.38.3:
+    resolution: {integrity: sha512-rnK6hJBS6mwc+Bkab+PGPs9OiS0i/3kdTO+CkI8V0/VrW3vmz7O2Pxjw/owOlmo6PKEIxRSeZKv/kuL9itnpYA==}
+    hasBin: true
+    dev: true
+
+  /@vitest/expect@1.6.0:
+    resolution: {integrity: sha512-ixEvFVQjycy/oNgHjqsL6AZCDduC+tflRluaHIzKIsdbzkLn2U/iBnVeJwB6HsIjQBdfMR8Z0tRxKUsvFJEeWQ==}
+    dependencies:
+      '@vitest/spy': 1.6.0
+      '@vitest/utils': 1.6.0
+      chai: 4.5.0
+    dev: true
+
+  /@vitest/runner@1.6.0:
+    resolution: {integrity: sha512-P4xgwPjwesuBiHisAVz/LSSZtDjOTPYZVmNAnpHHSR6ONrf8eCJOFRvUwdHn30F5M1fxhqtl7QZQUk2dprIXAg==}
+    dependencies:
+      '@vitest/utils': 1.6.0
+      p-limit: 5.0.0
+      pathe: 1.1.2
+    dev: true
+
+  /@vitest/snapshot@1.6.0:
+    resolution: {integrity: sha512-+Hx43f8Chus+DCmygqqfetcAZrDJwvTj0ymqjQq4CvmpKFSTVteEOBzCusu1x2tt4OJcvBflyHUE0DZSLgEMtQ==}
+    dependencies:
+      magic-string: 0.30.13
+      pathe: 1.1.2
+      pretty-format: 29.7.0
+    dev: true
+
+  /@vitest/spy@1.6.0:
+    resolution: {integrity: sha512-leUTap6B/cqi/bQkXUu6bQV5TZPx7pmMBKBQiI0rJA8c3pB56ZsaTbREnF7CJfmvAS4V2cXIBAh/3rVwrrCYgw==}
+    dependencies:
+      tinyspy: 2.2.1
+    dev: true
+
+  /@vitest/utils@1.6.0:
+    resolution: {integrity: sha512-21cPiuGMoMZwiOHa2i4LXkMkMkCGzA+MVFV70jRwHo95dL4x/ts5GZhML1QWuy7yfp3WzK3lRvZi3JnXTYqrBw==}
+    dependencies:
+      diff-sequences: 29.6.3
+      estree-walker: 3.0.3
+      loupe: 2.3.7
+      pretty-format: 29.7.0
+    dev: true
+
+  /abort-controller@3.0.0:
+    resolution: {integrity: sha512-h8lQ8tacZYnR3vNQTgibj+tODHI5/+l06Au2Pcriv/Gmet0eaj4TwWH41sO9wnHDiQsEj19q0drzdWdeAHtweg==}
+    engines: {node: '>=6.5'}
+    dependencies:
+      event-target-shim: 5.0.1
+    dev: false
+
+  /acorn-jsx@5.3.2(acorn@8.14.0):
+    resolution: {integrity: sha512-rq9s+JNhf0IChjtDXxllJ7g41oZk5SlXtp0LHwyA5cejwn7vKmKp4pPri6YEePv2PU65sAsegbXtIinmDFDXgQ==}
+    peerDependencies:
+      acorn: ^6.0.0 || ^7.0.0 || ^8.0.0
+    dependencies:
+      acorn: 8.14.0
+    dev: true
+
+  /acorn-walk@8.3.4:
+    resolution: {integrity: sha512-ueEepnujpqee2o5aIYnvHU6C0A42MNdsIDeqy5BydrkuC5R1ZuUFnm27EeFJGoEHJQgn3uleRvmTXaJgfXbt4g==}
+    engines: {node: '>=0.4.0'}
+    dependencies:
+      acorn: 8.14.0
+    dev: true
+
+  /acorn@8.14.0:
+    resolution: {integrity: sha512-cl669nCJTZBsL97OF4kUQm5g5hC2uihk0NxY3WENAC0TYdILVkAyHymAntgxGkl7K+t0cXIrH5siy5S4XkFycA==}
+    engines: {node: '>=0.4.0'}
+    hasBin: true
+    dev: true
+
+  /agent-base@7.1.1:
+    resolution: {integrity: sha512-H0TSyFNDMomMNJQBn8wFV5YC/2eJ+VXECwOadZJT554xP6cODZHPX3H9QMQECxvrgiSOP1pHjy1sMWQVYJOUOA==}
+    engines: {node: '>= 14'}
+    dependencies:
+      debug: 4.3.7
+    transitivePeerDependencies:
+      - supports-color
+    dev: false
+
+  /ajv@6.12.6:
+    resolution: {integrity: sha512-j3fVLgvTo527anyYyJOGTYJbG+vnnQYvE0m5mmkc1TK+nxAppkCLMIL0aZ4dblVCNoGShhm+kzE4ZUykBoMg4g==}
+    dependencies:
+      fast-deep-equal: 3.1.3
+      fast-json-stable-stringify: 2.1.0
+      json-schema-traverse: 0.4.1
+      uri-js: 4.4.1
+    dev: true
+
+  /ansi-regex@5.0.1:
+    resolution: {integrity: sha512-quJQXlTSUGL2LH9SUXo8VwsY4soanhgo6LNSm84E1LBcE8s3O0wpdiRzyR9z/ZZJMlMWv37qOOb9pdJlMUEKFQ==}
+    engines: {node: '>=8'}
+    dev: true
+
+  /ansi-regex@6.1.0:
+    resolution: {integrity: sha512-7HSX4QQb4CspciLpVFwyRe79O3xsIZDDLER21kERQ71oaPodF8jL725AgJMFAYbooIqolJoRLuM81SpeUkpkvA==}
+    engines: {node: '>=12'}
+    dev: true
+
+  /ansi-styles@4.3.0:
+    resolution: {integrity: sha512-zbB9rCJAT1rbjiVDb2hqKFHNYLxgtk8NURxZ3IZwD3F6NtxbXZQCnnSi1Lkx+IDohdPlFp222wVALIheZJQSEg==}
+    engines: {node: '>=8'}
+    dependencies:
+      color-convert: 2.0.1
+    dev: true
+
+  /ansi-styles@5.2.0:
+    resolution: {integrity: sha512-Cxwpt2SfTzTtXcfOlzGEee8O+c+MmUgGrNiBcXnuWxuFJHe6a5Hz7qwhwe5OgaSYI0IJvkLqWX1ASG+cJOkEiA==}
+    engines: {node: '>=10'}
+    dev: true
+
+  /ansi-styles@6.2.1:
+    resolution: {integrity: sha512-bN798gFfQX+viw3R7yrGWRqnrN2oRkEkUjjl4JNn4E8GxxbjtG3FbrEIIY3l8/hrwUwIeCZvi4QuOTP4MErVug==}
+    engines: {node: '>=12'}
+    dev: true
+
+  /any-promise@1.3.0:
+    resolution: {integrity: sha512-7UvmKalWRt1wgjL1RrGxoSJW/0QZFIegpeGvZG9kjp8vrRu55XTHbwnqq2GpXm9uLbcuhxm3IqX9OB4MZR1b2A==}
+    dev: true
+
+  /argparse@2.0.1:
+    resolution: {integrity: sha512-8+9WqebbFzpX9OR+Wa6O29asIogeRMzcGtAINdpMHHyAg10f05aSFVBbcEqGf/PXw1EjAZ+q2/bEBg3DvurK3Q==}
+    dev: true
+
+  /aria-query@5.3.2:
+    resolution: {integrity: sha512-COROpnaoap1E2F000S62r6A60uHZnmlvomhfyT2DlTcrY1OrBKn2UhH7qn5wTC9zMvD0AY7csdPSNwKP+7WiQw==}
+    engines: {node: '>= 0.4'}
+    dev: true
+
+  /array-buffer-byte-length@1.0.1:
+    resolution: {integrity: sha512-ahC5W1xgou+KTXix4sAO8Ki12Q+jf4i0+tmk3sC+zgcynshkHxzpXdImBehiUYKKKDwvfFiJl1tZt6ewscS1Mg==}
+    engines: {node: '>= 0.4'}
+    dependencies:
+      call-bind: 1.0.7
+      is-array-buffer: 3.0.4
+    dev: true
+
+  /array-includes@3.1.8:
+    resolution: {integrity: sha512-itaWrbYbqpGXkGhZPGUulwnhVf5Hpy1xiCFsGqyIGglbBxmG5vSjxQen3/WGOjPpNEv1RtBLKxbmVXm8HpJStQ==}
+    engines: {node: '>= 0.4'}
+    dependencies:
+      call-bind: 1.0.7
+      define-properties: 1.2.1
+      es-abstract: 1.23.5
+      es-object-atoms: 1.0.0
+      get-intrinsic: 1.2.4
+      is-string: 1.0.7
+    dev: true
+
+  /array-union@2.1.0:
+    resolution: {integrity: sha512-HGyxoOTYUyCM6stUe6EJgnd4EoewAI7zMdfqO+kGjnlZmBDz/cR5pf8r/cR4Wq60sL/p0IkcjUEEPwS3GFrIyw==}
+    engines: {node: '>=8'}
+    dev: true
+
+  /array.prototype.findlastindex@1.2.5:
+    resolution: {integrity: sha512-zfETvRFA8o7EiNn++N5f/kaCw221hrpGsDmcpndVupkPzEc1Wuf3VgC0qby1BbHs7f5DVYjgtEU2LLh5bqeGfQ==}
+    engines: {node: '>= 0.4'}
+    dependencies:
+      call-bind: 1.0.7
+      define-properties: 1.2.1
+      es-abstract: 1.23.5
+      es-errors: 1.3.0
+      es-object-atoms: 1.0.0
+      es-shim-unscopables: 1.0.2
+    dev: true
+
+  /array.prototype.flat@1.3.2:
+    resolution: {integrity: sha512-djYB+Zx2vLewY8RWlNCUdHjDXs2XOgm602S9E7P/UpHgfeHL00cRiIF+IN/G/aUJ7kGPb6yO/ErDI5V2s8iycA==}
+    engines: {node: '>= 0.4'}
+    dependencies:
+      call-bind: 1.0.7
+      define-properties: 1.2.1
+      es-abstract: 1.23.5
+      es-shim-unscopables: 1.0.2
+    dev: true
+
+  /array.prototype.flatmap@1.3.2:
+    resolution: {integrity: sha512-Ewyx0c9PmpcsByhSW4r+9zDU7sGjFc86qf/kKtuSCRdhfbk0SNLLkaT5qvcHnRGgc5NP/ly/y+qkXkqONX54CQ==}
+    engines: {node: '>= 0.4'}
+    dependencies:
+      call-bind: 1.0.7
+      define-properties: 1.2.1
+      es-abstract: 1.23.5
+      es-shim-unscopables: 1.0.2
+    dev: true
+
+  /arraybuffer.prototype.slice@1.0.3:
+    resolution: {integrity: sha512-bMxMKAjg13EBSVscxTaYA4mRc5t1UAXa2kXiGTNfZ079HIWXEkKmkgFrh/nJqamaLSrXO5H4WFFkPEaLJWbs3A==}
+    engines: {node: '>= 0.4'}
+    dependencies:
+      array-buffer-byte-length: 1.0.1
+      call-bind: 1.0.7
+      define-properties: 1.2.1
+      es-abstract: 1.23.5
+      es-errors: 1.3.0
+      get-intrinsic: 1.2.4
+      is-array-buffer: 3.0.4
+      is-shared-array-buffer: 1.0.3
+    dev: true
+
+  /assertion-error@1.1.0:
+    resolution: {integrity: sha512-jgsaNduz+ndvGyFt3uSuWqvy4lCnIJiovtouQN5JZHOKCS2QuhEdbcQHFhVksz2N2U9hXJo8odG7ETyWlEeuDw==}
+    dev: true
+
+  /ast-types-flow@0.0.8:
+    resolution: {integrity: sha512-OH/2E5Fg20h2aPrbe+QL8JZQFko0YZaF+j4mnQ7BGhfavO7OpSLa8a0y9sBwomHdSbkhTS8TQNayBfnW5DwbvQ==}
+    dev: true
+
+  /asynckit@0.4.0:
+    resolution: {integrity: sha512-Oei9OH4tRh0YqU3GxhX79dM/mwVgvbZJaSNaRk+bshkj0S5cfHcgYakreBjrHwatXKbz+IoIdYLxrKim2MjW0Q==}
+    dev: false
+
+  /available-typed-arrays@1.0.7:
+    resolution: {integrity: sha512-wvUjBtSGN7+7SjNpq/9M2Tg350UZD3q62IFZLbRAR1bSMlCo1ZaeW+BJ+D090e4hIIZLBcTDWe4Mh4jvUDajzQ==}
+    engines: {node: '>= 0.4'}
+    dependencies:
+      possible-typed-array-names: 1.0.0
+    dev: true
+
+  /axe-core@4.10.2:
+    resolution: {integrity: sha512-RE3mdQ7P3FRSe7eqCWoeQ/Z9QXrtniSjp1wUjt5nRC3WIpz5rSCve6o3fsZ2aCpJtrZjSZgjwXAoTO5k4tEI0w==}
+    engines: {node: '>=4'}
+    dev: true
+
+  /axobject-query@4.1.0:
+    resolution: {integrity: sha512-qIj0G9wZbMGNLjLmg1PT6v2mE9AH2zlnADJD/2tC6E00hgmhUOfEB6greHPAfLRSufHqROIUTkw6E+M3lH0PTQ==}
+    engines: {node: '>= 0.4'}
+    dev: true
+
+  /balanced-match@1.0.2:
+    resolution: {integrity: sha512-3oSeUO0TMV67hN1AmbXsK4yaqU7tjiHlbxRDZOpH0KW9+CeX4bRAaX0Anxt0tx2MrpRpWwQaPwIlISEJhYU5Pw==}
+
+  /brace-expansion@1.1.11:
+    resolution: {integrity: sha512-iCuPHDFgrHX7H2vEI/5xpz07zSHB00TpugqhmYtVmMO6518mCuRMoOYFldEBl0g187ufozdaHgWKcYFb61qGiA==}
+    dependencies:
+      balanced-match: 1.0.2
+      concat-map: 0.0.1
+
+  /brace-expansion@2.0.1:
+    resolution: {integrity: sha512-XnAIvQ8eM+kC6aULx6wuQiwVsnzsi9d3WxzV3FpWTGA19F621kwdbsAcFKXgKUHZWsy+mY6iL1sHTxWEFCytDA==}
+    dependencies:
+      balanced-match: 1.0.2
+    dev: true
+
+  /braces@3.0.3:
+    resolution: {integrity: sha512-yQbXgO/OSZVD2IsiLlro+7Hf6Q18EJrKSEsdoMzKePKXct3gvD8oLcOQdIzGupr5Fj+EDe8gO/lxc1BzfMpxvA==}
+    engines: {node: '>=8'}
+    dependencies:
+      fill-range: 7.1.1
+    dev: true
+
+  /browserslist@4.24.2:
+    resolution: {integrity: sha512-ZIc+Q62revdMcqC6aChtW4jz3My3klmCO1fEmINZY/8J3EpBg5/A/D0AKmBveUh6pgoeycoMkVMko84tuYS+Gg==}
+    engines: {node: ^6 || ^7 || ^8 || ^9 || ^10 || ^11 || ^12 || >=13.7}
+    hasBin: true
+    dependencies:
+      caniuse-lite: 1.0.30001680
+      electron-to-chromium: 1.5.63
+      node-releases: 2.0.18
+      update-browserslist-db: 1.1.1(browserslist@4.24.2)
+    dev: true
+
+  /bundle-require@5.0.0(esbuild@0.24.0):
+    resolution: {integrity: sha512-GuziW3fSSmopcx4KRymQEJVbZUfqlCqcq7dvs6TYwKRZiegK/2buMxQTPs6MGlNv50wms1699qYO54R8XfRX4w==}
+    engines: {node: ^12.20.0 || ^14.13.1 || >=16.0.0}
+    peerDependencies:
+      esbuild: '>=0.18'
+    dependencies:
+      esbuild: 0.24.0
+      load-tsconfig: 0.2.5
+    dev: true
+
+  /cac@6.7.14:
+    resolution: {integrity: sha512-b6Ilus+c3RrdDk+JhLKUAQfzzgLEPy6wcXqS7f/xe1EETvsDP6GORG7SFuOs6cID5YkqchW/LXZbX5bc8j7ZcQ==}
+    engines: {node: '>=8'}
+    dev: true
+
+  /cacheable-lookup@7.0.0:
+    resolution: {integrity: sha512-+qJyx4xiKra8mZrcwhjMRMUhD5NR1R8esPkzIYxX96JiecFoxAXFuz/GpR3+ev4PE1WamHip78wV0vcmPQtp8w==}
+    engines: {node: '>=14.16'}
+    dev: false
+
+  /cacheable-request@12.0.1:
+    resolution: {integrity: sha512-Yo9wGIQUaAfIbk+qY0X4cDQgCosecfBe3V9NSyeY4qPC2SAkbCS4Xj79VP8WOzitpJUZKc/wsRCYF5ariDIwkg==}
+    engines: {node: '>=18'}
+    dependencies:
+      '@types/http-cache-semantics': 4.0.4
+      get-stream: 9.0.1
+      http-cache-semantics: 4.1.1
+      keyv: 4.5.4
+      mimic-response: 4.0.0
+      normalize-url: 8.0.1
+      responselike: 3.0.0
+    dev: false
+
+  /call-bind@1.0.7:
+    resolution: {integrity: sha512-GHTSNSYICQ7scH7sZ+M2rFopRoLh8t2bLSW6BbgrtLsahOIB5iyAVJf9GjWK3cYTDaMj4XdBpM1cA6pIS0Kv2w==}
+    engines: {node: '>= 0.4'}
+    dependencies:
+      es-define-property: 1.0.0
+      es-errors: 1.3.0
+      function-bind: 1.1.2
+      get-intrinsic: 1.2.4
+      set-function-length: 1.2.2
+    dev: true
+
+  /callsites@3.1.0:
+    resolution: {integrity: sha512-P8BjAsXvZS+VIDUI11hHCQEv74YT67YUi5JJFNWIqL235sBmjX4+qx9Muvls5ivyNENctx46xQLQ3aTuE7ssaQ==}
+    engines: {node: '>=6'}
+    dev: true
+
+  /caniuse-lite@1.0.30001680:
+    resolution: {integrity: sha512-rPQy70G6AGUMnbwS1z6Xg+RkHYPAi18ihs47GH0jcxIG7wArmPgY3XbS2sRdBbxJljp3thdT8BIqv9ccCypiPA==}
+    dev: true
+
+  /chai@4.5.0:
+    resolution: {integrity: sha512-RITGBfijLkBddZvnn8jdqoTypxvqbOLYQkGGxXzeFjVHvudaPw0HNFD9x928/eUwYWd2dPCugVqspGALTZZQKw==}
+    engines: {node: '>=4'}
+    dependencies:
+      assertion-error: 1.1.0
+      check-error: 1.0.3
+      deep-eql: 4.1.4
+      get-func-name: 2.0.2
+      loupe: 2.3.7
+      pathval: 1.1.1
+      type-detect: 4.1.0
+    dev: true
+
+  /chalk@4.1.2:
+    resolution: {integrity: sha512-oKnbhFyRIXpUuez8iBMmyEa4nbj4IOQyuhc/wy9kY7/WVPcwIO9VA668Pu8RkO7+0G76SLROeyw9CpQ061i4mA==}
+    engines: {node: '>=10'}
+    dependencies:
+      ansi-styles: 4.3.0
+      supports-color: 7.2.0
+    dev: true
+
+  /check-error@1.0.3:
+    resolution: {integrity: sha512-iKEoDYaRmd1mxM90a2OEfWhjsjPpYPuQ+lMYsoxB126+t8fw7ySEO48nmDg5COTjxDI65/Y2OWpeEHk3ZOe8zg==}
+    dependencies:
+      get-func-name: 2.0.2
+    dev: true
+
+  /chokidar@4.0.1:
+    resolution: {integrity: sha512-n8enUVCED/KVRQlab1hr3MVpcVMvxtZjmEa956u+4YijlmQED223XMSYj2tLuKvr4jcCTzNNMpQDUer72MMmzA==}
+    engines: {node: '>= 14.16.0'}
+    dependencies:
+      readdirp: 4.0.2
+    dev: true
+
+  /color-convert@2.0.1:
+    resolution: {integrity: sha512-RRECPsj7iu/xb5oKYcsFHSppFNnsj/52OVTRKb4zP5onXwVF3zVmmToNcOfGC+CRDpfK/U584fMg38ZHCaElKQ==}
+    engines: {node: '>=7.0.0'}
+    dependencies:
+      color-name: 1.1.4
+    dev: true
+
+  /color-name@1.1.4:
+    resolution: {integrity: sha512-dOy+3AuW3a2wNbZHIuMZpTcgjGuLU/uBL/ubcZF9OXbDo8ff4O8yVp5Bf0efS8uEoYo5q4Fx7dY9OgQGXgAsQA==}
+    dev: true
+
+  /combined-stream@1.0.8:
+    resolution: {integrity: sha512-FQN4MRfuJeHf7cBbBMJFXhKSDq+2kAArBlmRBvcvFE5BB1HZKXtSFASDhdlz9zOYwxh8lDdnvmMOe/+5cdoEdg==}
+    engines: {node: '>= 0.8'}
+    dependencies:
+      delayed-stream: 1.0.0
+    dev: false
+
+  /commander@4.1.1:
+    resolution: {integrity: sha512-NOKm8xhkzAjzFx8B2v5OAHT+u5pRQc2UCa2Vq9jYL/31o2wi9mxBA7LIFs3sV5VSC49z6pEhfbMULvShKj26WA==}
+    engines: {node: '>= 6'}
+    dev: true
+
+  /concat-map@0.0.1:
+    resolution: {integrity: sha512-/Srv4dswyQNBfohGpz9o6Yb3Gz3SrUDqBH5rTuhGR7ahtlbYKnVxw2bCFMRljaA7EXHaXZ8wsHdodFvbkhKmqg==}
+
+  /confbox@0.1.8:
+    resolution: {integrity: sha512-RMtmw0iFkeR4YV+fUOSucriAQNb9g8zFR52MWCtl+cCZOFRNL6zeB395vPzFhEjjn4fMxXudmELnl/KF/WrK6w==}
+    dev: true
+
+  /consola@3.2.3:
+    resolution: {integrity: sha512-I5qxpzLv+sJhTVEoLYNcTW+bThDCPsit0vLNKShZx6rLtpilNpmmeTPaeqJb9ZE9dV3DGaeby6Vuhrw38WjeyQ==}
+    engines: {node: ^14.18.0 || >=16.10.0}
+    dev: true
+
+  /cross-spawn@7.0.6:
+    resolution: {integrity: sha512-uV2QOWP2nWzsy2aMp8aRibhi9dlzF5Hgh5SHaB9OiTGEyDTiJJyx0uy51QXdyWbtAHNua4XJzUKca3OzKUd3vA==}
+    engines: {node: '>= 8'}
+    dependencies:
+      path-key: 3.1.1
+      shebang-command: 2.0.0
+      which: 2.0.2
+    dev: true
+
+  /damerau-levenshtein@1.0.8:
+    resolution: {integrity: sha512-sdQSFB7+llfUcQHUQO3+B8ERRj0Oa4w9POWMI/puGtuf7gFywGmkaLCElnudfTiKZV+NvHqL0ifzdrI8Ro7ESA==}
+    dev: true
+
+  /data-view-buffer@1.0.1:
+    resolution: {integrity: sha512-0lht7OugA5x3iJLOWFhWK/5ehONdprk0ISXqVFn/NFrDu+cuc8iADFrGQz5BnRK7LLU3JmkbXSxaqX+/mXYtUA==}
+    engines: {node: '>= 0.4'}
+    dependencies:
+      call-bind: 1.0.7
+      es-errors: 1.3.0
+      is-data-view: 1.0.1
+    dev: true
+
+  /data-view-byte-length@1.0.1:
+    resolution: {integrity: sha512-4J7wRJD3ABAzr8wP+OcIcqq2dlUKp4DVflx++hs5h5ZKydWMI6/D/fAot+yh6g2tHh8fLFTvNOaVN357NvSrOQ==}
+    engines: {node: '>= 0.4'}
+    dependencies:
+      call-bind: 1.0.7
+      es-errors: 1.3.0
+      is-data-view: 1.0.1
+    dev: true
+
+  /data-view-byte-offset@1.0.0:
+    resolution: {integrity: sha512-t/Ygsytq+R995EJ5PZlD4Cu56sWa8InXySaViRzw9apusqsOO2bQP+SbYzAhR0pFKoB+43lYy8rWban9JSuXnA==}
+    engines: {node: '>= 0.4'}
+    dependencies:
+      call-bind: 1.0.7
+      es-errors: 1.3.0
+      is-data-view: 1.0.1
+    dev: true
+
+  /debug@3.2.7:
+    resolution: {integrity: sha512-CFjzYYAi4ThfiQvizrFQevTTXHtnCqWfe7x1AhgEscTz6ZbLbfoLRLPugTQyBth6f8ZERVUSyWHFD/7Wu4t1XQ==}
+    peerDependencies:
+      supports-color: '*'
+    peerDependenciesMeta:
+      supports-color:
+        optional: true
+    dependencies:
+      ms: 2.1.3
+    dev: true
+
+  /debug@4.3.7:
+    resolution: {integrity: sha512-Er2nc/H7RrMXZBFCEim6TCmMk02Z8vLC2Rbi1KEBggpo0fS6l0S1nnapwmIi3yW/+GOJap1Krg4w0Hg80oCqgQ==}
+    engines: {node: '>=6.0'}
+    peerDependencies:
+      supports-color: '*'
+    peerDependenciesMeta:
+      supports-color:
+        optional: true
+    dependencies:
+      ms: 2.1.3
+
+  /decompress-response@6.0.0:
+    resolution: {integrity: sha512-aW35yZM6Bb/4oJlZncMH2LCoZtJXTRxES17vE3hoRiowU2kWHaJKFkSBDnDR+cm9J+9QhXmREyIfv0pji9ejCQ==}
+    engines: {node: '>=10'}
+    dependencies:
+      mimic-response: 3.1.0
+    dev: false
+
+  /deep-eql@4.1.4:
+    resolution: {integrity: sha512-SUwdGfqdKOwxCPeVYjwSyRpJ7Z+fhpwIAtmCUdZIWZ/YP5R9WAsyuSgpLVDi9bjWoN2LXHNss/dk3urXtdQxGg==}
+    engines: {node: '>=6'}
+    dependencies:
+      type-detect: 4.1.0
+    dev: true
+
+  /deep-is@0.1.4:
+    resolution: {integrity: sha512-oIPzksmTg4/MriiaYGO+okXDT7ztn/w3Eptv/+gSIdMdKsJo0u4CfYNFJPy+4SKMuCqGw2wxnA+URMg3t8a/bQ==}
+    dev: true
+
+  /defer-to-connect@2.0.1:
+    resolution: {integrity: sha512-4tvttepXG1VaYGrRibk5EwJd1t4udunSOVMdLSAL6mId1ix438oPwPZMALY41FCijukO1L0twNcGsdzS7dHgDg==}
+    engines: {node: '>=10'}
+    dev: false
+
+  /define-data-property@1.1.4:
+    resolution: {integrity: sha512-rBMvIzlpA8v6E+SJZoo++HAYqsLrkg7MSfIinMPFhmkorw7X+dOXVJQs+QT69zGkzMyfDnIMN2Wid1+NbL3T+A==}
+    engines: {node: '>= 0.4'}
+    dependencies:
+      es-define-property: 1.0.0
+      es-errors: 1.3.0
+      gopd: 1.0.1
+    dev: true
+
+  /define-properties@1.2.1:
+    resolution: {integrity: sha512-8QmQKqEASLd5nx0U1B1okLElbUuuttJ/AnYmRXbbbGDWh6uS208EjD4Xqq/I9wK7u0v6O08XhTWnt5XtEbR6Dg==}
+    engines: {node: '>= 0.4'}
+    dependencies:
+      define-data-property: 1.1.4
+      has-property-descriptors: 1.0.2
+      object-keys: 1.1.1
+    dev: true
+
+  /delayed-stream@1.0.0:
+    resolution: {integrity: sha512-ZySD7Nf91aLB0RxL4KGrKHBXl7Eds1DAmEdcoVawXnLD7SDhpNgtuII2aAkg7a7QS41jxPSZ17p4VdGnMHk3MQ==}
+    engines: {node: '>=0.4.0'}
+    dev: false
+
+  /diff-sequences@29.6.3:
+    resolution: {integrity: sha512-EjePK1srD3P08o2j4f0ExnylqRs5B9tJjcp9t1krH2qRi8CCdsYfwe9JgSLurFBWwq4uOlipzfk5fHNvwFKr8Q==}
+    engines: {node: ^14.15.0 || ^16.10.0 || >=18.0.0}
+    dev: true
+
+  /dir-glob@3.0.1:
+    resolution: {integrity: sha512-WkrWp9GR4KXfKGYzOLmTuGVi1UWFfws377n9cc55/tb6DuqyF6pcQ5AbiHEshaDpY9v6oaSr2XCDidGmMwdzIA==}
+    engines: {node: '>=8'}
+    dependencies:
+      path-type: 4.0.0
+    dev: true
+
+  /doctrine@2.1.0:
+    resolution: {integrity: sha512-35mSku4ZXK0vfCuHEDAwt55dg2jNajHZ1odvF+8SSr82EsZY4QmXfuWso8oEd8zRhVObSN18aM0CjSdoBX7zIw==}
+    engines: {node: '>=0.10.0'}
+    dependencies:
+      esutils: 2.0.3
+    dev: true
+
+  /doctrine@3.0.0:
+    resolution: {integrity: sha512-yS+Q5i3hBf7GBkd4KG8a7eBNNWNGLTaEwwYWUijIYM7zrlYDM0BFXHjjPWlWZ1Rg7UaddZeIDmi9jF3HmqiQ2w==}
+    engines: {node: '>=6.0.0'}
+    dependencies:
+      esutils: 2.0.3
+    dev: true
+
+  /eastasianwidth@0.2.0:
+    resolution: {integrity: sha512-I88TYZWc9XiYHRQ4/3c5rjjfgkjhLyW2luGIheGERbNQ6OY7yTybanSpDXZa8y7VUP9YmDcYa+eyq4ca7iLqWA==}
+    dev: true
+
+  /electron-to-chromium@1.5.63:
+    resolution: {integrity: sha512-ddeXKuY9BHo/mw145axlyWjlJ1UBt4WK3AlvkT7W2AbqfRQoacVoRUCF6wL3uIx/8wT9oLKXzI+rFqHHscByaA==}
+    dev: true
+
+  /emoji-regex@8.0.0:
+    resolution: {integrity: sha512-MSjYzcWNOA0ewAHpz0MxpYFvwg6yjy1NG3xteoqz644VCo/RPgnr1/GGt+ic3iJTzQ8Eu3TdM14SawnVUmGE6A==}
+    dev: true
+
+  /emoji-regex@9.2.2:
+    resolution: {integrity: sha512-L18DaJsXSUk2+42pv8mLs5jJT2hqFkFE4j21wOmgbUqsZ2hL72NsUU785g9RXgo3s0ZNgVl42TiHp3ZtOv/Vyg==}
+    dev: true
+
+  /enhanced-resolve@5.17.1:
+    resolution: {integrity: sha512-LMHl3dXhTcfv8gM4kEzIUeTQ+7fpdA0l2tUf34BddXPkz2A5xJ5L/Pchd5BL6rdccM9QGvu0sWZzK1Z1t4wwyg==}
+    engines: {node: '>=10.13.0'}
+    dependencies:
+      graceful-fs: 4.2.11
+      tapable: 2.2.1
+    dev: true
+
+  /es-abstract@1.23.5:
+    resolution: {integrity: sha512-vlmniQ0WNPwXqA0BnmwV3Ng7HxiGlh6r5U6JcTMNx8OilcAGqVJBHJcPjqOMaczU9fRuRK5Px2BdVyPRnKMMVQ==}
+    engines: {node: '>= 0.4'}
+    dependencies:
+      array-buffer-byte-length: 1.0.1
+      arraybuffer.prototype.slice: 1.0.3
+      available-typed-arrays: 1.0.7
+      call-bind: 1.0.7
+      data-view-buffer: 1.0.1
+      data-view-byte-length: 1.0.1
+      data-view-byte-offset: 1.0.0
+      es-define-property: 1.0.0
+      es-errors: 1.3.0
+      es-object-atoms: 1.0.0
+      es-set-tostringtag: 2.0.3
+      es-to-primitive: 1.2.1
+      function.prototype.name: 1.1.6
+      get-intrinsic: 1.2.4
+      get-symbol-description: 1.0.2
+      globalthis: 1.0.4
+      gopd: 1.0.1
+      has-property-descriptors: 1.0.2
+      has-proto: 1.0.3
+      has-symbols: 1.0.3
+      hasown: 2.0.2
+      internal-slot: 1.0.7
+      is-array-buffer: 3.0.4
+      is-callable: 1.2.7
+      is-data-view: 1.0.1
+      is-negative-zero: 2.0.3
+      is-regex: 1.1.4
+      is-shared-array-buffer: 1.0.3
+      is-string: 1.0.7
+      is-typed-array: 1.1.13
+      is-weakref: 1.0.2
+      object-inspect: 1.13.3
+      object-keys: 1.1.1
+      object.assign: 4.1.5
+      regexp.prototype.flags: 1.5.3
+      safe-array-concat: 1.1.2
+      safe-regex-test: 1.0.3
+      string.prototype.trim: 1.2.9
+      string.prototype.trimend: 1.0.8
+      string.prototype.trimstart: 1.0.8
+      typed-array-buffer: 1.0.2
+      typed-array-byte-length: 1.0.1
+      typed-array-byte-offset: 1.0.2
+      typed-array-length: 1.0.6
+      unbox-primitive: 1.0.2
+      which-typed-array: 1.1.15
+    dev: true
+
+  /es-define-property@1.0.0:
+    resolution: {integrity: sha512-jxayLKShrEqqzJ0eumQbVhTYQM27CfT1T35+gCgDFoL82JLsXqTJ76zv6A0YLOgEnLUMvLzsDsGIrl8NFpT2gQ==}
+    engines: {node: '>= 0.4'}
+    dependencies:
+      get-intrinsic: 1.2.4
+    dev: true
+
+  /es-errors@1.3.0:
+    resolution: {integrity: sha512-Zf5H2Kxt2xjTvbJvP2ZWLEICxA6j+hAmMzIlypy4xcBg1vKVnx89Wy0GbS+kf5cwCVFFzdCFh2XSCFNULS6csw==}
+    engines: {node: '>= 0.4'}
+    dev: true
+
+  /es-object-atoms@1.0.0:
+    resolution: {integrity: sha512-MZ4iQ6JwHOBQjahnjwaC1ZtIBH+2ohjamzAO3oaHcXYup7qxjF2fixyH+Q71voWHeOkI2q/TnJao/KfXYIZWbw==}
+    engines: {node: '>= 0.4'}
+    dependencies:
+      es-errors: 1.3.0
+    dev: true
+
+  /es-set-tostringtag@2.0.3:
+    resolution: {integrity: sha512-3T8uNMC3OQTHkFUsFq8r/BwAXLHvU/9O9mE0fBc/MY5iq/8H7ncvO947LmYA6ldWw9Uh8Yhf25zu6n7nML5QWQ==}
+    engines: {node: '>= 0.4'}
+    dependencies:
+      get-intrinsic: 1.2.4
+      has-tostringtag: 1.0.2
+      hasown: 2.0.2
+    dev: true
+
+  /es-shim-unscopables@1.0.2:
+    resolution: {integrity: sha512-J3yBRXCzDu4ULnQwxyToo/OjdMx6akgVC7K6few0a7F/0wLtmKKN7I73AH5T2836UuXRqN7Qg+IIUw/+YJksRw==}
+    dependencies:
+      hasown: 2.0.2
+    dev: true
+
+  /es-to-primitive@1.2.1:
+    resolution: {integrity: sha512-QCOllgZJtaUo9miYBcLChTUaHNjJF3PYs1VidD7AwiEj1kYxKeQTctLAezAOH5ZKRH0g2IgPn6KwB4IT8iRpvA==}
+    engines: {node: '>= 0.4'}
+    dependencies:
+      is-callable: 1.2.7
+      is-date-object: 1.0.5
+      is-symbol: 1.0.4
+    dev: true
+
+  /esbuild@0.21.5:
+    resolution: {integrity: sha512-mg3OPMV4hXywwpoDxu3Qda5xCKQi+vCTZq8S9J/EpkhB2HzKXq4SNFZE3+NK93JYxc8VMSep+lOUSC/RVKaBqw==}
+    engines: {node: '>=12'}
+    hasBin: true
+    requiresBuild: true
+    optionalDependencies:
+      '@esbuild/aix-ppc64': 0.21.5
+      '@esbuild/android-arm': 0.21.5
+      '@esbuild/android-arm64': 0.21.5
+      '@esbuild/android-x64': 0.21.5
+      '@esbuild/darwin-arm64': 0.21.5
+      '@esbuild/darwin-x64': 0.21.5
+      '@esbuild/freebsd-arm64': 0.21.5
+      '@esbuild/freebsd-x64': 0.21.5
+      '@esbuild/linux-arm': 0.21.5
+      '@esbuild/linux-arm64': 0.21.5
+      '@esbuild/linux-ia32': 0.21.5
+      '@esbuild/linux-loong64': 0.21.5
+      '@esbuild/linux-mips64el': 0.21.5
+      '@esbuild/linux-ppc64': 0.21.5
+      '@esbuild/linux-riscv64': 0.21.5
+      '@esbuild/linux-s390x': 0.21.5
+      '@esbuild/linux-x64': 0.21.5
+      '@esbuild/netbsd-x64': 0.21.5
+      '@esbuild/openbsd-x64': 0.21.5
+      '@esbuild/sunos-x64': 0.21.5
+      '@esbuild/win32-arm64': 0.21.5
+      '@esbuild/win32-ia32': 0.21.5
+      '@esbuild/win32-x64': 0.21.5
+    dev: true
+
+  /esbuild@0.24.0:
+    resolution: {integrity: sha512-FuLPevChGDshgSicjisSooU0cemp/sGXR841D5LHMB7mTVOmsEHcAxaH3irL53+8YDIeVNQEySh4DaYU/iuPqQ==}
+    engines: {node: '>=18'}
+    hasBin: true
+    requiresBuild: true
+    optionalDependencies:
+      '@esbuild/aix-ppc64': 0.24.0
+      '@esbuild/android-arm': 0.24.0
+      '@esbuild/android-arm64': 0.24.0
+      '@esbuild/android-x64': 0.24.0
+      '@esbuild/darwin-arm64': 0.24.0
+      '@esbuild/darwin-x64': 0.24.0
+      '@esbuild/freebsd-arm64': 0.24.0
+      '@esbuild/freebsd-x64': 0.24.0
+      '@esbuild/linux-arm': 0.24.0
+      '@esbuild/linux-arm64': 0.24.0
+      '@esbuild/linux-ia32': 0.24.0
+      '@esbuild/linux-loong64': 0.24.0
+      '@esbuild/linux-mips64el': 0.24.0
+      '@esbuild/linux-ppc64': 0.24.0
+      '@esbuild/linux-riscv64': 0.24.0
+      '@esbuild/linux-s390x': 0.24.0
+      '@esbuild/linux-x64': 0.24.0
+      '@esbuild/netbsd-x64': 0.24.0
+      '@esbuild/openbsd-arm64': 0.24.0
+      '@esbuild/openbsd-x64': 0.24.0
+      '@esbuild/sunos-x64': 0.24.0
+      '@esbuild/win32-arm64': 0.24.0
+      '@esbuild/win32-ia32': 0.24.0
+      '@esbuild/win32-x64': 0.24.0
+    dev: true
+
+  /escalade@3.2.0:
+    resolution: {integrity: sha512-WUj2qlxaQtO4g6Pq5c29GTcWGDyd8itL8zTlipgECz3JesAiiOKotd8JU6otB3PACgG6xkJUyVhboMS+bje/jA==}
+    engines: {node: '>=6'}
+    dev: true
+
+  /escape-string-regexp@1.0.5:
+    resolution: {integrity: sha512-vbRorB5FUQWvla16U8R/qgaFIya2qGzwDrNmCZuYKrbdSUMG6I1ZCGQRefkRVhuOkIGVne7BQ35DSfo1qvJqFg==}
+    engines: {node: '>=0.8.0'}
+    dev: true
+
+  /escape-string-regexp@4.0.0:
+    resolution: {integrity: sha512-TtpcNJ3XAzx3Gq8sWRzJaVajRs0uVxA2YAkdb1jm2YkPz4G6egUFAyA3n5vtEIZefPk5Wa4UXbKuS5fKkJWdgA==}
+    engines: {node: '>=10'}
+    dev: true
+
+  /eslint-config-prettier@9.1.0(eslint@8.57.1):
+    resolution: {integrity: sha512-NSWl5BFQWEPi1j4TjVNItzYV7dZXZ+wP6I6ZhrBGpChQhZRUaElihE9uRRkcbRnNb76UMKDF3r+WTmNcGPKsqw==}
+    hasBin: true
+    peerDependencies:
+      eslint: '>=7.0.0'
+    dependencies:
+      eslint: 8.57.1
+    dev: true
+
+  /eslint-import-resolver-node@0.3.9:
+    resolution: {integrity: sha512-WFj2isz22JahUv+B788TlO3N6zL3nNJGU8CcZbPZvVEkBPaJdCV4vy5wyghty5ROFbCRnm132v8BScu5/1BQ8g==}
+    dependencies:
+      debug: 3.2.7
+      is-core-module: 2.15.1
+      resolve: 1.22.8
+    transitivePeerDependencies:
+      - supports-color
+    dev: true
+
+  /eslint-import-resolver-typescript@3.6.3(@typescript-eslint/parser@7.18.0)(eslint-plugin-import@2.31.0)(eslint@8.57.1):
+    resolution: {integrity: sha512-ud9aw4szY9cCT1EWWdGv1L1XR6hh2PaRWif0j2QjQ0pgTY/69iw+W0Z4qZv5wHahOl8isEr+k/JnyAqNQkLkIA==}
+    engines: {node: ^14.18.0 || >=16.0.0}
+    peerDependencies:
+      eslint: '*'
+      eslint-plugin-import: '*'
+      eslint-plugin-import-x: '*'
+    peerDependenciesMeta:
+      eslint-plugin-import:
+        optional: true
+      eslint-plugin-import-x:
+        optional: true
+    dependencies:
+      '@nolyfill/is-core-module': 1.0.39
+      debug: 4.3.7
+      enhanced-resolve: 5.17.1
+      eslint: 8.57.1
+      eslint-module-utils: 2.12.0(@typescript-eslint/parser@7.18.0)(eslint-import-resolver-node@0.3.9)(eslint-import-resolver-typescript@3.6.3)(eslint@8.57.1)
+      eslint-plugin-import: 2.31.0(@typescript-eslint/parser@7.18.0)(eslint-import-resolver-typescript@3.6.3)(eslint@8.57.1)
+      fast-glob: 3.3.2
+      get-tsconfig: 4.8.1
+      is-bun-module: 1.2.1
+      is-glob: 4.0.3
+    transitivePeerDependencies:
+      - '@typescript-eslint/parser'
+      - eslint-import-resolver-node
+      - eslint-import-resolver-webpack
+      - supports-color
+    dev: true
+
+  /eslint-module-utils@2.12.0(@typescript-eslint/parser@7.18.0)(eslint-import-resolver-node@0.3.9)(eslint-import-resolver-typescript@3.6.3)(eslint@8.57.1):
+    resolution: {integrity: sha512-wALZ0HFoytlyh/1+4wuZ9FJCD/leWHQzzrxJ8+rebyReSLk7LApMyd3WJaLVoN+D5+WIdJyDK1c6JnE65V4Zyg==}
+    engines: {node: '>=4'}
+    peerDependencies:
+      '@typescript-eslint/parser': '*'
+      eslint: '*'
+      eslint-import-resolver-node: '*'
+      eslint-import-resolver-typescript: '*'
+      eslint-import-resolver-webpack: '*'
+    peerDependenciesMeta:
+      '@typescript-eslint/parser':
+        optional: true
+      eslint:
+        optional: true
+      eslint-import-resolver-node:
+        optional: true
+      eslint-import-resolver-typescript:
+        optional: true
+      eslint-import-resolver-webpack:
+        optional: true
+    dependencies:
+      '@typescript-eslint/parser': 7.18.0(eslint@8.57.1)(typescript@5.6.3)
+      debug: 3.2.7
+      eslint: 8.57.1
+      eslint-import-resolver-node: 0.3.9
+      eslint-import-resolver-typescript: 3.6.3(@typescript-eslint/parser@7.18.0)(eslint-plugin-import@2.31.0)(eslint@8.57.1)
+    transitivePeerDependencies:
+      - supports-color
+    dev: true
+
+  /eslint-plugin-escompat@3.11.4(eslint@8.57.1):
+    resolution: {integrity: sha512-j0ywwNnIufshOzgAu+PfIig1c7VRClKSNKzpniMT2vXQ4leL5q+e/SpMFQU0nrdL2WFFM44XmhSuwmxb3G0CJg==}
+    peerDependencies:
+      eslint: '>=5.14.1'
+    dependencies:
+      browserslist: 4.24.2
+      eslint: 8.57.1
+    dev: true
+
+  /eslint-plugin-eslint-comments@3.2.0(eslint@8.57.1):
+    resolution: {integrity: sha512-0jkOl0hfojIHHmEHgmNdqv4fmh7300NdpA9FFpF7zaoLvB/QeXOGNLIo86oAveJFrfB1p05kC8hpEMHM8DwWVQ==}
+    engines: {node: '>=6.5.0'}
+    peerDependencies:
+      eslint: '>=4.19.1'
+    dependencies:
+      escape-string-regexp: 1.0.5
+      eslint: 8.57.1
+      ignore: 5.3.2
+    dev: true
+
+  /eslint-plugin-filenames@1.3.2(eslint@8.57.1):
+    resolution: {integrity: sha512-tqxJTiEM5a0JmRCUYQmxw23vtTxrb2+a3Q2mMOPhFxvt7ZQQJmdiuMby9B/vUAuVMghyP7oET+nIf6EO6CBd/w==}
+    peerDependencies:
+      eslint: '*'
+    dependencies:
+      eslint: 8.57.1
+      lodash.camelcase: 4.3.0
+      lodash.kebabcase: 4.1.1
+      lodash.snakecase: 4.1.1
+      lodash.upperfirst: 4.3.1
+    dev: true
+
+  /eslint-plugin-github@4.10.2(eslint-import-resolver-typescript@3.6.3)(eslint@8.57.1)(typescript@5.6.3):
+    resolution: {integrity: sha512-F1F5aAFgi1Y5hYoTFzGQACBkw5W1hu2Fu5FSTrMlXqrojJnKl1S2pWO/rprlowRQpt+hzHhqSpsfnodJEVd5QA==}
+    hasBin: true
+    peerDependencies:
+      eslint: ^8.0.1
+    dependencies:
+      '@github/browserslist-config': 1.0.0
+      '@typescript-eslint/eslint-plugin': 7.18.0(@typescript-eslint/parser@7.18.0)(eslint@8.57.1)(typescript@5.6.3)
+      '@typescript-eslint/parser': 7.18.0(eslint@8.57.1)(typescript@5.6.3)
+      aria-query: 5.3.2
+      eslint: 8.57.1
+      eslint-config-prettier: 9.1.0(eslint@8.57.1)
+      eslint-plugin-escompat: 3.11.4(eslint@8.57.1)
+      eslint-plugin-eslint-comments: 3.2.0(eslint@8.57.1)
+      eslint-plugin-filenames: 1.3.2(eslint@8.57.1)
+      eslint-plugin-i18n-text: 1.0.1(eslint@8.57.1)
+      eslint-plugin-import: 2.31.0(@typescript-eslint/parser@7.18.0)(eslint-import-resolver-typescript@3.6.3)(eslint@8.57.1)
+      eslint-plugin-jsx-a11y: 6.10.2(eslint@8.57.1)
+      eslint-plugin-no-only-tests: 3.3.0
+      eslint-plugin-prettier: 5.2.1(eslint-config-prettier@9.1.0)(eslint@8.57.1)(prettier@3.3.3)
+      eslint-rule-documentation: 1.0.23
+      jsx-ast-utils: 3.3.5
+      prettier: 3.3.3
+      svg-element-attributes: 1.3.1
+    transitivePeerDependencies:
+      - '@types/eslint'
+      - eslint-import-resolver-typescript
+      - eslint-import-resolver-webpack
+      - supports-color
+      - typescript
+    dev: true
+
+  /eslint-plugin-i18n-text@1.0.1(eslint@8.57.1):
+    resolution: {integrity: sha512-3G3UetST6rdqhqW9SfcfzNYMpQXS7wNkJvp6dsXnjzGiku6Iu5hl3B0kmk6lIcFPwYjhQIY+tXVRtK9TlGT7RA==}
+    peerDependencies:
+      eslint: '>=5.0.0'
+    dependencies:
+      eslint: 8.57.1
+    dev: true
+
+  /eslint-plugin-import@2.31.0(@typescript-eslint/parser@7.18.0)(eslint-import-resolver-typescript@3.6.3)(eslint@8.57.1):
+    resolution: {integrity: sha512-ixmkI62Rbc2/w8Vfxyh1jQRTdRTF52VxwRVHl/ykPAmqG+Nb7/kNn+byLP0LxPgI7zWA16Jt82SybJInmMia3A==}
+    engines: {node: '>=4'}
+    peerDependencies:
+      '@typescript-eslint/parser': '*'
+      eslint: ^2 || ^3 || ^4 || ^5 || ^6 || ^7.2.0 || ^8 || ^9
+    peerDependenciesMeta:
+      '@typescript-eslint/parser':
+        optional: true
+    dependencies:
+      '@rtsao/scc': 1.1.0
+      '@typescript-eslint/parser': 7.18.0(eslint@8.57.1)(typescript@5.6.3)
+      array-includes: 3.1.8
+      array.prototype.findlastindex: 1.2.5
+      array.prototype.flat: 1.3.2
+      array.prototype.flatmap: 1.3.2
+      debug: 3.2.7
+      doctrine: 2.1.0
+      eslint: 8.57.1
+      eslint-import-resolver-node: 0.3.9
+      eslint-module-utils: 2.12.0(@typescript-eslint/parser@7.18.0)(eslint-import-resolver-node@0.3.9)(eslint-import-resolver-typescript@3.6.3)(eslint@8.57.1)
+      hasown: 2.0.2
+      is-core-module: 2.15.1
+      is-glob: 4.0.3
+      minimatch: 3.1.2
+      object.fromentries: 2.0.8
+      object.groupby: 1.0.3
+      object.values: 1.2.0
+      semver: 6.3.1
+      string.prototype.trimend: 1.0.8
+      tsconfig-paths: 3.15.0
+    transitivePeerDependencies:
+      - eslint-import-resolver-typescript
+      - eslint-import-resolver-webpack
+      - supports-color
+    dev: true
+
+  /eslint-plugin-jsx-a11y@6.10.2(eslint@8.57.1):
+    resolution: {integrity: sha512-scB3nz4WmG75pV8+3eRUQOHZlNSUhFNq37xnpgRkCCELU3XMvXAxLk1eqWWyE22Ki4Q01Fnsw9BA3cJHDPgn2Q==}
+    engines: {node: '>=4.0'}
+    peerDependencies:
+      eslint: ^3 || ^4 || ^5 || ^6 || ^7 || ^8 || ^9
+    dependencies:
+      aria-query: 5.3.2
+      array-includes: 3.1.8
+      array.prototype.flatmap: 1.3.2
+      ast-types-flow: 0.0.8
+      axe-core: 4.10.2
+      axobject-query: 4.1.0
+      damerau-levenshtein: 1.0.8
+      emoji-regex: 9.2.2
+      eslint: 8.57.1
+      hasown: 2.0.2
+      jsx-ast-utils: 3.3.5
+      language-tags: 1.0.9
+      minimatch: 3.1.2
+      object.fromentries: 2.0.8
+      safe-regex-test: 1.0.3
+      string.prototype.includes: 2.0.1
+    dev: true
+
+  /eslint-plugin-no-only-tests@3.3.0:
+    resolution: {integrity: sha512-brcKcxGnISN2CcVhXJ/kEQlNa0MEfGRtwKtWA16SkqXHKitaKIMrfemJKLKX1YqDU5C/5JY3PvZXd5jEW04e0Q==}
+    engines: {node: '>=5.0.0'}
+    dev: true
+
+  /eslint-plugin-prettier@5.2.1(eslint-config-prettier@9.1.0)(eslint@8.57.1)(prettier@3.3.3):
+    resolution: {integrity: sha512-gH3iR3g4JfF+yYPaJYkN7jEl9QbweL/YfkoRlNnuIEHEz1vHVlCmWOS+eGGiRuzHQXdJFCOTxRgvju9b8VUmrw==}
+    engines: {node: ^14.18.0 || >=16.0.0}
+    peerDependencies:
+      '@types/eslint': '>=8.0.0'
+      eslint: '>=8.0.0'
+      eslint-config-prettier: '*'
+      prettier: '>=3.0.0'
+    peerDependenciesMeta:
+      '@types/eslint':
+        optional: true
+      eslint-config-prettier:
+        optional: true
+    dependencies:
+      eslint: 8.57.1
+      eslint-config-prettier: 9.1.0(eslint@8.57.1)
+      prettier: 3.3.3
+      prettier-linter-helpers: 1.0.0
+      synckit: 0.9.2
+    dev: true
+
+  /eslint-rule-documentation@1.0.23:
+    resolution: {integrity: sha512-pWReu3fkohwyvztx/oQWWgld2iad25TfUdi6wvhhaDPIQjHU/pyvlKgXFw1kX31SQK2Nq9MH+vRDWB0ZLy8fYw==}
+    engines: {node: '>=4.0.0'}
+    dev: true
+
+  /eslint-scope@7.2.2:
+    resolution: {integrity: sha512-dOt21O7lTMhDM+X9mB4GX+DZrZtCUJPL/wlcTqxyrx5IvO0IYtILdtrQGQp+8n5S0gwSVmOf9NQrjMOgfQZlIg==}
+    engines: {node: ^12.22.0 || ^14.17.0 || >=16.0.0}
+    dependencies:
+      esrecurse: 4.3.0
+      estraverse: 5.3.0
+    dev: true
+
+  /eslint-visitor-keys@3.4.3:
+    resolution: {integrity: sha512-wpc+LXeiyiisxPlEkUzU6svyS1frIO3Mgxj1fdy7Pm8Ygzguax2N3Fa/D/ag1WqbOprdI+uY6wMUl8/a2G+iag==}
+    engines: {node: ^12.22.0 || ^14.17.0 || >=16.0.0}
+    dev: true
+
+  /eslint@8.57.1:
+    resolution: {integrity: sha512-ypowyDxpVSYpkXr9WPv2PAZCtNip1Mv5KTW0SCurXv/9iOpcrH9PaqUElksqEB6pChqHGDRCFTyrZlGhnLNGiA==}
+    engines: {node: ^12.22.0 || ^14.17.0 || >=16.0.0}
+    deprecated: This version is no longer supported. Please see https://eslint.org/version-support for other options.
+    hasBin: true
+    dependencies:
+      '@eslint-community/eslint-utils': 4.4.1(eslint@8.57.1)
+      '@eslint-community/regexpp': 4.12.1
+      '@eslint/eslintrc': 2.1.4
+      '@eslint/js': 8.57.1
+      '@humanwhocodes/config-array': 0.13.0
+      '@humanwhocodes/module-importer': 1.0.1
+      '@nodelib/fs.walk': 1.2.8
+      '@ungap/structured-clone': 1.2.0
+      ajv: 6.12.6
+      chalk: 4.1.2
+      cross-spawn: 7.0.6
+      debug: 4.3.7
+      doctrine: 3.0.0
+      escape-string-regexp: 4.0.0
+      eslint-scope: 7.2.2
+      eslint-visitor-keys: 3.4.3
+      espree: 9.6.1
+      esquery: 1.6.0
+      esutils: 2.0.3
+      fast-deep-equal: 3.1.3
+      file-entry-cache: 6.0.1
+      find-up: 5.0.0
+      glob-parent: 6.0.2
+      globals: 13.24.0
+      graphemer: 1.4.0
+      ignore: 5.3.2
+      imurmurhash: 0.1.4
+      is-glob: 4.0.3
+      is-path-inside: 3.0.3
+      js-yaml: 4.1.0
+      json-stable-stringify-without-jsonify: 1.0.1
+      levn: 0.4.1
+      lodash.merge: 4.6.2
+      minimatch: 3.1.2
+      natural-compare: 1.4.0
+      optionator: 0.9.4
+      strip-ansi: 6.0.1
+      text-table: 0.2.0
+    transitivePeerDependencies:
+      - supports-color
+    dev: true
+
+  /espree@9.6.1:
+    resolution: {integrity: sha512-oruZaFkjorTpF32kDSI5/75ViwGeZginGGy2NoOSg3Q9bnwlnmDm4HLnkl0RE3n+njDXR037aY1+x58Z/zFdwQ==}
+    engines: {node: ^12.22.0 || ^14.17.0 || >=16.0.0}
+    dependencies:
+      acorn: 8.14.0
+      acorn-jsx: 5.3.2(acorn@8.14.0)
+      eslint-visitor-keys: 3.4.3
+    dev: true
+
+  /esquery@1.6.0:
+    resolution: {integrity: sha512-ca9pw9fomFcKPvFLXhBKUK90ZvGibiGOvRJNbjljY7s7uq/5YO4BOzcYtJqExdx99rF6aAcnRxHmcUHcz6sQsg==}
+    engines: {node: '>=0.10'}
+    dependencies:
+      estraverse: 5.3.0
+    dev: true
+
+  /esrecurse@4.3.0:
+    resolution: {integrity: sha512-KmfKL3b6G+RXvP8N1vr3Tq1kL/oCFgn2NYXEtqP8/L3pKapUA4G8cFVaoF3SU323CD4XypR/ffioHmkti6/Tag==}
+    engines: {node: '>=4.0'}
+    dependencies:
+      estraverse: 5.3.0
+    dev: true
+
+  /estraverse@5.3.0:
+    resolution: {integrity: sha512-MMdARuVEQziNTeJD8DgMqmhwR11BRQ/cBP+pLtYdSTnf3MIO8fFeiINEbX36ZdNlfU/7A9f3gUw49B3oQsvwBA==}
+    engines: {node: '>=4.0'}
+    dev: true
+
+  /estree-walker@3.0.3:
+    resolution: {integrity: sha512-7RUKfXgSMMkzt6ZuXmqapOurLGPPfgj6l9uRZ7lRGolvk0y2yocc35LdcxKC5PQZdn2DMqioAQ2NoWcrTKmm6g==}
+    dependencies:
+      '@types/estree': 1.0.6
+    dev: true
+
+  /esutils@2.0.3:
+    resolution: {integrity: sha512-kVscqXk4OCp68SZ0dkgEKVi6/8ij300KBWTJq32P/dYeWTSwK41WyTxalN1eRmA5Z9UU/LX9D7FWSmV9SAYx6g==}
+    engines: {node: '>=0.10.0'}
+    dev: true
+
+  /event-target-shim@5.0.1:
+    resolution: {integrity: sha512-i/2XbnSz/uxRCU6+NdVJgKWDTM427+MqYbkQzD321DuCQJUqOuJKIA0IM2+W2xtYHdKOmZ4dR6fExsd4SXL+WQ==}
+    engines: {node: '>=6'}
+    dev: false
+
+  /events@3.3.0:
+    resolution: {integrity: sha512-mQw+2fkQbALzQ7V0MY0IqdnXNOeTtP4r0lN9z7AAawCXgqea7bDii20AYrIBrFd/Hx0M2Ocz6S111CaFkUcb0Q==}
+    engines: {node: '>=0.8.x'}
+    dev: false
+
+  /execa@8.0.1:
+    resolution: {integrity: sha512-VyhnebXciFV2DESc+p6B+y0LjSm0krU4OgJN44qFAhBY0TJ+1V61tYD2+wHusZ6F9n5K+vl8k0sTy7PEfV4qpg==}
+    engines: {node: '>=16.17'}
+    dependencies:
+      cross-spawn: 7.0.6
+      get-stream: 8.0.1
+      human-signals: 5.0.0
+      is-stream: 3.0.0
+      merge-stream: 2.0.0
+      npm-run-path: 5.3.0
+      onetime: 6.0.0
+      signal-exit: 4.1.0
+      strip-final-newline: 3.0.0
+    dev: true
+
+  /fast-deep-equal@3.1.3:
+    resolution: {integrity: sha512-f3qQ9oQy9j2AhBe/H9VC91wLmKBCCU/gDOnKNAYG5hswO7BLKj09Hc5HYNz9cGI++xlpDCIgDaitVs03ATR84Q==}
+    dev: true
+
+  /fast-diff@1.3.0:
+    resolution: {integrity: sha512-VxPP4NqbUjj6MaAOafWeUn2cXWLcCtljklUtZf0Ind4XQ+QPtmA0b18zZy0jIQx+ExRVCR/ZQpBmik5lXshNsw==}
+    dev: true
+
+  /fast-glob@3.3.2:
+    resolution: {integrity: sha512-oX2ruAFQwf/Orj8m737Y5adxDQO0LAB7/S5MnxCdTNDd4p6BsyIVsv9JQsATbTSq8KHRpLwIHbVlUNatxd+1Ow==}
+    engines: {node: '>=8.6.0'}
+    dependencies:
+      '@nodelib/fs.stat': 2.0.5
+      '@nodelib/fs.walk': 1.2.8
+      glob-parent: 5.1.2
+      merge2: 1.4.1
+      micromatch: 4.0.8
+    dev: true
+
+  /fast-json-stable-stringify@2.1.0:
+    resolution: {integrity: sha512-lhd/wF+Lk98HZoTCtlVraHtfh5XYijIjalXck7saUtuanSDyLMxnHhSXEDJqHxD7msR8D0uCmqlkwjCV8xvwHw==}
+    dev: true
+
+  /fast-levenshtein@2.0.6:
+    resolution: {integrity: sha512-DCXu6Ifhqcks7TZKY3Hxp3y6qphY5SJZmrWMDrKcERSOXWQdMhU9Ig/PYrzyw/ul9jOIyh0N4M0tbC5hodg8dw==}
+    dev: true
+
+  /fast-xml-parser@4.5.0:
+    resolution: {integrity: sha512-/PlTQCI96+fZMAOLMZK4CWG1ItCbfZ/0jx7UIJFChPNrx7tcEgerUgWbeieCM9MfHInUDyK8DWYZ+YrywDJuTg==}
+    hasBin: true
+    dependencies:
+      strnum: 1.0.5
+    dev: false
+
+  /fastq@1.17.1:
+    resolution: {integrity: sha512-sRVD3lWVIXWg6By68ZN7vho9a1pQcN/WBFaAAsDDFzlJjvoGx0P8z7V1t72grFJfJhu3YPZBuu25f7Kaw2jN1w==}
+    dependencies:
+      reusify: 1.0.4
+    dev: true
+
+  /fdir@6.4.2(picomatch@4.0.2):
+    resolution: {integrity: sha512-KnhMXsKSPZlAhp7+IjUkRZKPb4fUyccpDrdFXbi4QL1qkmFh9kVY09Yox+n4MaOb3lHZ1Tv829C3oaaXoMYPDQ==}
+    peerDependencies:
+      picomatch: ^3 || ^4
+    peerDependenciesMeta:
+      picomatch:
+        optional: true
+    dependencies:
+      picomatch: 4.0.2
+    dev: true
+
+  /file-entry-cache@6.0.1:
+    resolution: {integrity: sha512-7Gps/XWymbLk2QLYK4NzpMOrYjMhdIxXuIvy2QBsLE6ljuodKvdkWs/cpyJJ3CVIVpH0Oi1Hvg1ovbMzLdFBBg==}
+    engines: {node: ^10.12.0 || >=12.0.0}
+    dependencies:
+      flat-cache: 3.2.0
+    dev: true
+
+  /fill-range@7.1.1:
+    resolution: {integrity: sha512-YsGpe3WHLK8ZYi4tWDg2Jy3ebRz2rXowDxnld4bkQB00cc/1Zw9AWnC0i9ztDJitivtQvaI9KaLyKrc+hBW0yg==}
+    engines: {node: '>=8'}
+    dependencies:
+      to-regex-range: 5.0.1
+    dev: true
+
+  /find-up@5.0.0:
+    resolution: {integrity: sha512-78/PXT1wlLLDgTzDs7sjq9hzz0vXD+zn+7wypEe4fXQxCmdmqfGsEPQxmiCSQI3ajFV91bVSsvNtrJRiW6nGng==}
+    engines: {node: '>=10'}
+    dependencies:
+      locate-path: 6.0.0
+      path-exists: 4.0.0
+    dev: true
+
+  /flat-cache@3.2.0:
+    resolution: {integrity: sha512-CYcENa+FtcUKLmhhqyctpclsq7QF38pKjZHsGNiSQF5r4FtoKDWabFDl3hzaEQMvT1LHEysw5twgLvpYYb4vbw==}
+    engines: {node: ^10.12.0 || >=12.0.0}
+    dependencies:
+      flatted: 3.3.2
+      keyv: 4.5.4
+      rimraf: 3.0.2
+    dev: true
+
+  /flatted@3.3.2:
+    resolution: {integrity: sha512-AiwGJM8YcNOaobumgtng+6NHuOqC3A7MixFeDafM3X9cIUM+xUXoS5Vfgf+OihAYe20fxqNM9yPBXJzRtZ/4eA==}
+    dev: true
+
+  /for-each@0.3.3:
+    resolution: {integrity: sha512-jqYfLp7mo9vIyQf8ykW2v7A+2N4QjeCeI5+Dz9XraiO1ign81wjiH7Fb9vSOWvQfNtmSa4H2RoQTrrXivdUZmw==}
+    dependencies:
+      is-callable: 1.2.7
+    dev: true
+
+  /foreground-child@3.3.0:
+    resolution: {integrity: sha512-Ld2g8rrAyMYFXBhEqMz8ZAHBi4J4uS1i/CxGMDnjyFWddMXLVcDp051DZfu+t7+ab7Wv6SMqpWmyFIj5UbfFvg==}
+    engines: {node: '>=14'}
+    dependencies:
+      cross-spawn: 7.0.6
+      signal-exit: 4.1.0
+    dev: true
+
+  /form-data-encoder@4.0.2:
+    resolution: {integrity: sha512-KQVhvhK8ZkWzxKxOr56CPulAhH3dobtuQ4+hNQ+HekH/Wp5gSOafqRAeTphQUJAIk0GBvHZgJ2ZGRWd5kphMuw==}
+    engines: {node: '>= 18'}
+    dev: false
+
+  /form-data@2.5.2:
+    resolution: {integrity: sha512-GgwY0PS7DbXqajuGf4OYlsrIu3zgxD6Vvql43IBhm6MahqA5SK/7mwhtNj2AdH2z35YR34ujJ7BN+3fFC3jP5Q==}
+    engines: {node: '>= 0.12'}
+    dependencies:
+      asynckit: 0.4.0
+      combined-stream: 1.0.8
+      mime-types: 2.1.35
+      safe-buffer: 5.2.1
+    dev: false
+
+  /fs.realpath@1.0.0:
+    resolution: {integrity: sha512-OO0pH2lK6a0hZnAdau5ItzHPI6pUlvI7jMVnxUQRtw4owF2wk8lOSabtGDCTP4Ggrg2MbGnWO9X8K1t4+fGMDw==}
+    dev: true
+
+  /fsevents@2.3.3:
+    resolution: {integrity: sha512-5xoDfX+fL7faATnagmWPpbFtwh/R77WmMMqqHGS65C3vvB0YHrgF+B1YmZ3441tMj5n63k0212XNoJwzlhffQw==}
+    engines: {node: ^8.16.0 || ^10.6.0 || >=11.0.0}
+    os: [darwin]
+    requiresBuild: true
+    dev: true
+    optional: true
+
+  /function-bind@1.1.2:
+    resolution: {integrity: sha512-7XHNxH7qX9xG5mIwxkhumTox/MIRNcOgDrxWsMt2pAr23WHp6MrRlN7FBSFpCpr+oVO0F744iUgR82nJMfG2SA==}
+    dev: true
+
+  /function.prototype.name@1.1.6:
+    resolution: {integrity: sha512-Z5kx79swU5P27WEayXM1tBi5Ze/lbIyiNgU3qyXUOf9b2rgXYyF9Dy9Cx+IQv/Lc8WCG6L82zwUPpSS9hGehIg==}
+    engines: {node: '>= 0.4'}
+    dependencies:
+      call-bind: 1.0.7
+      define-properties: 1.2.1
+      es-abstract: 1.23.5
+      functions-have-names: 1.2.3
+    dev: true
+
+  /functions-have-names@1.2.3:
+    resolution: {integrity: sha512-xckBUXyTIqT97tq2x2AMb+g163b5JFysYk0x4qxNFwbfQkmNZoiRHb6sPzI9/QV33WeuvVYBUIiD4NzNIyqaRQ==}
+    dev: true
+
+  /get-func-name@2.0.2:
+    resolution: {integrity: sha512-8vXOvuE167CtIc3OyItco7N/dpRtBbYOsPsXCz7X/PMnlGjYjSGuZJgM1Y7mmew7BKf9BqvLX2tnOVy1BBUsxQ==}
+    dev: true
+
+  /get-intrinsic@1.2.4:
+    resolution: {integrity: sha512-5uYhsJH8VJBTv7oslg4BznJYhDoRI6waYCxMmCdnTrcCrHA/fCFKoTFz2JKKE0HdDFUF7/oQuhzumXJK7paBRQ==}
+    engines: {node: '>= 0.4'}
+    dependencies:
+      es-errors: 1.3.0
+      function-bind: 1.1.2
+      has-proto: 1.0.3
+      has-symbols: 1.0.3
+      hasown: 2.0.2
+    dev: true
+
+  /get-stream@8.0.1:
+    resolution: {integrity: sha512-VaUJspBffn/LMCJVoMvSAdmscJyS1auj5Zulnn5UoYcY531UWmdwhRWkcGKnGU93m5HSXP9LP2usOryrBtQowA==}
+    engines: {node: '>=16'}
+    dev: true
+
+  /get-stream@9.0.1:
+    resolution: {integrity: sha512-kVCxPF3vQM/N0B1PmoqVUqgHP+EeVjmZSQn+1oCRPxd2P21P2F19lIgbR3HBosbB1PUhOAoctJnfEn2GbN2eZA==}
+    engines: {node: '>=18'}
+    dependencies:
+      '@sec-ant/readable-stream': 0.4.1
+      is-stream: 4.0.1
+    dev: false
+
+  /get-symbol-description@1.0.2:
+    resolution: {integrity: sha512-g0QYk1dZBxGwk+Ngc+ltRH2IBp2f7zBkBMBJZCDerh6EhlhSR6+9irMCuT/09zD6qkarHUSn529sK/yL4S27mg==}
+    engines: {node: '>= 0.4'}
+    dependencies:
+      call-bind: 1.0.7
+      es-errors: 1.3.0
+      get-intrinsic: 1.2.4
+    dev: true
+
+  /get-tsconfig@4.8.1:
+    resolution: {integrity: sha512-k9PN+cFBmaLWtVz29SkUoqU5O0slLuHJXt/2P+tMVFT+phsSGXGkp9t3rQIqdz0e+06EHNGs3oM6ZX1s2zHxRg==}
+    dependencies:
+      resolve-pkg-maps: 1.0.0
+    dev: true
+
+  /glob-parent@5.1.2:
+    resolution: {integrity: sha512-AOIgSQCepiJYwP3ARnGx+5VnTu2HBYdzbGP45eLw1vr3zB3vZLeyed1sC9hnbcOc9/SrMyM5RPQrkGz4aS9Zow==}
+    engines: {node: '>= 6'}
+    dependencies:
+      is-glob: 4.0.3
+    dev: true
+
+  /glob-parent@6.0.2:
+    resolution: {integrity: sha512-XxwI8EOhVQgWp6iDL+3b0r86f4d6AX6zSU55HfB4ydCEuXLXc5FcYeOu+nnGftS4TEju/11rt4KJPTMgbfmv4A==}
+    engines: {node: '>=10.13.0'}
+    dependencies:
+      is-glob: 4.0.3
+    dev: true
+
+  /glob@10.4.5:
+    resolution: {integrity: sha512-7Bv8RF0k6xjo7d4A/PxYLbUCfb6c+Vpd2/mB2yRDlew7Jb5hEXiCD9ibfO7wpk8i4sevK6DFny9h7EYbM3/sHg==}
+    hasBin: true
+    dependencies:
+      foreground-child: 3.3.0
+      jackspeak: 3.4.3
+      minimatch: 9.0.5
+      minipass: 7.1.2
+      package-json-from-dist: 1.0.1
+      path-scurry: 1.11.1
+    dev: true
+
+  /glob@7.2.3:
+    resolution: {integrity: sha512-nFR0zLpU2YCaRxwoCJvL6UvCH2JFyFVIvwTLsIf21AuHlMskA1hhTdk+LlYJtOlYt9v6dvszD2BGRqBL+iQK9Q==}
+    deprecated: Glob versions prior to v9 are no longer supported
+    dependencies:
+      fs.realpath: 1.0.0
+      inflight: 1.0.6
+      inherits: 2.0.4
+      minimatch: 3.1.2
+      once: 1.4.0
+      path-is-absolute: 1.0.1
+    dev: true
+
+  /globals@11.12.0:
+    resolution: {integrity: sha512-WOBp/EEGUiIsJSp7wcv/y6MO+lV9UoncWqxuFfm8eBwzWNgyfBd6Gz+IeKQ9jCmyhoH99g15M3T+QaVHFjizVA==}
+    engines: {node: '>=4'}
+    dev: true
+
+  /globals@13.24.0:
+    resolution: {integrity: sha512-AhO5QUcj8llrbG09iWhPU2B204J1xnPeL8kQmVorSsy+Sjj1sk8gIyh6cUocGmH4L0UuhAJy+hJMRA4mgA4mFQ==}
+    engines: {node: '>=8'}
+    dependencies:
+      type-fest: 0.20.2
+    dev: true
+
+  /globalthis@1.0.4:
+    resolution: {integrity: sha512-DpLKbNU4WylpxJykQujfCcwYWiV/Jhm50Goo0wrVILAv5jOr9d+H+UR3PhSCD2rCCEIg0uc+G+muBTwD54JhDQ==}
+    engines: {node: '>= 0.4'}
+    dependencies:
+      define-properties: 1.2.1
+      gopd: 1.0.1
+    dev: true
+
+  /globby@11.1.0:
+    resolution: {integrity: sha512-jhIXaOzy1sb8IyocaruWSn1TjmnBVs8Ayhcy83rmxNJ8q2uWKCAj3CnJY+KpGSXCueAPc0i05kVvVKtP1t9S3g==}
+    engines: {node: '>=10'}
+    dependencies:
+      array-union: 2.1.0
+      dir-glob: 3.0.1
+      fast-glob: 3.3.2
+      ignore: 5.3.2
+      merge2: 1.4.1
+      slash: 3.0.0
+    dev: true
+
+  /gopd@1.0.1:
+    resolution: {integrity: sha512-d65bNlIadxvpb/A2abVdlqKqV563juRnZ1Wtk6s1sIR8uNsXR70xqIzVqxVf1eTqDunwT2MkczEeaezCKTZhwA==}
+    dependencies:
+      get-intrinsic: 1.2.4
+    dev: true
+
+  /got@14.4.4:
+    resolution: {integrity: sha512-tqiF7eSgTBwQkxb1LxsEpva8TaMYVisbhplrFVmw9GQE3855Z+MH/mnsXLLOkDxR6hZJRFMj5VTAZ8lmTF8ZOA==}
+    engines: {node: '>=20'}
+    dependencies:
+      '@sindresorhus/is': 7.0.1
+      '@szmarczak/http-timer': 5.0.1
+      cacheable-lookup: 7.0.0
+      cacheable-request: 12.0.1
+      decompress-response: 6.0.0
+      form-data-encoder: 4.0.2
+      http2-wrapper: 2.2.1
+      lowercase-keys: 3.0.0
+      p-cancelable: 4.0.1
+      responselike: 3.0.0
+      type-fest: 4.27.0
+    dev: false
+
+  /graceful-fs@4.2.11:
+    resolution: {integrity: sha512-RbJ5/jmFcNNCcDV5o9eTnBLJ/HszWV0P73bc+Ff4nS/rJj+YaS6IGyiOL0VoBYX+l1Wrl3k63h/KrH+nhJ0XvQ==}
+    dev: true
+
+  /graphemer@1.4.0:
+    resolution: {integrity: sha512-EtKwoO6kxCL9WO5xipiHTZlSzBm7WLT627TqC/uVRd0HKmq8NXyebnNYxDoBi7wt8eTWrUrKXCOVaFq9x1kgag==}
+    dev: true
+
+  /has-bigints@1.0.2:
+    resolution: {integrity: sha512-tSvCKtBr9lkF0Ex0aQiP9N+OpV4zi2r/Nee5VkRDbaqv35RLYMzbwQfFSZZH0kR+Rd6302UJZ2p/bJCEoR3VoQ==}
+    dev: true
+
+  /has-flag@4.0.0:
+    resolution: {integrity: sha512-EykJT/Q1KjTWctppgIAgfSO0tKVuZUjhgMr17kqTumMl6Afv3EISleU7qZUzoXDFTAHTDC4NOoG/ZxU3EvlMPQ==}
+    engines: {node: '>=8'}
+    dev: true
+
+  /has-property-descriptors@1.0.2:
+    resolution: {integrity: sha512-55JNKuIW+vq4Ke1BjOTjM2YctQIvCT7GFzHwmfZPGo5wnrgkid0YQtnAleFSqumZm4az3n2BS+erby5ipJdgrg==}
+    dependencies:
+      es-define-property: 1.0.0
+    dev: true
+
+  /has-proto@1.0.3:
+    resolution: {integrity: sha512-SJ1amZAJUiZS+PhsVLf5tGydlaVB8EdFpaSO4gmiUKUOxk8qzn5AIy4ZeJUmh22znIdk/uMAUT2pl3FxzVUH+Q==}
+    engines: {node: '>= 0.4'}
+    dev: true
+
+  /has-symbols@1.0.3:
+    resolution: {integrity: sha512-l3LCuF6MgDNwTDKkdYGEihYjt5pRPbEg46rtlmnSPlUbgmB8LOIrKJbYYFBSbnPaJexMKtiPO8hmeRjRz2Td+A==}
+    engines: {node: '>= 0.4'}
+    dev: true
+
+  /has-tostringtag@1.0.2:
+    resolution: {integrity: sha512-NqADB8VjPFLM2V0VvHUewwwsw0ZWBaIdgo+ieHtK3hasLz4qeCRjYcqfB6AQrBggRKppKF8L52/VqdVsO47Dlw==}
+    engines: {node: '>= 0.4'}
+    dependencies:
+      has-symbols: 1.0.3
+    dev: true
+
+  /hasown@2.0.2:
+    resolution: {integrity: sha512-0hJU9SCPvmMzIBdZFqNPXWa6dqh7WdH0cII9y+CyS8rG3nL48Bclra9HmKhVVUHyPWNH5Y7xDwAB7bfgSjkUMQ==}
+    engines: {node: '>= 0.4'}
+    dependencies:
+      function-bind: 1.1.2
+    dev: true
+
+  /http-cache-semantics@4.1.1:
+    resolution: {integrity: sha512-er295DKPVsV82j5kw1Gjt+ADA/XYHsajl82cGNQG2eyoPkvgUhX+nDIyelzhIWbbsXP39EHcI6l5tYs2FYqYXQ==}
+    dev: false
+
+  /http-proxy-agent@7.0.2:
+    resolution: {integrity: sha512-T1gkAiYYDWYx3V5Bmyu7HcfcvL7mUrTWiM6yOfa3PIphViJ/gFPbvidQ+veqSOHci/PxBcDabeUNCzpOODJZig==}
+    engines: {node: '>= 14'}
+    dependencies:
+      agent-base: 7.1.1
+      debug: 4.3.7
+    transitivePeerDependencies:
+      - supports-color
+    dev: false
+
+  /http2-wrapper@2.2.1:
+    resolution: {integrity: sha512-V5nVw1PAOgfI3Lmeaj2Exmeg7fenjhRUgz1lPSezy1CuhPYbgQtbQj4jZfEAEMlaL+vupsvhjqCyjzob0yxsmQ==}
+    engines: {node: '>=10.19.0'}
+    dependencies:
+      quick-lru: 5.1.1
+      resolve-alpn: 1.2.1
+    dev: false
+
+  /https-proxy-agent@7.0.5:
+    resolution: {integrity: sha512-1e4Wqeblerz+tMKPIq2EMGiiWW1dIjZOksyHWSUm1rmuvw/how9hBHZ38lAGj5ID4Ik6EdkOw7NmWPy6LAwalw==}
+    engines: {node: '>= 14'}
+    dependencies:
+      agent-base: 7.1.1
+      debug: 4.3.7
+    transitivePeerDependencies:
+      - supports-color
+    dev: false
+
+  /human-signals@5.0.0:
+    resolution: {integrity: sha512-AXcZb6vzzrFAUE61HnN4mpLqd/cSIwNQjtNWR0euPm6y0iqx3G4gOXaIDdtdDwZmhwe82LA6+zinmW4UBWVePQ==}
+    engines: {node: '>=16.17.0'}
+    dev: true
+
+  /ignore@5.3.2:
+    resolution: {integrity: sha512-hsBTNUqQTDwkWtcdYI2i06Y/nUBEsNEDJKjWdigLvegy8kDuJAS8uRlpkkcQpyEXL0Z/pjDy5HBmMjRCJ2gq+g==}
+    engines: {node: '>= 4'}
+    dev: true
+
+  /import-fresh@3.3.0:
+    resolution: {integrity: sha512-veYYhQa+D1QBKznvhUHxb8faxlrwUnxseDAbAp457E0wLNio2bOSKnjYDhMj+YiAq61xrMGhQk9iXVk5FzgQMw==}
+    engines: {node: '>=6'}
+    dependencies:
+      parent-module: 1.0.1
+      resolve-from: 4.0.0
+    dev: true
+
+  /imurmurhash@0.1.4:
+    resolution: {integrity: sha512-JmXMZ6wuvDmLiHEml9ykzqO6lwFbof0GG4IkcGaENdCRDDmMVnny7s5HsIgHCbaq0w2MyPhDqkhTUgS2LU2PHA==}
+    engines: {node: '>=0.8.19'}
+    dev: true
+
+  /inflight@1.0.6:
+    resolution: {integrity: sha512-k92I/b08q4wvFscXCLvqfsHCrjrF7yiXsQuIVvVE7N82W3+aqpzuUdBbfhWcy/FZR3/4IgflMgKLOsvPDrGCJA==}
+    deprecated: This module is not supported, and leaks memory. Do not use it. Check out lru-cache if you want a good and tested way to coalesce async requests by a key value, which is much more comprehensive and powerful.
+    dependencies:
+      once: 1.4.0
+      wrappy: 1.0.2
+    dev: true
+
+  /inherits@2.0.4:
+    resolution: {integrity: sha512-k/vGaX4/Yla3WzyMCvTQOXYeIHvqOKtnqBduzTHpzpQZzAskKMhZ2K+EnBiSM9zGSoIFeMpXKxa4dYeZIQqewQ==}
+    dev: true
+
+  /internal-slot@1.0.7:
+    resolution: {integrity: sha512-NGnrKwXzSms2qUUih/ILZ5JBqNTSa1+ZmP6flaIp6KmSElgE9qdndzS3cqjrDovwFdmwsGsLdeFgB6suw+1e9g==}
+    engines: {node: '>= 0.4'}
+    dependencies:
+      es-errors: 1.3.0
+      hasown: 2.0.2
+      side-channel: 1.0.6
+    dev: true
+
+  /is-array-buffer@3.0.4:
+    resolution: {integrity: sha512-wcjaerHw0ydZwfhiKbXJWLDY8A7yV7KhjQOpb83hGgGfId/aQa4TOvwyzn2PuswW2gPCYEL/nEAiSVpdOj1lXw==}
+    engines: {node: '>= 0.4'}
+    dependencies:
+      call-bind: 1.0.7
+      get-intrinsic: 1.2.4
+    dev: true
+
+  /is-bigint@1.0.4:
+    resolution: {integrity: sha512-zB9CruMamjym81i2JZ3UMn54PKGsQzsJeo6xvN3HJJ4CAsQNB6iRutp2To77OfCNuoxspsIhzaPoO1zyCEhFOg==}
+    dependencies:
+      has-bigints: 1.0.2
+    dev: true
+
+  /is-boolean-object@1.1.2:
+    resolution: {integrity: sha512-gDYaKHJmnj4aWxyj6YHyXVpdQawtVLHU5cb+eztPGczf6cjuTdwve5ZIEfgXqH4e57An1D1AKf8CZ3kYrQRqYA==}
+    engines: {node: '>= 0.4'}
+    dependencies:
+      call-bind: 1.0.7
+      has-tostringtag: 1.0.2
+    dev: true
+
+  /is-bun-module@1.2.1:
+    resolution: {integrity: sha512-AmidtEM6D6NmUiLOvvU7+IePxjEjOzra2h0pSrsfSAcXwl/83zLLXDByafUJy9k/rKK0pvXMLdwKwGHlX2Ke6Q==}
+    dependencies:
+      semver: 7.6.3
+    dev: true
+
+  /is-callable@1.2.7:
+    resolution: {integrity: sha512-1BC0BVFhS/p0qtw6enp8e+8OD0UrK0oFLztSjNzhcKA3WDuJxxAPXzPuPtKkjEY9UUoEWlX/8fgKeu2S8i9JTA==}
+    engines: {node: '>= 0.4'}
+    dev: true
+
+  /is-core-module@2.15.1:
+    resolution: {integrity: sha512-z0vtXSwucUJtANQWldhbtbt7BnL0vxiFjIdDLAatwhDYty2bad6s+rijD6Ri4YuYJubLzIJLUidCh09e1djEVQ==}
+    engines: {node: '>= 0.4'}
+    dependencies:
+      hasown: 2.0.2
+    dev: true
+
+  /is-data-view@1.0.1:
+    resolution: {integrity: sha512-AHkaJrsUVW6wq6JS8y3JnM/GJF/9cf+k20+iDzlSaJrinEo5+7vRiteOSwBhHRiAyQATN1AmY4hwzxJKPmYf+w==}
+    engines: {node: '>= 0.4'}
+    dependencies:
+      is-typed-array: 1.1.13
+    dev: true
+
+  /is-date-object@1.0.5:
+    resolution: {integrity: sha512-9YQaSxsAiSwcvS33MBk3wTCVnWK+HhF8VZR2jRxehM16QcVOdHqPn4VPHmRK4lSr38n9JriurInLcP90xsYNfQ==}
+    engines: {node: '>= 0.4'}
+    dependencies:
+      has-tostringtag: 1.0.2
+    dev: true
+
+  /is-extglob@2.1.1:
+    resolution: {integrity: sha512-SbKbANkN603Vi4jEZv49LeVJMn4yGwsbzZworEoyEiutsN3nJYdbO36zfhGJ6QEDpOZIFkDtnq5JRxmvl3jsoQ==}
+    engines: {node: '>=0.10.0'}
+    dev: true
+
+  /is-fullwidth-code-point@3.0.0:
+    resolution: {integrity: sha512-zymm5+u+sCsSWyD9qNaejV3DFvhCKclKdizYaJUuHA83RLjb7nSuGnddCHGv0hk+KY7BMAlsWeK4Ueg6EV6XQg==}
+    engines: {node: '>=8'}
+    dev: true
+
+  /is-glob@4.0.3:
+    resolution: {integrity: sha512-xelSayHH36ZgE7ZWhli7pW34hNbNl8Ojv5KVmkJD4hBdD3th8Tfk9vYasLM+mXWOZhFkgZfxhLSnrwRr4elSSg==}
+    engines: {node: '>=0.10.0'}
+    dependencies:
+      is-extglob: 2.1.1
+    dev: true
+
+  /is-negative-zero@2.0.3:
+    resolution: {integrity: sha512-5KoIu2Ngpyek75jXodFvnafB6DJgr3u8uuK0LEZJjrU19DrMD3EVERaR8sjz8CCGgpZvxPl9SuE1GMVPFHx1mw==}
+    engines: {node: '>= 0.4'}
+    dev: true
+
+  /is-number-object@1.0.7:
+    resolution: {integrity: sha512-k1U0IRzLMo7ZlYIfzRu23Oh6MiIFasgpb9X76eqfFZAqwH44UI4KTBvBYIZ1dSL9ZzChTB9ShHfLkR4pdW5krQ==}
+    engines: {node: '>= 0.4'}
+    dependencies:
+      has-tostringtag: 1.0.2
+    dev: true
+
+  /is-number@7.0.0:
+    resolution: {integrity: sha512-41Cifkg6e8TylSpdtTpeLVMqvSBEVzTttHvERD741+pnZ8ANv0004MRL43QKPDlK9cGvNp6NZWZUBlbGXYxxng==}
+    engines: {node: '>=0.12.0'}
+    dev: true
+
+  /is-path-inside@3.0.3:
+    resolution: {integrity: sha512-Fd4gABb+ycGAmKou8eMftCupSir5lRxqf4aD/vd0cD2qc4HL07OjCeuHMr8Ro4CoMaeCKDB0/ECBOVWjTwUvPQ==}
+    engines: {node: '>=8'}
+    dev: true
+
+  /is-regex@1.1.4:
+    resolution: {integrity: sha512-kvRdxDsxZjhzUX07ZnLydzS1TU/TJlTUHHY4YLL87e37oUA49DfkLqgy+VjFocowy29cKvcSiu+kIv728jTTVg==}
+    engines: {node: '>= 0.4'}
+    dependencies:
+      call-bind: 1.0.7
+      has-tostringtag: 1.0.2
+    dev: true
+
+  /is-shared-array-buffer@1.0.3:
+    resolution: {integrity: sha512-nA2hv5XIhLR3uVzDDfCIknerhx8XUKnstuOERPNNIinXG7v9u+ohXF67vxm4TPTEPU6lm61ZkwP3c9PCB97rhg==}
+    engines: {node: '>= 0.4'}
+    dependencies:
+      call-bind: 1.0.7
+    dev: true
+
+  /is-stream@3.0.0:
+    resolution: {integrity: sha512-LnQR4bZ9IADDRSkvpqMGvt/tEJWclzklNgSw48V5EAaAeDd6qGvN8ei6k5p0tvxSR171VmGyHuTiAOfxAbr8kA==}
+    engines: {node: ^12.20.0 || ^14.13.1 || >=16.0.0}
+    dev: true
+
+  /is-stream@4.0.1:
+    resolution: {integrity: sha512-Dnz92NInDqYckGEUJv689RbRiTSEHCQ7wOVeALbkOz999YpqT46yMRIGtSNl2iCL1waAZSx40+h59NV/EwzV/A==}
+    engines: {node: '>=18'}
+    dev: false
+
+  /is-string@1.0.7:
+    resolution: {integrity: sha512-tE2UXzivje6ofPW7l23cjDOMa09gb7xlAqG6jG5ej6uPV32TlWP3NKPigtaGeHNu9fohccRYvIiZMfOOnOYUtg==}
+    engines: {node: '>= 0.4'}
+    dependencies:
+      has-tostringtag: 1.0.2
+    dev: true
+
+  /is-symbol@1.0.4:
+    resolution: {integrity: sha512-C/CPBqKWnvdcxqIARxyOh4v1UUEOCHpgDa0WYgpKDFMszcrPcffg5uhwSgPCLD2WWxmq6isisz87tzT01tuGhg==}
+    engines: {node: '>= 0.4'}
+    dependencies:
+      has-symbols: 1.0.3
+    dev: true
+
+  /is-typed-array@1.1.13:
+    resolution: {integrity: sha512-uZ25/bUAlUY5fR4OKT4rZQEBrzQWYV9ZJYGGsUmEJ6thodVJ1HX64ePQ6Z0qPWP+m+Uq6e9UugrE38jeYsDSMw==}
+    engines: {node: '>= 0.4'}
+    dependencies:
+      which-typed-array: 1.1.15
+    dev: true
+
+  /is-weakref@1.0.2:
+    resolution: {integrity: sha512-qctsuLZmIQ0+vSSMfoVvyFe2+GSEvnmZ2ezTup1SBse9+twCCeial6EEi3Nc2KFcf6+qz2FBPnjXsk8xhKSaPQ==}
+    dependencies:
+      call-bind: 1.0.7
+    dev: true
+
+  /isarray@2.0.5:
+    resolution: {integrity: sha512-xHjhDr3cNBK0BzdUJSPXZntQUx/mwMS5Rw4A7lPJ90XGAO6ISP/ePDNuo0vhqOZU+UD5JoodwCAAoZQd3FeAKw==}
+    dev: true
+
+  /isexe@2.0.0:
+    resolution: {integrity: sha512-RHxMLp9lnKHGHRng9QFhRCMbYAcVpn69smSGcq3f36xjgVVWThj4qqLbTLlq7Ssj8B+fIQ1EuCEGI2lKsyQeIw==}
+    dev: true
+
+  /jackspeak@3.4.3:
+    resolution: {integrity: sha512-OGlZQpz2yfahA/Rd1Y8Cd9SIEsqvXkLVoSw/cgwhnhFMDbsQFeZYoJJ7bIZBS9BcamUW96asq/npPWugM+RQBw==}
+    dependencies:
+      '@isaacs/cliui': 8.0.2
+    optionalDependencies:
+      '@pkgjs/parseargs': 0.11.0
+    dev: true
+
+  /javascript-natural-sort@0.7.1:
+    resolution: {integrity: sha512-nO6jcEfZWQXDhOiBtG2KvKyEptz7RVbpGP4vTD2hLBdmNQSsCiicO2Ioinv6UI4y9ukqnBpy+XZ9H6uLNgJTlw==}
+    dev: true
+
+  /joycon@3.1.1:
+    resolution: {integrity: sha512-34wB/Y7MW7bzjKRjUKTa46I2Z7eV62Rkhva+KkopW7Qvv/OSWBqvkSY7vusOPrNuZcUG3tApvdVgNB8POj3SPw==}
+    engines: {node: '>=10'}
+    dev: true
+
+  /js-tokens@4.0.0:
+    resolution: {integrity: sha512-RdJUflcE3cUzKiMqQgsCu06FPu9UdIJO0beYbPhHN4k6apgJtifcoCtT9bcxOpYBtpD2kCM6Sbzg4CausW/PKQ==}
+    dev: true
+
+  /js-tokens@9.0.0:
+    resolution: {integrity: sha512-WriZw1luRMlmV3LGJaR6QOJjWwgLUTf89OwT2lUOyjX2dJGBwgmIkbcz+7WFZjrZM635JOIR517++e/67CP9dQ==}
+    dev: true
+
+  /js-yaml@4.1.0:
+    resolution: {integrity: sha512-wpxZs9NoxZaJESJGIZTyDEaYpl0FKSA+FB9aJiyemKhMwkxQg63h4T1KJgUGHpTqPDNRcmmYLugrRjJlBtWvRA==}
+    hasBin: true
+    dependencies:
+      argparse: 2.0.1
+    dev: true
+
+  /jsesc@2.5.2:
+    resolution: {integrity: sha512-OYu7XEzjkCQ3C5Ps3QIZsQfNpqoJyZZA99wd9aWd05NCtC5pWOkShK2mkL6HXQR6/Cy2lbNdPlZBpuQHXE63gA==}
+    engines: {node: '>=4'}
+    hasBin: true
+    dev: true
+
+  /jsesc@3.0.2:
+    resolution: {integrity: sha512-xKqzzWXDttJuOcawBt4KnKHHIf5oQ/Cxax+0PWFG+DFDgHNAdi+TXECADI+RYiFUMmx8792xsMbbgXj4CwnP4g==}
+    engines: {node: '>=6'}
+    hasBin: true
+    dev: true
+
+  /json-buffer@3.0.1:
+    resolution: {integrity: sha512-4bV5BfR2mqfQTJm+V5tPPdf+ZpuhiIvTuAB5g8kcrXOZpTT/QwwVRWBywX1ozr6lEuPdbHxwaJlm9G6mI2sfSQ==}
+
+  /json-schema-traverse@0.4.1:
+    resolution: {integrity: sha512-xbbCH5dCYU5T8LcEhhuh7HJ88HXuW3qsI3Y0zOZFKfZEHcpWiHU/Jxzk629Brsab/mMiHQti9wMP+845RPe3Vg==}
+    dev: true
+
+  /json-stable-stringify-without-jsonify@1.0.1:
+    resolution: {integrity: sha512-Bdboy+l7tA3OGW6FjyFHWkP5LuByj1Tk33Ljyq0axyzdk9//JSi2u3fP1QSmd1KNwq6VOKYGlAu87CisVir6Pw==}
+    dev: true
+
+  /json5@1.0.2:
+    resolution: {integrity: sha512-g1MWMLBiz8FKi1e4w0UyVL3w+iJceWAFBAaBnnGKOpNa5f8TLktkbre1+s6oICydWAm+HRUGTmI+//xv2hvXYA==}
+    hasBin: true
+    dependencies:
+      minimist: 1.2.8
+    dev: true
+
+  /jsx-ast-utils@3.3.5:
+    resolution: {integrity: sha512-ZZow9HBI5O6EPgSJLUb8n2NKgmVWTwCvHGwFuJlMjvLFqlGG6pjirPhtdsseaLZjSibD8eegzmYpUZwoIlj2cQ==}
+    engines: {node: '>=4.0'}
+    dependencies:
+      array-includes: 3.1.8
+      array.prototype.flat: 1.3.2
+      object.assign: 4.1.5
+      object.values: 1.2.0
+    dev: true
+
+  /keyv@4.5.4:
+    resolution: {integrity: sha512-oxVHkHR/EJf2CNXnWxRLW6mg7JyCCUcG0DtEGmL2ctUo1PNTin1PUil+r/+4r5MpVgC/fn1kjsx7mjSujKqIpw==}
+    dependencies:
+      json-buffer: 3.0.1
+
+  /language-subtag-registry@0.3.23:
+    resolution: {integrity: sha512-0K65Lea881pHotoGEa5gDlMxt3pctLi2RplBb7Ezh4rRdLEOtgi7n4EwK9lamnUCkKBqaeKRVebTq6BAxSkpXQ==}
+    dev: true
+
+  /language-tags@1.0.9:
+    resolution: {integrity: sha512-MbjN408fEndfiQXbFQ1vnd+1NoLDsnQW41410oQBXiyXDMYH5z505juWa4KUE1LqxRC7DgOgZDbKLxHIwm27hA==}
+    engines: {node: '>=0.10'}
+    dependencies:
+      language-subtag-registry: 0.3.23
+    dev: true
+
+  /levn@0.4.1:
+    resolution: {integrity: sha512-+bT2uH4E5LGE7h/n3evcS/sQlJXCpIp6ym8OWJ5eV6+67Dsql/LaaT7qJBAt2rzfoa/5QBGBhxDix1dMt2kQKQ==}
+    engines: {node: '>= 0.8.0'}
+    dependencies:
+      prelude-ls: 1.2.1
+      type-check: 0.4.0
+    dev: true
+
+  /lilconfig@3.1.2:
+    resolution: {integrity: sha512-eop+wDAvpItUys0FWkHIKeC9ybYrTGbU41U5K7+bttZZeohvnY7M9dZ5kB21GNWiFT2q1OoPTvncPCgSOVO5ow==}
+    engines: {node: '>=14'}
+    dev: true
+
+  /lines-and-columns@1.2.4:
+    resolution: {integrity: sha512-7ylylesZQ/PV29jhEDl3Ufjo6ZX7gCqJr5F7PKrqc93v7fzSymt1BpwEU8nAUXs8qzzvqhbjhK5QZg6Mt/HkBg==}
+    dev: true
+
+  /load-tsconfig@0.2.5:
+    resolution: {integrity: sha512-IXO6OCs9yg8tMKzfPZ1YmheJbZCiEsnBdcB03l0OcfK9prKnJb96siuHCr5Fl37/yo9DnKU+TLpxzTUspw9shg==}
+    engines: {node: ^12.20.0 || ^14.13.1 || >=16.0.0}
+    dev: true
+
+  /local-pkg@0.5.1:
+    resolution: {integrity: sha512-9rrA30MRRP3gBD3HTGnC6cDFpaE1kVDWxWgqWJUN0RvDNAo+Nz/9GxB+nHOH0ifbVFy0hSA1V6vFDvnx54lTEQ==}
+    engines: {node: '>=14'}
+    dependencies:
+      mlly: 1.7.3
+      pkg-types: 1.2.1
+    dev: true
+
+  /locate-path@6.0.0:
+    resolution: {integrity: sha512-iPZK6eYjbxRu3uB4/WZ3EsEIMJFMqAoopl3R+zuq0UjcAm/MO6KCweDgPfP3elTztoKP3KtnVHxTn2NHBSDVUw==}
+    engines: {node: '>=10'}
+    dependencies:
+      p-locate: 5.0.0
+    dev: true
+
+  /lodash.camelcase@4.3.0:
+    resolution: {integrity: sha512-TwuEnCnxbc3rAvhf/LbG7tJUDzhqXyFnv3dtzLOPgCG/hODL7WFnsbwktkD7yUV0RrreP/l1PALq/YSg6VvjlA==}
+    dev: true
+
+  /lodash.kebabcase@4.1.1:
+    resolution: {integrity: sha512-N8XRTIMMqqDgSy4VLKPnJ/+hpGZN+PHQiJnSenYqPaVV/NCqEogTnAdZLQiGKhxX+JCs8waWq2t1XHWKOmlY8g==}
+    dev: true
+
+  /lodash.merge@4.6.2:
+    resolution: {integrity: sha512-0KpjqXRVvrYyCsX1swR/XTK0va6VQkQM6MNo7PqW77ByjAhoARA8EfrP1N4+KlKj8YS0ZUCtRT/YUuhyYDujIQ==}
+    dev: true
+
+  /lodash.snakecase@4.1.1:
+    resolution: {integrity: sha512-QZ1d4xoBHYUeuouhEq3lk3Uq7ldgyFXGBhg04+oRLnIz8o9T65Eh+8YdroUwn846zchkA9yDsDl5CVVaV2nqYw==}
+    dev: true
+
+  /lodash.sortby@4.7.0:
+    resolution: {integrity: sha512-HDWXG8isMntAyRF5vZ7xKuEvOhT4AhlRt/3czTSjvGUxjYCBVRQY48ViDHyfYz9VIoBkW4TMGQNapx+l3RUwdA==}
+    dev: true
+
+  /lodash.upperfirst@4.3.1:
+    resolution: {integrity: sha512-sReKOYJIJf74dhJONhU4e0/shzi1trVbSWDOhKYE5XV2O+H7Sb2Dihwuc7xWxVl+DgFPyTqIN3zMfT9cq5iWDg==}
+    dev: true
+
+  /lodash@4.17.21:
+    resolution: {integrity: sha512-v2kDEe57lecTulaDIuNTPy3Ry4gLGJ6Z1O3vE1krgXZNrsQ+LFTGHVxVjcXPs17LhbZVGedAJv8XZ1tvj5FvSg==}
+    dev: true
+
+  /loupe@2.3.7:
+    resolution: {integrity: sha512-zSMINGVYkdpYSOBmLi0D1Uo7JU9nVdQKrHxC8eYlV+9YKK9WePqAlL7lSlorG/U2Fw1w0hTBmaa/jrQ3UbPHtA==}
+    dependencies:
+      get-func-name: 2.0.2
+    dev: true
+
+  /lowercase-keys@3.0.0:
+    resolution: {integrity: sha512-ozCC6gdQ+glXOQsveKD0YsDy8DSQFjDTz4zyzEHNV5+JP5D62LmfDZ6o1cycFx9ouG940M5dE8C8CTewdj2YWQ==}
+    engines: {node: ^12.20.0 || ^14.13.1 || >=16.0.0}
+    dev: false
+
+  /lru-cache@10.4.3:
+    resolution: {integrity: sha512-JNAzZcXrCt42VGLuYz0zfAzDfAvJWW6AfYlDBQyDV5DClI2m5sAmK+OIO7s59XfsRsWHp02jAJrRadPRGTt6SQ==}
+    dev: true
+
+  /magic-string@0.30.13:
+    resolution: {integrity: sha512-8rYBO+MsWkgjDSOvLomYnzhdwEG51olQ4zL5KXnNJWV5MNmrb4rTZdrtkhxjnD/QyZUqR/Z/XDsUs/4ej2nx0g==}
+    dependencies:
+      '@jridgewell/sourcemap-codec': 1.5.0
+    dev: true
+
+  /merge-stream@2.0.0:
+    resolution: {integrity: sha512-abv/qOcuPfk3URPfDzmZU1LKmuw8kT+0nIHvKrKgFrwifol/doWcdA4ZqsWQ8ENrFKkd67Mfpo/LovbIUsbt3w==}
+    dev: true
+
+  /merge2@1.4.1:
+    resolution: {integrity: sha512-8q7VEgMJW4J8tcfVPy8g09NcQwZdbwFEqhe/WZkoIzjn/3TGDwtOCYtXGxA3O8tPzpczCCDgv+P2P5y00ZJOOg==}
+    engines: {node: '>= 8'}
+    dev: true
+
+  /micromatch@4.0.8:
+    resolution: {integrity: sha512-PXwfBhYu0hBCPw8Dn0E+WDYb7af3dSLVWKi3HGv84IdF4TyFoC0ysxFd0Goxw7nSv4T/PzEJQxsYsEiFCKo2BA==}
+    engines: {node: '>=8.6'}
+    dependencies:
+      braces: 3.0.3
+      picomatch: 2.3.1
+    dev: true
+
+  /mime-db@1.52.0:
+    resolution: {integrity: sha512-sPU4uV7dYlvtWJxwwxHD0PuihVNiE7TyAbQ5SWxDCB9mUYvOgroQOwYQQOKPJ8CIbE+1ETVlOoK1UC2nU3gYvg==}
+    engines: {node: '>= 0.6'}
+    dev: false
+
+  /mime-types@2.1.35:
+    resolution: {integrity: sha512-ZDY+bPm5zTTF+YpCrAU9nK0UgICYPT0QtT1NZWFv4s++TNkcgVaT0g6+4R2uI4MjQjzysHB1zxuWL50hzaeXiw==}
+    engines: {node: '>= 0.6'}
+    dependencies:
+      mime-db: 1.52.0
+    dev: false
+
+  /mimic-fn@4.0.0:
+    resolution: {integrity: sha512-vqiC06CuhBTUdZH+RYl8sFrL096vA45Ok5ISO6sE/Mr1jRbGH4Csnhi8f3wKVl7x8mO4Au7Ir9D3Oyv1VYMFJw==}
+    engines: {node: '>=12'}
+    dev: true
+
+  /mimic-response@3.1.0:
+    resolution: {integrity: sha512-z0yWI+4FDrrweS8Zmt4Ej5HdJmky15+L2e6Wgn3+iK5fWzb6T3fhNFq2+MeTRb064c6Wr4N/wv0DzQTjNzHNGQ==}
+    engines: {node: '>=10'}
+    dev: false
+
+  /mimic-response@4.0.0:
+    resolution: {integrity: sha512-e5ISH9xMYU0DzrT+jl8q2ze9D6eWBto+I8CNpe+VI+K2J/F/k3PdkdTdz4wvGVH4NTpo+NRYTVIuMQEMMcsLqg==}
+    engines: {node: ^12.20.0 || ^14.13.1 || >=16.0.0}
+    dev: false
+
+  /minimatch@3.1.2:
+    resolution: {integrity: sha512-J7p63hRiAjw1NDEww1W7i37+ByIrOWO5XQQAzZ3VOcL0PNybwpfmV/N05zFAzwQ9USyEcX6t3UO+K5aqBQOIHw==}
+    dependencies:
+      brace-expansion: 1.1.11
+
+  /minimatch@9.0.5:
+    resolution: {integrity: sha512-G6T0ZX48xgozx7587koeX9Ys2NYy6Gmv//P89sEte9V9whIapMNF4idKxnW2QtCcLiTWlb/wfCabAtAFWhhBow==}
+    engines: {node: '>=16 || 14 >=14.17'}
+    dependencies:
+      brace-expansion: 2.0.1
+    dev: true
+
+  /minimist@1.2.8:
+    resolution: {integrity: sha512-2yyAR8qBkN3YuheJanUpWC5U3bb5osDywNB8RzDVlDwDHbocAJveqqj1u8+SVD7jkWT4yvsHCpWqqWqAxb0zCA==}
+    dev: true
+
+  /minipass@7.1.2:
+    resolution: {integrity: sha512-qOOzS1cBTWYF4BH8fVePDBOO9iptMnGUEZwNc/cMWnTV2nVLZ7VoNWEPHkYczZA0pdoA7dl6e7FL659nX9S2aw==}
+    engines: {node: '>=16 || 14 >=14.17'}
+    dev: true
+
+  /mlly@1.7.3:
+    resolution: {integrity: sha512-xUsx5n/mN0uQf4V548PKQ+YShA4/IW0KI1dZhrNrPCLG+xizETbHTkOa1f8/xut9JRPp8kQuMnz0oqwkTiLo/A==}
+    dependencies:
+      acorn: 8.14.0
+      pathe: 1.1.2
+      pkg-types: 1.2.1
+      ufo: 1.5.4
+    dev: true
+
+  /ms@2.1.3:
+    resolution: {integrity: sha512-6FlzubTLZG3J2a/NVCAleEhjzq5oxgHyaCU9yYXvcLsvoVaHJq/s5xXI6/XXP6tz7R9xAOtHnSO/tXtF3WRTlA==}
+
+  /mz@2.7.0:
+    resolution: {integrity: sha512-z81GNO7nnYMEhrGh9LeymoE4+Yr0Wn5McHIZMK5cfQCl+NDX08sCZgUc9/6MHni9IWuFLm1Z3HTCXu2z9fN62Q==}
+    dependencies:
+      any-promise: 1.3.0
+      object-assign: 4.1.1
+      thenify-all: 1.6.0
+    dev: true
+
+  /nanoid@3.3.7:
+    resolution: {integrity: sha512-eSRppjcPIatRIMC1U6UngP8XFcz8MQWGQdt1MTBQ7NaAmvXDfvNxbvWV3x2y6CdEUciCSsDHDQZbhYaB8QEo2g==}
+    engines: {node: ^10 || ^12 || ^13.7 || ^14 || >=15.0.1}
+    hasBin: true
+    dev: true
+
+  /natural-compare@1.4.0:
+    resolution: {integrity: sha512-OWND8ei3VtNC9h7V60qff3SVobHr996CTwgxubgyQYEpg290h9J0buyECNNJexkFm5sOajh5G116RYA1c8ZMSw==}
+    dev: true
+
+  /node-fetch@2.7.0:
+    resolution: {integrity: sha512-c4FRfUm/dbcWZ7U+1Wq0AwCyFL+3nt2bEw05wfxSz+DWpWsitgmSgYmy2dQdWyKC1694ELPqMs/YzUSNozLt8A==}
+    engines: {node: 4.x || >=6.0.0}
+    peerDependencies:
+      encoding: ^0.1.0
+    peerDependenciesMeta:
+      encoding:
+        optional: true
+    dependencies:
+      whatwg-url: 5.0.0
+    dev: false
+
+  /node-releases@2.0.18:
+    resolution: {integrity: sha512-d9VeXT4SJ7ZeOqGX6R5EM022wpL+eWPooLI+5UpWn2jCT1aosUQEhQP214x33Wkwx3JQMvIm+tIoVOdodFS40g==}
+    dev: true
+
+  /normalize-url@8.0.1:
+    resolution: {integrity: sha512-IO9QvjUMWxPQQhs60oOu10CRkWCiZzSUkzbXGGV9pviYl1fXYcvkzQ5jV9z8Y6un8ARoVRl4EtC6v6jNqbaJ/w==}
+    engines: {node: '>=14.16'}
+    dev: false
+
+  /npm-run-path@5.3.0:
+    resolution: {integrity: sha512-ppwTtiJZq0O/ai0z7yfudtBpWIoxM8yE6nHi1X47eFR2EWORqfbu6CnPlNsjeN683eT0qG6H/Pyf9fCcvjnnnQ==}
+    engines: {node: ^12.20.0 || ^14.13.1 || >=16.0.0}
+    dependencies:
+      path-key: 4.0.0
+    dev: true
+
+  /object-assign@4.1.1:
+    resolution: {integrity: sha512-rJgTQnkUnH1sFw8yT6VSU3zD3sWmu6sZhIseY8VX+GRu3P6F7Fu+JNDoXfklElbLJSnc3FUQHVe4cU5hj+BcUg==}
+    engines: {node: '>=0.10.0'}
+    dev: true
+
+  /object-inspect@1.13.3:
+    resolution: {integrity: sha512-kDCGIbxkDSXE3euJZZXzc6to7fCrKHNI/hSRQnRuQ+BWjFNzZwiFF8fj/6o2t2G9/jTj8PSIYTfCLelLZEeRpA==}
+    engines: {node: '>= 0.4'}
+    dev: true
+
+  /object-keys@1.1.1:
+    resolution: {integrity: sha512-NuAESUOUMrlIXOfHKzD6bpPu3tYt3xvjNdRIQ+FeT0lNb4K8WR70CaDxhuNguS2XG+GjkyMwOzsN5ZktImfhLA==}
+    engines: {node: '>= 0.4'}
+    dev: true
+
+  /object.assign@4.1.5:
+    resolution: {integrity: sha512-byy+U7gp+FVwmyzKPYhW2h5l3crpmGsxl7X2s8y43IgxvG4g3QZ6CffDtsNQy1WsmZpQbO+ybo0AlW7TY6DcBQ==}
+    engines: {node: '>= 0.4'}
+    dependencies:
+      call-bind: 1.0.7
+      define-properties: 1.2.1
+      has-symbols: 1.0.3
+      object-keys: 1.1.1
+    dev: true
+
+  /object.fromentries@2.0.8:
+    resolution: {integrity: sha512-k6E21FzySsSK5a21KRADBd/NGneRegFO5pLHfdQLpRDETUNJueLXs3WCzyQ3tFRDYgbq3KHGXfTbi2bs8WQ6rQ==}
+    engines: {node: '>= 0.4'}
+    dependencies:
+      call-bind: 1.0.7
+      define-properties: 1.2.1
+      es-abstract: 1.23.5
+      es-object-atoms: 1.0.0
+    dev: true
+
+  /object.groupby@1.0.3:
+    resolution: {integrity: sha512-+Lhy3TQTuzXI5hevh8sBGqbmurHbbIjAi0Z4S63nthVLmLxfbj4T54a4CfZrXIrt9iP4mVAPYMo/v99taj3wjQ==}
+    engines: {node: '>= 0.4'}
+    dependencies:
+      call-bind: 1.0.7
+      define-properties: 1.2.1
+      es-abstract: 1.23.5
+    dev: true
+
+  /object.values@1.2.0:
+    resolution: {integrity: sha512-yBYjY9QX2hnRmZHAjG/f13MzmBzxzYgQhFrke06TTyKY5zSTEqkOeukBzIdVA3j3ulu8Qa3MbVFShV7T2RmGtQ==}
+    engines: {node: '>= 0.4'}
+    dependencies:
+      call-bind: 1.0.7
+      define-properties: 1.2.1
+      es-object-atoms: 1.0.0
+    dev: true
+
+  /once@1.4.0:
+    resolution: {integrity: sha512-lNaJgI+2Q5URQBkccEKHTQOPaXdUxnZZElQTZY0MFUAuaEqe1E+Nyvgdz/aIyNi6Z9MzO5dv1H8n58/GELp3+w==}
+    dependencies:
+      wrappy: 1.0.2
+    dev: true
+
+  /onetime@6.0.0:
+    resolution: {integrity: sha512-1FlR+gjXK7X+AsAHso35MnyN5KqGwJRi/31ft6x0M194ht7S+rWAvd7PHss9xSKMzE0asv1pyIHaJYq+BbacAQ==}
+    engines: {node: '>=12'}
+    dependencies:
+      mimic-fn: 4.0.0
+    dev: true
+
+  /optionator@0.9.4:
+    resolution: {integrity: sha512-6IpQ7mKUxRcZNLIObR0hz7lxsapSSIYNZJwXPGeF0mTVqGKFIXj1DQcMoT22S3ROcLyY/rz0PWaWZ9ayWmad9g==}
+    engines: {node: '>= 0.8.0'}
+    dependencies:
+      deep-is: 0.1.4
+      fast-levenshtein: 2.0.6
+      levn: 0.4.1
+      prelude-ls: 1.2.1
+      type-check: 0.4.0
+      word-wrap: 1.2.5
+    dev: true
+
+  /p-cancelable@4.0.1:
+    resolution: {integrity: sha512-wBowNApzd45EIKdO1LaU+LrMBwAcjfPaYtVzV3lmfM3gf8Z4CHZsiIqlM8TZZ8okYvh5A1cP6gTfCRQtwUpaUg==}
+    engines: {node: '>=14.16'}
+    dev: false
+
+  /p-limit@3.1.0:
+    resolution: {integrity: sha512-TYOanM3wGwNGsZN2cVTYPArw454xnXj5qmWF1bEoAc4+cU/ol7GVh7odevjp1FNHduHc3KZMcFduxU5Xc6uJRQ==}
+    engines: {node: '>=10'}
+    dependencies:
+      yocto-queue: 0.1.0
+    dev: true
+
+  /p-limit@5.0.0:
+    resolution: {integrity: sha512-/Eaoq+QyLSiXQ4lyYV23f14mZRQcXnxfHrN0vCai+ak9G0pp9iEQukIIZq5NccEvwRB8PUnZT0KsOoDCINS1qQ==}
+    engines: {node: '>=18'}
+    dependencies:
+      yocto-queue: 1.1.1
+    dev: true
+
+  /p-locate@5.0.0:
+    resolution: {integrity: sha512-LaNjtRWUBY++zB5nE/NwcaoMylSPk+S+ZHNB1TzdbMJMny6dynpAGt7X/tl/QYq3TIeE6nxHppbo2LGymrG5Pw==}
+    engines: {node: '>=10'}
+    dependencies:
+      p-limit: 3.1.0
+    dev: true
+
+  /package-json-from-dist@1.0.1:
+    resolution: {integrity: sha512-UEZIS3/by4OC8vL3P2dTXRETpebLI2NiI5vIrjaD/5UtrkFX/tNbwjTSRAGC/+7CAo2pIcBaRgWmcBBHcsaCIw==}
+    dev: true
+
+  /parent-module@1.0.1:
+    resolution: {integrity: sha512-GQ2EWRpQV8/o+Aw8YqtfZZPfNRWZYkbidE9k5rpl/hC3vtHHBfGm2Ifi6qWV+coDGkrUKZAxE3Lot5kcsRlh+g==}
+    engines: {node: '>=6'}
+    dependencies:
+      callsites: 3.1.0
+    dev: true
+
+  /path-exists@4.0.0:
+    resolution: {integrity: sha512-ak9Qy5Q7jYb2Wwcey5Fpvg2KoAc/ZIhLSLOSBmRmygPsGwkVVt0fZa0qrtMz+m6tJTAHfZQ8FnmB4MG4LWy7/w==}
+    engines: {node: '>=8'}
+    dev: true
+
+  /path-is-absolute@1.0.1:
+    resolution: {integrity: sha512-AVbw3UJ2e9bq64vSaS9Am0fje1Pa8pbGqTTsmXfaIiMpnr5DlDhfJOuLj9Sf95ZPVDAUerDfEk88MPmPe7UCQg==}
+    engines: {node: '>=0.10.0'}
+    dev: true
+
+  /path-key@3.1.1:
+    resolution: {integrity: sha512-ojmeN0qd+y0jszEtoY48r0Peq5dwMEkIlCOu6Q5f41lfkswXuKtYrhgoTpLnyIcHm24Uhqx+5Tqm2InSwLhE6Q==}
+    engines: {node: '>=8'}
+    dev: true
+
+  /path-key@4.0.0:
+    resolution: {integrity: sha512-haREypq7xkM7ErfgIyA0z+Bj4AGKlMSdlQE2jvJo6huWD1EdkKYV+G/T4nq0YEF2vgTT8kqMFKo1uHn950r4SQ==}
+    engines: {node: '>=12'}
+    dev: true
+
+  /path-parse@1.0.7:
+    resolution: {integrity: sha512-LDJzPVEEEPR+y48z93A0Ed0yXb8pAByGWo/k5YYdYgpY2/2EsOsksJrq7lOHxryrVOn1ejG6oAp8ahvOIQD8sw==}
+    dev: true
+
+  /path-scurry@1.11.1:
+    resolution: {integrity: sha512-Xa4Nw17FS9ApQFJ9umLiJS4orGjm7ZzwUrwamcGQuHSzDyth9boKDaycYdDcZDuqYATXw4HFXgaqWTctW/v1HA==}
+    engines: {node: '>=16 || 14 >=14.18'}
+    dependencies:
+      lru-cache: 10.4.3
+      minipass: 7.1.2
+    dev: true
+
+  /path-type@4.0.0:
+    resolution: {integrity: sha512-gDKb8aZMDeD/tZWs9P6+q0J9Mwkdl6xMV8TjnGP3qJVJ06bdMgkbBlLU8IdfOsIsFz2BW1rNVT3XuNEl8zPAvw==}
+    engines: {node: '>=8'}
+    dev: true
+
+  /pathe@1.1.2:
+    resolution: {integrity: sha512-whLdWMYL2TwI08hn8/ZqAbrVemu0LNaNNJZX73O6qaIdCTfXutsLhMkjdENX0qhsQ9uIimo4/aQOmXkoon2nDQ==}
+    dev: true
+
+  /pathval@1.1.1:
+    resolution: {integrity: sha512-Dp6zGqpTdETdR63lehJYPeIOqpiNBNtc7BpWSLrOje7UaIsE5aY92r/AunQA7rsXvet3lrJ3JnZX29UPTKXyKQ==}
+    dev: true
+
+  /picocolors@1.1.1:
+    resolution: {integrity: sha512-xceH2snhtb5M9liqDsmEw56le376mTZkEX/jEb/RxNFyegNul7eNslCXP9FDj/Lcu0X8KEyMceP2ntpaHrDEVA==}
+    dev: true
+
+  /picomatch@2.3.1:
+    resolution: {integrity: sha512-JU3teHTNjmE2VCGFzuY8EXzCDVwEqB2a8fsIvwaStHhAWJEeVd1o1QD80CU6+ZdEXXSLbSsuLwJjkCBWqRQUVA==}
+    engines: {node: '>=8.6'}
+    dev: true
+
+  /picomatch@4.0.2:
+    resolution: {integrity: sha512-M7BAV6Rlcy5u+m6oPhAPFgJTzAioX/6B0DxyvDlo9l8+T3nLKbrczg2WLUyzd45L8RqfUMyGPzekbMvX2Ldkwg==}
+    engines: {node: '>=12'}
+    dev: true
+
+  /pirates@4.0.6:
+    resolution: {integrity: sha512-saLsH7WeYYPiD25LDuLRRY/i+6HaPYr6G1OUlN39otzkSTxKnubR9RTxS3/Kk50s1g2JTgFwWQDQyplC5/SHZg==}
+    engines: {node: '>= 6'}
+    dev: true
+
+  /pkg-types@1.2.1:
+    resolution: {integrity: sha512-sQoqa8alT3nHjGuTjuKgOnvjo4cljkufdtLMnO2LBP/wRwuDlo1tkaEdMxCRhyGRPacv/ztlZgDPm2b7FAmEvw==}
+    dependencies:
+      confbox: 0.1.8
+      mlly: 1.7.3
+      pathe: 1.1.2
+    dev: true
+
+  /possible-typed-array-names@1.0.0:
+    resolution: {integrity: sha512-d7Uw+eZoloe0EHDIYoe+bQ5WXnGMOpmiZFTuMWCwpjzzkL2nTjcKiAk4hh8TjnGye2TwWOk3UXucZ+3rbmBa8Q==}
+    engines: {node: '>= 0.4'}
+    dev: true
+
+  /postcss-load-config@6.0.1:
+    resolution: {integrity: sha512-oPtTM4oerL+UXmx+93ytZVN82RrlY/wPUV8IeDxFrzIjXOLF1pN+EmKPLbubvKHT2HC20xXsCAH2Z+CKV6Oz/g==}
+    engines: {node: '>= 18'}
+    peerDependencies:
+      jiti: '>=1.21.0'
+      postcss: '>=8.0.9'
+      tsx: ^4.8.1
+      yaml: ^2.4.2
+    peerDependenciesMeta:
+      jiti:
+        optional: true
+      postcss:
+        optional: true
+      tsx:
+        optional: true
+      yaml:
+        optional: true
+    dependencies:
+      lilconfig: 3.1.2
+    dev: true
+
+  /postcss@8.4.49:
+    resolution: {integrity: sha512-OCVPnIObs4N29kxTjzLfUryOkvZEq+pf8jTF0lg8E7uETuWHA+v7j3c/xJmiqpX450191LlmZfUKkXxkTry7nA==}
+    engines: {node: ^10 || ^12 || >=14}
+    dependencies:
+      nanoid: 3.3.7
+      picocolors: 1.1.1
+      source-map-js: 1.2.1
+    dev: true
+
+  /prelude-ls@1.2.1:
+    resolution: {integrity: sha512-vkcDPrRZo1QZLbn5RLGPpg/WmIQ65qoWWhcGKf/b5eplkkarX0m9z8ppCat4mlOqUsWpyNuYgO3VRyrYHSzX5g==}
+    engines: {node: '>= 0.8.0'}
+    dev: true
+
+  /prettier-linter-helpers@1.0.0:
+    resolution: {integrity: sha512-GbK2cP9nraSSUF9N2XwUwqfzlAFlMNYYl+ShE/V+H8a9uNl/oUqB1w2EL54Jh0OlyRSd8RfWYJ3coVS4TROP2w==}
+    engines: {node: '>=6.0.0'}
+    dependencies:
+      fast-diff: 1.3.0
+    dev: true
+
+  /prettier@3.3.3:
+    resolution: {integrity: sha512-i2tDNA0O5IrMO757lfrdQZCc2jPNDVntV0m/+4whiDfWaTKfMNgR7Qz0NAeGz/nRqF4m5/6CLzbP4/liHt12Ew==}
+    engines: {node: '>=14'}
+    hasBin: true
+    dev: true
+
+  /pretty-format@29.7.0:
+    resolution: {integrity: sha512-Pdlw/oPxN+aXdmM9R00JVC9WVFoCLTKJvDVLgmJ+qAffBMxsV85l/Lu7sNx4zSzPyoL2euImuEwHhOXdEgNFZQ==}
+    engines: {node: ^14.15.0 || ^16.10.0 || >=18.0.0}
+    dependencies:
+      '@jest/schemas': 29.6.3
+      ansi-styles: 5.2.0
+      react-is: 18.3.1
+    dev: true
+
+  /punycode@2.3.1:
+    resolution: {integrity: sha512-vYt7UD1U9Wg6138shLtLOvdAu+8DsC/ilFtEVHcH+wydcSpNE20AfSOduf6MkRFahL5FY7X1oU7nKVZFtfq8Fg==}
+    engines: {node: '>=6'}
+    dev: true
+
+  /queue-microtask@1.2.3:
+    resolution: {integrity: sha512-NuaNSa6flKT5JaSYQzJok04JzTL1CA6aGhv5rfLW3PgqA+M2ChpZQnAC8h8i4ZFkBS8X5RqkDBHA7r4hej3K9A==}
+    dev: true
+
+  /quick-lru@5.1.1:
+    resolution: {integrity: sha512-WuyALRjWPDGtt/wzJiadO5AXY+8hZ80hVpe6MyivgraREW751X3SbhRvG3eLKOYN+8VEvqLcf3wdnt44Z4S4SA==}
+    engines: {node: '>=10'}
+    dev: false
+
+  /react-is@18.3.1:
+    resolution: {integrity: sha512-/LLMVyas0ljjAtoYiPqYiL8VWXzUUdThrmU5+n20DZv+a+ClRoevUzw5JxU+Ieh5/c87ytoTBV9G1FiKfNJdmg==}
+    dev: true
+
+  /readdirp@4.0.2:
+    resolution: {integrity: sha512-yDMz9g+VaZkqBYS/ozoBJwaBhTbZo3UNYQHNRw1D3UFQB8oHB4uS/tAODO+ZLjGWmUbKnIlOWO+aaIiAxrUWHA==}
+    engines: {node: '>= 14.16.0'}
+    dev: true
+
+  /regexp.prototype.flags@1.5.3:
+    resolution: {integrity: sha512-vqlC04+RQoFalODCbCumG2xIOvapzVMHwsyIGM/SIE8fRhFFsXeH8/QQ+s0T0kDAhKc4k30s73/0ydkHQz6HlQ==}
+    engines: {node: '>= 0.4'}
+    dependencies:
+      call-bind: 1.0.7
+      define-properties: 1.2.1
+      es-errors: 1.3.0
+      set-function-name: 2.0.2
+    dev: true
+
+  /resolve-alpn@1.2.1:
+    resolution: {integrity: sha512-0a1F4l73/ZFZOakJnQ3FvkJ2+gSTQWz/r2KE5OdDY0TxPm5h4GkqkWWfM47T7HsbnOtcJVEF4epCVy6u7Q3K+g==}
+    dev: false
+
+  /resolve-from@4.0.0:
+    resolution: {integrity: sha512-pb/MYmXstAkysRFx8piNI1tGFNQIFA3vkE3Gq4EuA1dF6gHp/+vgZqsCGJapvy8N3Q+4o7FwvquPJcnZ7RYy4g==}
+    engines: {node: '>=4'}
+    dev: true
+
+  /resolve-from@5.0.0:
+    resolution: {integrity: sha512-qYg9KP24dD5qka9J47d0aVky0N+b4fTU89LN9iDnjB5waksiC49rvMB0PrUJQGoTmH50XPiqOvAjDfaijGxYZw==}
+    engines: {node: '>=8'}
+    dev: true
+
+  /resolve-pkg-maps@1.0.0:
+    resolution: {integrity: sha512-seS2Tj26TBVOC2NIc2rOe2y2ZO7efxITtLZcGSOnHHNOQ7CkiUBfw0Iw2ck6xkIhPwLhKNLS8BO+hEpngQlqzw==}
+    dev: true
+
+  /resolve@1.22.8:
+    resolution: {integrity: sha512-oKWePCxqpd6FlLvGV1VU0x7bkPmmCNolxzjMf4NczoDnQcIWrAF+cPtZn5i6n+RfD2d9i0tzpKnG6Yk168yIyw==}
+    hasBin: true
+    dependencies:
+      is-core-module: 2.15.1
+      path-parse: 1.0.7
+      supports-preserve-symlinks-flag: 1.0.0
+    dev: true
+
+  /responselike@3.0.0:
+    resolution: {integrity: sha512-40yHxbNcl2+rzXvZuVkrYohathsSJlMTXKryG5y8uciHv1+xDLHQpgjG64JUO9nrEq2jGLH6IZ8BcZyw3wrweg==}
+    engines: {node: '>=14.16'}
+    dependencies:
+      lowercase-keys: 3.0.0
+    dev: false
+
+  /reusify@1.0.4:
+    resolution: {integrity: sha512-U9nH88a3fc/ekCF1l0/UP1IosiuIjyTh7hBvXVMHYgVcfGvt897Xguj2UOLDeI5BG2m7/uwyaLVT6fbtCwTyzw==}
+    engines: {iojs: '>=1.0.0', node: '>=0.10.0'}
+    dev: true
+
+  /rimraf@3.0.2:
+    resolution: {integrity: sha512-JZkJMZkAGFFPP2YqXZXPbMlMBgsxzE8ILs4lMIX/2o0L9UBw9O/Y3o6wFw/i9YLapcUJWwqbi3kdxIPdC62TIA==}
+    deprecated: Rimraf versions prior to v4 are no longer supported
+    hasBin: true
+    dependencies:
+      glob: 7.2.3
+    dev: true
+
+  /rollup@4.27.3:
+    resolution: {integrity: sha512-SLsCOnlmGt9VoZ9Ek8yBK8tAdmPHeppkw+Xa7yDlCEhDTvwYei03JlWo1fdc7YTfLZ4tD8riJCUyAgTbszk1fQ==}
+    engines: {node: '>=18.0.0', npm: '>=8.0.0'}
+    hasBin: true
+    dependencies:
+      '@types/estree': 1.0.6
+    optionalDependencies:
+      '@rollup/rollup-android-arm-eabi': 4.27.3
+      '@rollup/rollup-android-arm64': 4.27.3
+      '@rollup/rollup-darwin-arm64': 4.27.3
+      '@rollup/rollup-darwin-x64': 4.27.3
+      '@rollup/rollup-freebsd-arm64': 4.27.3
+      '@rollup/rollup-freebsd-x64': 4.27.3
+      '@rollup/rollup-linux-arm-gnueabihf': 4.27.3
+      '@rollup/rollup-linux-arm-musleabihf': 4.27.3
+      '@rollup/rollup-linux-arm64-gnu': 4.27.3
+      '@rollup/rollup-linux-arm64-musl': 4.27.3
+      '@rollup/rollup-linux-powerpc64le-gnu': 4.27.3
+      '@rollup/rollup-linux-riscv64-gnu': 4.27.3
+      '@rollup/rollup-linux-s390x-gnu': 4.27.3
+      '@rollup/rollup-linux-x64-gnu': 4.27.3
+      '@rollup/rollup-linux-x64-musl': 4.27.3
+      '@rollup/rollup-win32-arm64-msvc': 4.27.3
+      '@rollup/rollup-win32-ia32-msvc': 4.27.3
+      '@rollup/rollup-win32-x64-msvc': 4.27.3
+      fsevents: 2.3.3
+    dev: true
+
+  /run-parallel@1.2.0:
+    resolution: {integrity: sha512-5l4VyZR86LZ/lDxZTR6jqL8AFE2S0IFLMP26AbjsLVADxHdhB/c0GUsH+y39UfCi3dzz8OlQuPmnaJOMoDHQBA==}
+    dependencies:
+      queue-microtask: 1.2.3
+    dev: true
+
+  /safe-array-concat@1.1.2:
+    resolution: {integrity: sha512-vj6RsCsWBCf19jIeHEfkRMw8DPiBb+DMXklQ/1SGDHOMlHdPUkZXFQ2YdplS23zESTijAcurb1aSgJA3AgMu1Q==}
+    engines: {node: '>=0.4'}
+    dependencies:
+      call-bind: 1.0.7
+      get-intrinsic: 1.2.4
+      has-symbols: 1.0.3
+      isarray: 2.0.5
+    dev: true
+
+  /safe-buffer@5.2.1:
+    resolution: {integrity: sha512-rp3So07KcdmmKbGvgaNxQSJr7bGVSVk5S9Eq1F+ppbRo70+YeaDxkw5Dd8NPN+GD6bjnYm2VuPuCXmpuYvmCXQ==}
+    dev: false
+
+  /safe-regex-test@1.0.3:
+    resolution: {integrity: sha512-CdASjNJPvRa7roO6Ra/gLYBTzYzzPyyBXxIMdGW3USQLyjWEls2RgW5UBTXaQVp+OrpeCK3bLem8smtmheoRuw==}
+    engines: {node: '>= 0.4'}
+    dependencies:
+      call-bind: 1.0.7
+      es-errors: 1.3.0
+      is-regex: 1.1.4
+    dev: true
+
+  /sax@1.4.1:
+    resolution: {integrity: sha512-+aWOz7yVScEGoKNd4PA10LZ8sk0A/z5+nXQG5giUO5rprX9jgYsTdov9qCchZiPIZezbZH+jRut8nPodFAX4Jg==}
+    dev: false
+
+  /semver@6.3.1:
+    resolution: {integrity: sha512-BR7VvDCVHO+q2xBEWskxS6DJE1qRnb7DxzUrogb71CWoSficBxYsiAGd+Kl0mmq/MprG9yArRkyrQxTO6XjMzA==}
+    hasBin: true
+
+  /semver@7.6.3:
+    resolution: {integrity: sha512-oVekP1cKtI+CTDvHWYFUcMtsK/00wmAEfyqKfNdARm8u1wNVhSgaX7A8d4UuIlUI5e84iEwOhs7ZPYRmzU9U6A==}
+    engines: {node: '>=10'}
+    hasBin: true
+    dev: true
+
+  /set-function-length@1.2.2:
+    resolution: {integrity: sha512-pgRc4hJ4/sNjWCSS9AmnS40x3bNMDTknHgL5UaMBTMyJnU90EgWh1Rz+MC9eFu4BuN/UwZjKQuY/1v3rM7HMfg==}
+    engines: {node: '>= 0.4'}
+    dependencies:
+      define-data-property: 1.1.4
+      es-errors: 1.3.0
+      function-bind: 1.1.2
+      get-intrinsic: 1.2.4
+      gopd: 1.0.1
+      has-property-descriptors: 1.0.2
+    dev: true
+
+  /set-function-name@2.0.2:
+    resolution: {integrity: sha512-7PGFlmtwsEADb0WYyvCMa1t+yke6daIG4Wirafur5kcf+MhUnPms1UeR0CKQdTZD81yESwMHbtn+TR+dMviakQ==}
+    engines: {node: '>= 0.4'}
+    dependencies:
+      define-data-property: 1.1.4
+      es-errors: 1.3.0
+      functions-have-names: 1.2.3
+      has-property-descriptors: 1.0.2
+    dev: true
+
+  /shebang-command@2.0.0:
+    resolution: {integrity: sha512-kHxr2zZpYtdmrN1qDjrrX/Z1rR1kG8Dx+gkpK1G4eXmvXswmcE1hTWBWYUzlraYw1/yZp6YuDY77YtvbN0dmDA==}
+    engines: {node: '>=8'}
+    dependencies:
+      shebang-regex: 3.0.0
+    dev: true
+
+  /shebang-regex@3.0.0:
+    resolution: {integrity: sha512-7++dFhtcx3353uBaq8DDR4NuxBetBzC7ZQOhmTQInHEd6bSrXdiEyzCvG07Z44UYdLShWUyXt5M/yhz8ekcb1A==}
+    engines: {node: '>=8'}
+    dev: true
+
+  /side-channel@1.0.6:
+    resolution: {integrity: sha512-fDW/EZ6Q9RiO8eFG8Hj+7u/oW+XrPTIChwCOM2+th2A6OblDtYYIpve9m+KvI9Z4C9qSEXlaGR6bTEYHReuglA==}
+    engines: {node: '>= 0.4'}
+    dependencies:
+      call-bind: 1.0.7
+      es-errors: 1.3.0
+      get-intrinsic: 1.2.4
+      object-inspect: 1.13.3
+    dev: true
+
+  /siginfo@2.0.0:
+    resolution: {integrity: sha512-ybx0WO1/8bSBLEWXZvEd7gMW3Sn3JFlW3TvX1nREbDLRNQNaeNN8WK0meBwPdAaOI7TtRRRJn/Es1zhrrCHu7g==}
+    dev: true
+
+  /signal-exit@4.1.0:
+    resolution: {integrity: sha512-bzyZ1e88w9O1iNJbKnOlvYTrWPDl46O1bG0D3XInv+9tkPrxrN8jUUTiFlDkkmKWgn1M6CfIA13SuGqOa9Korw==}
+    engines: {node: '>=14'}
+    dev: true
+
+  /slash@3.0.0:
+    resolution: {integrity: sha512-g9Q1haeby36OSStwb4ntCGGGaKsaVSjQ68fBxoQcutl5fS1vuY18H3wSt3jFyFtrkx+Kz0V1G85A4MyAdDMi2Q==}
+    engines: {node: '>=8'}
+    dev: true
+
+  /source-map-js@1.2.1:
+    resolution: {integrity: sha512-UXWMKhLOwVKb728IUtQPXxfYU+usdybtUrK/8uGE8CQMvrhOpwvzDBwj0QhSL7MQc7vIsISBG8VQ8+IDQxpfQA==}
+    engines: {node: '>=0.10.0'}
+    dev: true
+
+  /source-map@0.5.7:
+    resolution: {integrity: sha512-LbrmJOMUSdEVxIKvdcJzQC+nQhe8FUZQTXQy6+I75skNgn3OoQ0DZA8YnFa7gp8tqtL3KPf1kmo0R5DoApeSGQ==}
+    engines: {node: '>=0.10.0'}
+    dev: true
+
+  /source-map@0.8.0-beta.0:
+    resolution: {integrity: sha512-2ymg6oRBpebeZi9UUNsgQ89bhx01TcTkmNTGnNO88imTmbSgy4nfujrgVEFKWpMTEGA11EDkTt7mqObTPdigIA==}
+    engines: {node: '>= 8'}
+    dependencies:
+      whatwg-url: 7.1.0
+    dev: true
+
+  /stackback@0.0.2:
+    resolution: {integrity: sha512-1XMJE5fQo1jGH6Y/7ebnwPOBEkIEnT4QF32d5R1+VXdXveM0IBMJt8zfaxX1P3QhVwrYe+576+jkANtSS2mBbw==}
+    dev: true
+
+  /std-env@3.8.0:
+    resolution: {integrity: sha512-Bc3YwwCB+OzldMxOXJIIvC6cPRWr/LxOp48CdQTOkPyk/t4JWWJbrilwBd7RJzKV8QW7tJkcgAmeuLLJugl5/w==}
+    dev: true
+
+  /string-width@4.2.3:
+    resolution: {integrity: sha512-wKyQRQpjJ0sIp62ErSZdGsjMJWsap5oRNihHhu6G7JVO/9jIB6UyevL+tXuOqrng8j/cxKTWyWUwvSTriiZz/g==}
+    engines: {node: '>=8'}
+    dependencies:
+      emoji-regex: 8.0.0
+      is-fullwidth-code-point: 3.0.0
+      strip-ansi: 6.0.1
+    dev: true
+
+  /string-width@5.1.2:
+    resolution: {integrity: sha512-HnLOCR3vjcY8beoNLtcjZ5/nxn2afmME6lhrDrebokqMap+XbeW8n9TXpPDOqdGK5qcI3oT0GKTW6wC7EMiVqA==}
+    engines: {node: '>=12'}
+    dependencies:
+      eastasianwidth: 0.2.0
+      emoji-regex: 9.2.2
+      strip-ansi: 7.1.0
+    dev: true
+
+  /string.prototype.includes@2.0.1:
+    resolution: {integrity: sha512-o7+c9bW6zpAdJHTtujeePODAhkuicdAryFsfVKwA+wGw89wJ4GTY484WTucM9hLtDEOpOvI+aHnzqnC5lHp4Rg==}
+    engines: {node: '>= 0.4'}
+    dependencies:
+      call-bind: 1.0.7
+      define-properties: 1.2.1
+      es-abstract: 1.23.5
+    dev: true
+
+  /string.prototype.trim@1.2.9:
+    resolution: {integrity: sha512-klHuCNxiMZ8MlsOihJhJEBJAiMVqU3Z2nEXWfWnIqjN0gEFS9J9+IxKozWWtQGcgoa1WUZzLjKPTr4ZHNFTFxw==}
+    engines: {node: '>= 0.4'}
+    dependencies:
+      call-bind: 1.0.7
+      define-properties: 1.2.1
+      es-abstract: 1.23.5
+      es-object-atoms: 1.0.0
+    dev: true
+
+  /string.prototype.trimend@1.0.8:
+    resolution: {integrity: sha512-p73uL5VCHCO2BZZ6krwwQE3kCzM7NKmis8S//xEC6fQonchbum4eP6kR4DLEjQFO3Wnj3Fuo8NM0kOSjVdHjZQ==}
+    dependencies:
+      call-bind: 1.0.7
+      define-properties: 1.2.1
+      es-object-atoms: 1.0.0
+    dev: true
+
+  /string.prototype.trimstart@1.0.8:
+    resolution: {integrity: sha512-UXSH262CSZY1tfu3G3Secr6uGLCFVPMhIqHjlgCUtCCcgihYc/xKs9djMTMUOb2j1mVSeU8EU6NWc/iQKU6Gfg==}
+    engines: {node: '>= 0.4'}
+    dependencies:
+      call-bind: 1.0.7
+      define-properties: 1.2.1
+      es-object-atoms: 1.0.0
+    dev: true
+
+  /strip-ansi@6.0.1:
+    resolution: {integrity: sha512-Y38VPSHcqkFrCpFnQ9vuSXmquuv5oXOKpGeT6aGrr3o3Gc9AlVa6JBfUSOCnbxGGZF+/0ooI7KrPuUSztUdU5A==}
+    engines: {node: '>=8'}
+    dependencies:
+      ansi-regex: 5.0.1
+    dev: true
+
+  /strip-ansi@7.1.0:
+    resolution: {integrity: sha512-iq6eVVI64nQQTRYq2KtEg2d2uU7LElhTJwsH4YzIHZshxlgZms/wIc4VoDQTlG/IvVIrBKG06CrZnp0qv7hkcQ==}
+    engines: {node: '>=12'}
+    dependencies:
+      ansi-regex: 6.1.0
+    dev: true
+
+  /strip-bom@3.0.0:
+    resolution: {integrity: sha512-vavAMRXOgBVNF6nyEEmL3DBK19iRpDcoIwW+swQ+CbGiu7lju6t+JklA1MHweoWtadgt4ISVUsXLyDq34ddcwA==}
+    engines: {node: '>=4'}
+    dev: true
+
+  /strip-final-newline@3.0.0:
+    resolution: {integrity: sha512-dOESqjYr96iWYylGObzd39EuNTa5VJxyvVAEm5Jnh7KGo75V43Hk1odPQkNDyXNmUR6k+gEiDVXnjB8HJ3crXw==}
+    engines: {node: '>=12'}
+    dev: true
+
+  /strip-json-comments@3.1.1:
+    resolution: {integrity: sha512-6fPc+R4ihwqP6N/aIv2f1gMH8lOVtWQHoqC4yK6oSDVVocumAsfCqjkXnqiYMhmMwS/mEHLp7Vehlt3ql6lEig==}
+    engines: {node: '>=8'}
+    dev: true
+
+  /strip-literal@2.1.0:
+    resolution: {integrity: sha512-Op+UycaUt/8FbN/Z2TWPBLge3jWrP3xj10f3fnYxf052bKuS3EKs1ZQcVGjnEMdsNVAM+plXRdmjrZ/KgG3Skw==}
+    dependencies:
+      js-tokens: 9.0.0
+    dev: true
+
+  /strnum@1.0.5:
+    resolution: {integrity: sha512-J8bbNyKKXl5qYcR36TIO8W3mVGVHrmmxsd5PAItGkmyzwJvybiw2IVq5nqd0i4LSNSkB/sx9VHllbfFdr9k1JA==}
+    dev: false
+
+  /sucrase@3.35.0:
+    resolution: {integrity: sha512-8EbVDiu9iN/nESwxeSxDKe0dunta1GOlHufmSSXxMD2z2/tMZpDMpvXQGsc+ajGo8y2uYUmixaSRUc/QPoQ0GA==}
+    engines: {node: '>=16 || 14 >=14.17'}
+    hasBin: true
+    dependencies:
+      '@jridgewell/gen-mapping': 0.3.5
+      commander: 4.1.1
+      glob: 10.4.5
+      lines-and-columns: 1.2.4
+      mz: 2.7.0
+      pirates: 4.0.6
+      ts-interface-checker: 0.1.13
+    dev: true
+
+  /supports-color@7.2.0:
+    resolution: {integrity: sha512-qpCAvRl9stuOHveKsn7HncJRvv501qIacKzQlO/+Lwxc9+0q2wLyv4Dfvt80/DPn2pqOBsJdDiogXGR9+OvwRw==}
+    engines: {node: '>=8'}
+    dependencies:
+      has-flag: 4.0.0
+    dev: true
+
+  /supports-preserve-symlinks-flag@1.0.0:
+    resolution: {integrity: sha512-ot0WnXS9fgdkgIcePe6RHNk1WA8+muPa6cSjeR3V8K27q9BB1rTE3R1p7Hv0z1ZyAc8s6Vvv8DIyWf681MAt0w==}
+    engines: {node: '>= 0.4'}
+    dev: true
+
+  /svg-element-attributes@1.3.1:
+    resolution: {integrity: sha512-Bh05dSOnJBf3miNMqpsormfNtfidA/GxQVakhtn0T4DECWKeXQRQUceYjJ+OxYiiLdGe4Jo9iFV8wICFapFeIA==}
+    dev: true
+
+  /synckit@0.9.2:
+    resolution: {integrity: sha512-vrozgXDQwYO72vHjUb/HnFbQx1exDjoKzqx23aXEg2a9VIg2TSFZ8FmeZpTjUCFMYw7mpX4BE2SFu8wI7asYsw==}
+    engines: {node: ^14.18.0 || >=16.0.0}
+    dependencies:
+      '@pkgr/core': 0.1.1
+      tslib: 2.8.1
+    dev: true
+
+  /tapable@2.2.1:
+    resolution: {integrity: sha512-GNzQvQTOIP6RyTfE2Qxb8ZVlNmw0n88vp1szwWRimP02mnTsx3Wtn5qRdqY9w2XduFNUgvOwhNnQsjwCp+kqaQ==}
+    engines: {node: '>=6'}
+    dev: true
+
+  /text-table@0.2.0:
+    resolution: {integrity: sha512-N+8UisAXDGk8PFXP4HAzVR9nbfmVJ3zYLAWiTIoqC5v5isinhr+r5uaO8+7r3BMfuNIufIsA7RdpVgacC2cSpw==}
+    dev: true
+
+  /thenify-all@1.6.0:
+    resolution: {integrity: sha512-RNxQH/qI8/t3thXJDwcstUO4zeqo64+Uy/+sNVRBx4Xn2OX+OZ9oP+iJnNFqplFra2ZUVeKCSa2oVWi3T4uVmA==}
+    engines: {node: '>=0.8'}
+    dependencies:
+      thenify: 3.3.1
+    dev: true
+
+  /thenify@3.3.1:
+    resolution: {integrity: sha512-RVZSIV5IG10Hk3enotrhvz0T9em6cyHBLkH/YAZuKqd8hRkKhSfCGIcP2KUY0EPxndzANBmNllzWPwak+bheSw==}
+    dependencies:
+      any-promise: 1.3.0
+    dev: true
+
+  /tinybench@2.9.0:
+    resolution: {integrity: sha512-0+DUvqWMValLmha6lr4kD8iAMK1HzV0/aKnCtWb9v9641TnP/MFb7Pc2bxoxQjTXAErryXVgUOfv2YqNllqGeg==}
+    dev: true
+
+  /tinyexec@0.3.1:
+    resolution: {integrity: sha512-WiCJLEECkO18gwqIp6+hJg0//p23HXp4S+gGtAKu3mI2F2/sXC4FvHvXvB0zJVVaTPhx1/tOwdbRsa1sOBIKqQ==}
+    dev: true
+
+  /tinyglobby@0.2.10:
+    resolution: {integrity: sha512-Zc+8eJlFMvgatPZTl6A9L/yht8QqdmUNtURHaKZLmKBE12hNPSrqNkUp2cs3M/UKmNVVAMFQYSjYIVHDjW5zew==}
+    engines: {node: '>=12.0.0'}
+    dependencies:
+      fdir: 6.4.2(picomatch@4.0.2)
+      picomatch: 4.0.2
+    dev: true
+
+  /tinypool@0.8.4:
+    resolution: {integrity: sha512-i11VH5gS6IFeLY3gMBQ00/MmLncVP7JLXOw1vlgkytLmJK7QnEr7NXf0LBdxfmNPAeyetukOk0bOYrJrFGjYJQ==}
+    engines: {node: '>=14.0.0'}
+    dev: true
+
+  /tinyspy@2.2.1:
+    resolution: {integrity: sha512-KYad6Vy5VDWV4GH3fjpseMQ/XU2BhIYP7Vzd0LG44qRWm/Yt2WCOTicFdvmgo6gWaqooMQCawTtILVQJupKu7A==}
+    engines: {node: '>=14.0.0'}
+    dev: true
+
+  /to-fast-properties@2.0.0:
+    resolution: {integrity: sha512-/OaKK0xYrs3DmxRYqL/yDc+FxFUVYhDlXMhRmv3z915w2HF1tnN1omB354j8VUGO/hbRzyD6Y3sA7v7GS/ceog==}
+    engines: {node: '>=4'}
+    dev: true
+
+  /to-regex-range@5.0.1:
+    resolution: {integrity: sha512-65P7iz6X5yEr1cwcgvQxbbIw7Uk3gOy5dIdtZ4rDveLqhrdJP+Li/Hx6tyK0NEb+2GCyneCMJiGqrADCSNk8sQ==}
+    engines: {node: '>=8.0'}
+    dependencies:
+      is-number: 7.0.0
+    dev: true
+
+  /tr46@0.0.3:
+    resolution: {integrity: sha512-N3WMsuqV66lT30CrXNbEjx4GEwlow3v6rr4mCcv6prnfwhS01rkgyFdjPNBYd9br7LpXV1+Emh01fHnq2Gdgrw==}
+    dev: false
+
+  /tr46@1.0.1:
+    resolution: {integrity: sha512-dTpowEjclQ7Kgx5SdBkqRzVhERQXov8/l9Ft9dVM9fmg0W0KQSVaXX9T4i6twCPNtYiZM53lpSSUAwJbFPOHxA==}
+    dependencies:
+      punycode: 2.3.1
+    dev: true
+
+  /tree-kill@1.2.2:
+    resolution: {integrity: sha512-L0Orpi8qGpRG//Nd+H90vFB+3iHnue1zSSGmNOOCh1GLJ7rUKVwV2HvijphGQS2UmhUZewS9VgvxYIdgr+fG1A==}
+    hasBin: true
+    dev: true
+
+  /ts-api-utils@1.4.0(typescript@5.6.3):
+    resolution: {integrity: sha512-032cPxaEKwM+GT3vA5JXNzIaizx388rhsSW79vGRNGXfRRAdEAn2mvk36PvK5HnOchyWZ7afLEXqYCvPCrzuzQ==}
+    engines: {node: '>=16'}
+    peerDependencies:
+      typescript: '>=4.2.0'
+    dependencies:
+      typescript: 5.6.3
+    dev: true
+
+  /ts-interface-checker@0.1.13:
+    resolution: {integrity: sha512-Y/arvbn+rrz3JCKl9C4kVNfTfSm2/mEp5FSz5EsZSANGPSlQrpRI5M4PKF+mJnE52jOO90PnPSc3Ur3bTQw0gA==}
+    dev: true
+
+  /tsconfig-paths@3.15.0:
+    resolution: {integrity: sha512-2Ac2RgzDe/cn48GvOe3M+o82pEFewD3UPbyoUHHdKasHwJKjds4fLXWf/Ux5kATBKN20oaFGu+jbElp1pos0mg==}
+    dependencies:
+      '@types/json5': 0.0.29
+      json5: 1.0.2
+      minimist: 1.2.8
+      strip-bom: 3.0.0
+    dev: true
+
+  /tslib@1.14.1:
+    resolution: {integrity: sha512-Xni35NKzjgMrwevysHTCArtLDpPvye8zV/0E4EyYn43P7/7qvQwPh9BGkHewbMulVntbigmcT7rdX3BNo9wRJg==}
+    dev: false
+
+  /tslib@2.8.1:
+    resolution: {integrity: sha512-oJFu94HQb+KVduSUQL7wnpmqnfmLsOA/nAh6b6EH0wCEoK0/mPeXU6c3wKDV83MkOuHPRHtSXKKU99IBazS/2w==}
+
+  /tsup@8.3.5(typescript@5.6.3):
+    resolution: {integrity: sha512-Tunf6r6m6tnZsG9GYWndg0z8dEV7fD733VBFzFJ5Vcm1FtlXB8xBD/rtrBi2a3YKEV7hHtxiZtW5EAVADoe1pA==}
+    engines: {node: '>=18'}
+    hasBin: true
+    peerDependencies:
+      '@microsoft/api-extractor': ^7.36.0
+      '@swc/core': ^1
+      postcss: ^8.4.12
+      typescript: '>=4.5.0'
+    peerDependenciesMeta:
+      '@microsoft/api-extractor':
+        optional: true
+      '@swc/core':
+        optional: true
+      postcss:
+        optional: true
+      typescript:
+        optional: true
+    dependencies:
+      bundle-require: 5.0.0(esbuild@0.24.0)
+      cac: 6.7.14
+      chokidar: 4.0.1
+      consola: 3.2.3
+      debug: 4.3.7
+      esbuild: 0.24.0
+      joycon: 3.1.1
+      picocolors: 1.1.1
+      postcss-load-config: 6.0.1
+      resolve-from: 5.0.0
+      rollup: 4.27.3
+      source-map: 0.8.0-beta.0
+      sucrase: 3.35.0
+      tinyexec: 0.3.1
+      tinyglobby: 0.2.10
+      tree-kill: 1.2.2
+      typescript: 5.6.3
+    transitivePeerDependencies:
+      - jiti
+      - supports-color
+      - tsx
+      - yaml
+    dev: true
+
+  /tunnel@0.0.6:
+    resolution: {integrity: sha512-1h/Lnq9yajKY2PEbBadPXj3VxsDDu844OnaAo52UVmIzIvwwtBPIuNvkjuzBlTWpfJyUbG3ez0KSBibQkj4ojg==}
+    engines: {node: '>=0.6.11 <=0.7.0 || >=0.7.3'}
+    dev: false
+
+  /type-check@0.4.0:
+    resolution: {integrity: sha512-XleUoc9uwGXqjWwXaUTZAmzMcFZ5858QA2vvx1Ur5xIcixXIP+8LnFDgRplU30us6teqdlskFfu+ae4K79Ooew==}
+    engines: {node: '>= 0.8.0'}
+    dependencies:
+      prelude-ls: 1.2.1
+    dev: true
+
+  /type-detect@4.1.0:
+    resolution: {integrity: sha512-Acylog8/luQ8L7il+geoSxhEkazvkslg7PSNKOX59mbB9cOveP5aq9h74Y7YU8yDpJwetzQQrfIwtf4Wp4LKcw==}
+    engines: {node: '>=4'}
+    dev: true
+
+  /type-fest@0.20.2:
+    resolution: {integrity: sha512-Ne+eE4r0/iWnpAxD852z3A+N0Bt5RN//NjJwRd2VFHEmrywxf5vsZlh4R6lixl6B+wz/8d+maTSAkN1FIkI3LQ==}
+    engines: {node: '>=10'}
+    dev: true
+
+  /type-fest@4.27.0:
+    resolution: {integrity: sha512-3IMSWgP7C5KSQqmo1wjhKrwsvXAtF33jO3QY+Uy++ia7hqvgSK6iXbbg5PbDBc1P2ZbNEDgejOrN4YooXvhwCw==}
+    engines: {node: '>=16'}
+    dev: false
+
+  /typed-array-buffer@1.0.2:
+    resolution: {integrity: sha512-gEymJYKZtKXzzBzM4jqa9w6Q1Jjm7x2d+sh19AdsD4wqnMPDYyvwpsIc2Q/835kHuo3BEQ7CjelGhfTsoBb2MQ==}
+    engines: {node: '>= 0.4'}
+    dependencies:
+      call-bind: 1.0.7
+      es-errors: 1.3.0
+      is-typed-array: 1.1.13
+    dev: true
+
+  /typed-array-byte-length@1.0.1:
+    resolution: {integrity: sha512-3iMJ9q0ao7WE9tWcaYKIptkNBuOIcZCCT0d4MRvuuH88fEoEH62IuQe0OtraD3ebQEoTRk8XCBoknUNc1Y67pw==}
+    engines: {node: '>= 0.4'}
+    dependencies:
+      call-bind: 1.0.7
+      for-each: 0.3.3
+      gopd: 1.0.1
+      has-proto: 1.0.3
+      is-typed-array: 1.1.13
+    dev: true
+
+  /typed-array-byte-offset@1.0.2:
+    resolution: {integrity: sha512-Ous0vodHa56FviZucS2E63zkgtgrACj7omjwd/8lTEMEPFFyjfixMZ1ZXenpgCFBBt4EC1J2XsyVS2gkG0eTFA==}
+    engines: {node: '>= 0.4'}
+    dependencies:
+      available-typed-arrays: 1.0.7
+      call-bind: 1.0.7
+      for-each: 0.3.3
+      gopd: 1.0.1
+      has-proto: 1.0.3
+      is-typed-array: 1.1.13
+    dev: true
+
+  /typed-array-length@1.0.6:
+    resolution: {integrity: sha512-/OxDN6OtAk5KBpGb28T+HZc2M+ADtvRxXrKKbUwtsLgdoxgX13hyy7ek6bFRl5+aBs2yZzB0c4CnQfAtVypW/g==}
+    engines: {node: '>= 0.4'}
+    dependencies:
+      call-bind: 1.0.7
+      for-each: 0.3.3
+      gopd: 1.0.1
+      has-proto: 1.0.3
+      is-typed-array: 1.1.13
+      possible-typed-array-names: 1.0.0
+    dev: true
+
+  /typescript@5.6.3:
+    resolution: {integrity: sha512-hjcS1mhfuyi4WW8IWtjP7brDrG2cuDZukyrYrSauoXGNgx0S7zceP07adYkJycEr56BOUTNPzbInooiN3fn1qw==}
+    engines: {node: '>=14.17'}
+    hasBin: true
+    dev: true
+
+  /ufo@1.5.4:
+    resolution: {integrity: sha512-UsUk3byDzKd04EyoZ7U4DOlxQaD14JUKQl6/P7wiX4FNvUfm3XL246n9W5AmqwW5RSFJ27NAuM0iLscAOYUiGQ==}
+    dev: true
+
+  /unbox-primitive@1.0.2:
+    resolution: {integrity: sha512-61pPlCD9h51VoreyJ0BReideM3MDKMKnh6+V9L08331ipq6Q8OFXZYiqP6n/tbHx4s5I9uRhcye6BrbkizkBDw==}
+    dependencies:
+      call-bind: 1.0.7
+      has-bigints: 1.0.2
+      has-symbols: 1.0.3
+      which-boxed-primitive: 1.0.2
+    dev: true
+
+  /undici@5.28.4:
+    resolution: {integrity: sha512-72RFADWFqKmUb2hmmvNODKL3p9hcB6Gt2DOQMis1SEBaV6a4MH8soBvzg+95CYhCKPFedut2JY9bMfrDl9D23g==}
+    engines: {node: '>=14.0'}
+    dependencies:
+      '@fastify/busboy': 2.1.1
+    dev: false
+
+  /update-browserslist-db@1.1.1(browserslist@4.24.2):
+    resolution: {integrity: sha512-R8UzCaa9Az+38REPiJ1tXlImTJXlVfgHZsglwBD/k6nj76ctsH1E3q4doGrukiLQd3sGQYu56r5+lo5r94l29A==}
+    hasBin: true
+    peerDependencies:
+      browserslist: '>= 4.21.0'
+    dependencies:
+      browserslist: 4.24.2
+      escalade: 3.2.0
+      picocolors: 1.1.1
+    dev: true
+
+  /uri-js@4.4.1:
+    resolution: {integrity: sha512-7rKUyy33Q1yc98pQ1DAmLtwX109F7TIfWlW1Ydo8Wl1ii1SeHieeh0HHfPeL2fMXK6z0s8ecKs9frCuLJvndBg==}
+    dependencies:
+      punycode: 2.3.1
+    dev: true
+
+  /uuid@8.3.2:
+    resolution: {integrity: sha512-+NYs2QeMWy+GWFOEm9xnn6HCDp0l7QBD7ml8zLUmJ+93Q5NF0NocErnwkTkXVFNiX3/fpC6afS8Dhb/gz7R7eg==}
+    hasBin: true
+    dev: false
+
+  /vite-node@1.6.0:
+    resolution: {integrity: sha512-de6HJgzC+TFzOu0NTC4RAIsyf/DY/ibWDYQUcuEA84EMHhcefTUGkjFHKKEJhQN4A+6I0u++kr3l36ZF2d7XRw==}
+    engines: {node: ^18.0.0 || >=20.0.0}
+    hasBin: true
+    dependencies:
+      cac: 6.7.14
+      debug: 4.3.7
+      pathe: 1.1.2
+      picocolors: 1.1.1
+      vite: 5.4.11
+    transitivePeerDependencies:
+      - '@types/node'
+      - less
+      - lightningcss
+      - sass
+      - sass-embedded
+      - stylus
+      - sugarss
+      - supports-color
+      - terser
+    dev: true
+
+  /vite@5.4.11:
+    resolution: {integrity: sha512-c7jFQRklXua0mTzneGW9QVyxFjUgwcihC4bXEtujIo2ouWCe1Ajt/amn2PCxYnhYfd5k09JX3SB7OYWFKYqj8Q==}
+    engines: {node: ^18.0.0 || >=20.0.0}
+    hasBin: true
+    peerDependencies:
+      '@types/node': ^18.0.0 || >=20.0.0
+      less: '*'
+      lightningcss: ^1.21.0
+      sass: '*'
+      sass-embedded: '*'
+      stylus: '*'
+      sugarss: '*'
+      terser: ^5.4.0
+    peerDependenciesMeta:
+      '@types/node':
+        optional: true
+      less:
+        optional: true
+      lightningcss:
+        optional: true
+      sass:
+        optional: true
+      sass-embedded:
+        optional: true
+      stylus:
+        optional: true
+      sugarss:
+        optional: true
+      terser:
+        optional: true
+    dependencies:
+      esbuild: 0.21.5
+      postcss: 8.4.49
+      rollup: 4.27.3
+    optionalDependencies:
+      fsevents: 2.3.3
+    dev: true
+
+  /vitest@1.6.0:
+    resolution: {integrity: sha512-H5r/dN06swuFnzNFhq/dnz37bPXnq8xB2xB5JOVk8K09rUtoeNN+LHWkoQ0A/i3hvbUKKcCei9KpbxqHMLhLLA==}
+    engines: {node: ^18.0.0 || >=20.0.0}
+    hasBin: true
+    peerDependencies:
+      '@edge-runtime/vm': '*'
+      '@types/node': ^18.0.0 || >=20.0.0
+      '@vitest/browser': 1.6.0
+      '@vitest/ui': 1.6.0
+      happy-dom: '*'
+      jsdom: '*'
+    peerDependenciesMeta:
+      '@edge-runtime/vm':
+        optional: true
+      '@types/node':
+        optional: true
+      '@vitest/browser':
+        optional: true
+      '@vitest/ui':
+        optional: true
+      happy-dom:
+        optional: true
+      jsdom:
+        optional: true
+    dependencies:
+      '@vitest/expect': 1.6.0
+      '@vitest/runner': 1.6.0
+      '@vitest/snapshot': 1.6.0
+      '@vitest/spy': 1.6.0
+      '@vitest/utils': 1.6.0
+      acorn-walk: 8.3.4
+      chai: 4.5.0
+      debug: 4.3.7
+      execa: 8.0.1
+      local-pkg: 0.5.1
+      magic-string: 0.30.13
+      pathe: 1.1.2
+      picocolors: 1.1.1
+      std-env: 3.8.0
+      strip-literal: 2.1.0
+      tinybench: 2.9.0
+      tinypool: 0.8.4
+      vite: 5.4.11
+      vite-node: 1.6.0
+      why-is-node-running: 2.3.0
+    transitivePeerDependencies:
+      - less
+      - lightningcss
+      - sass
+      - sass-embedded
+      - stylus
+      - sugarss
+      - supports-color
+      - terser
+    dev: true
+
+  /webidl-conversions@3.0.1:
+    resolution: {integrity: sha512-2JAn3z8AR6rjK8Sm8orRC0h/bcl/DqL7tRPdGZ4I1CjdF+EaMLmYxBHyXuKL849eucPFhvBoxMsflfOb8kxaeQ==}
+    dev: false
+
+  /webidl-conversions@4.0.2:
+    resolution: {integrity: sha512-YQ+BmxuTgd6UXZW3+ICGfyqRyHXVlD5GtQr5+qjiNW7bF0cqrzX500HVXPBOvgXb5YnzDd+h0zqyv61KUD7+Sg==}
+    dev: true
+
+  /whatwg-url@5.0.0:
+    resolution: {integrity: sha512-saE57nupxk6v3HY35+jzBwYa0rKSy0XR8JSxZPwgLr7ys0IBzhGviA1/TUGJLmSVqs8pb9AnvICXEuOHLprYTw==}
+    dependencies:
+      tr46: 0.0.3
+      webidl-conversions: 3.0.1
+    dev: false
+
+  /whatwg-url@7.1.0:
+    resolution: {integrity: sha512-WUu7Rg1DroM7oQvGWfOiAK21n74Gg+T4elXEQYkOhtyLeWiJFoOGLXPKI/9gzIie9CtwVLm8wtw6YJdKyxSjeg==}
+    dependencies:
+      lodash.sortby: 4.7.0
+      tr46: 1.0.1
+      webidl-conversions: 4.0.2
+    dev: true
+
+  /which-boxed-primitive@1.0.2:
+    resolution: {integrity: sha512-bwZdv0AKLpplFY2KZRX6TvyuN7ojjr7lwkg6ml0roIy9YeuSr7JS372qlNW18UQYzgYK9ziGcerWqZOmEn9VNg==}
+    dependencies:
+      is-bigint: 1.0.4
+      is-boolean-object: 1.1.2
+      is-number-object: 1.0.7
+      is-string: 1.0.7
+      is-symbol: 1.0.4
+    dev: true
+
+  /which-typed-array@1.1.15:
+    resolution: {integrity: sha512-oV0jmFtUky6CXfkqehVvBP/LSWJ2sy4vWMioiENyJLePrBO/yKyV9OyJySfAKosh+RYkIl5zJCNZ8/4JncrpdA==}
+    engines: {node: '>= 0.4'}
+    dependencies:
+      available-typed-arrays: 1.0.7
+      call-bind: 1.0.7
+      for-each: 0.3.3
+      gopd: 1.0.1
+      has-tostringtag: 1.0.2
+    dev: true
+
+  /which@2.0.2:
+    resolution: {integrity: sha512-BLI3Tl1TW3Pvl70l3yq3Y64i+awpwXqsGBYWkkqMtnbXgrMD+yj7rhW0kuEDxzJaYXGjEW5ogapKNMEKNMjibA==}
+    engines: {node: '>= 8'}
+    hasBin: true
+    dependencies:
+      isexe: 2.0.0
+    dev: true
+
+  /why-is-node-running@2.3.0:
+    resolution: {integrity: sha512-hUrmaWBdVDcxvYqnyh09zunKzROWjbZTiNy8dBEjkS7ehEDQibXJ7XvlmtbwuTclUiIyN+CyXQD4Vmko8fNm8w==}
+    engines: {node: '>=8'}
+    hasBin: true
+    dependencies:
+      siginfo: 2.0.0
+      stackback: 0.0.2
+    dev: true
+
+  /word-wrap@1.2.5:
+    resolution: {integrity: sha512-BN22B5eaMMI9UMtjrGd5g5eCYPpCPDUy0FJXbYsaT5zYxjFOckS53SQDE3pWkVoWpHXVb3BrYcEN4Twa55B5cA==}
+    engines: {node: '>=0.10.0'}
+    dev: true
+
+  /wrap-ansi@7.0.0:
+    resolution: {integrity: sha512-YVGIj2kamLSTxw6NsZjoBxfSwsn0ycdesmc4p+Q21c5zPuZ1pl+NfxVdxPtdHvmNVOQ6XSYG4AUtyt/Fi7D16Q==}
+    engines: {node: '>=10'}
+    dependencies:
+      ansi-styles: 4.3.0
+      string-width: 4.2.3
+      strip-ansi: 6.0.1
+    dev: true
+
+  /wrap-ansi@8.1.0:
+    resolution: {integrity: sha512-si7QWI6zUMq56bESFvagtmzMdGOtoxfR+Sez11Mobfc7tm+VkUckk9bW2UeffTGVUbOksxmSw0AA2gs8g71NCQ==}
+    engines: {node: '>=12'}
+    dependencies:
+      ansi-styles: 6.2.1
+      string-width: 5.1.2
+      strip-ansi: 7.1.0
+    dev: true
+
+  /wrappy@1.0.2:
+    resolution: {integrity: sha512-l4Sp/DRseor9wL6EvV2+TuQn63dMkPjZ/sp9XkghTEbV9KlPS1xUsZ3u7/IQO4wxtcFB4bgpQPRcR3QCvezPcQ==}
+    dev: true
+
+  /xml2js@0.5.0:
+    resolution: {integrity: sha512-drPFnkQJik/O+uPKpqSgr22mpuFHqKdbS835iAQrUC73L2F5WkboIRd63ai/2Yg6I1jzifPFKH2NTK+cfglkIA==}
+    engines: {node: '>=4.0.0'}
+    dependencies:
+      sax: 1.4.1
+      xmlbuilder: 11.0.1
+    dev: false
+
+  /xmlbuilder@11.0.1:
+    resolution: {integrity: sha512-fDlsI/kFEx7gLvbecc0/ohLG50fugQp8ryHzMTuW9vSa1GJ0XYWKnhsUx7oie3G98+r56aTQIUB4kht42R3JvA==}
+    engines: {node: '>=4.0'}
+    dev: false
+
+  /yocto-queue@0.1.0:
+    resolution: {integrity: sha512-rVksvsnNCdJ/ohGc6xgPwyN8eheCxsiLM8mxuE/t/mOVqJewPuO1miLpTHQiRgTKCLexL4MeAFVagts7HmNZ2Q==}
+    engines: {node: '>=10'}
+    dev: true
+
+  /yocto-queue@1.1.1:
+    resolution: {integrity: sha512-b4JR1PFR10y1mKjhHY9LaGo6tmrgjit7hxVIeAmyMw3jegXR4dhYqLaQF5zMXZxY7tLpMyJeLjr1C4rLmkVe8g==}
+    engines: {node: '>=12.20'}
+    dev: true
+
+  github.com/DeterminateSystems/detsys-ts/eb87094f35072ac911526ad052c3437c9e0c42d6:
+    resolution: {tarball: https://codeload.github.com/DeterminateSystems/detsys-ts/tar.gz/eb87094f35072ac911526ad052c3437c9e0c42d6}
+    name: detsys-ts
+    version: 1.0.0
+    dependencies:
+      '@actions/cache': 3.3.0
+      '@actions/core': 1.11.1
+      '@actions/exec': 1.1.1
+      got: 14.4.4
+      type-fest: 4.27.0
+    transitivePeerDependencies:
+      - encoding
+      - supports-color
+    dev: false
diff --git a/prettier.config.cjs b/prettier.config.cjs
new file mode 100644
index 0000000..ea184ad
--- /dev/null
+++ b/prettier.config.cjs
@@ -0,0 +1,12 @@
+/** @type {import('prettier').Config} */
+module.exports = {
+  plugins: [require.resolve("@trivago/prettier-plugin-sort-imports")],
+  semi: true,
+  singleQuote: false,
+  tabWidth: 2,
+  trailingComma: "all",
+  useTabs: false,
+  // Import sorting
+  importOrderSeparation: true,
+  importOrderSortSpecifiers: true,
+};
diff --git a/shell.nix b/shell.nix
new file mode 100644
index 0000000..4f7af74
--- /dev/null
+++ b/shell.nix
@@ -0,0 +1,6 @@
+(import
+  (fetchTarball {
+    url = "https://github.com/edolstra/flake-compat/archive/99f1c2157fba4bfe6211a321fd0ee43199025dbf.tar.gz";
+    sha256 = "0x2jn3vrawwv9xp15674wjz9pixwjyj3j771izayl962zziivbx2";
+  })
+  { src = ./.; }).shellNix
diff --git a/src/index.ts b/src/index.ts
new file mode 100644
index 0000000..75ce696
--- /dev/null
+++ b/src/index.ts
@@ -0,0 +1,76 @@
+import { makeNixCommandArgs } from "./nix.js";
+import * as actionsCore from "@actions/core";
+import * as actionsExec from "@actions/exec";
+import { DetSysAction, inputs } from "detsys-ts";
+
+const EVENT_EXECUTION_FAILURE = "execution_failure";
+
+class UpdateFlakeLockAction extends DetSysAction {
+  private commitMessage: string;
+  private nixOptions: string[];
+  private flakeInputs: string[];
+  private pathToFlakeDir: string | null;
+
+  constructor() {
+    super({
+      name: "update-flake-lock",
+      fetchStyle: "universal",
+      requireNix: "fail",
+    });
+
+    this.commitMessage = inputs.getString("commit-msg");
+    this.flakeInputs = inputs.getArrayOfStrings("inputs", "space");
+    this.nixOptions = inputs.getArrayOfStrings("nix-options", "space");
+    this.pathToFlakeDir = inputs.getStringOrNull("path-to-flake-dir");
+  }
+
+  async main(): Promise<void> {
+    await this.update();
+  }
+
+  // No post phase
+  async post(): Promise<void> {}
+
+  async update(): Promise<void> {
+    // Build a Nix command of this form:
+    // nix ${optional Nix options} flake ${"update" or "lock"} ${optional --update-input flags} --commit-lock-file --option commit-lockfile-summary ${commit message}
+    // Example commands:
+    // nix --extra-substituters https://example.com flake lock --update-input nixpkgs --commit-lock-file --option commit-lockfile-summary "updated flake.lock"
+    // nix flake update --commit-lock-file --option commit-lockfile-summary "updated flake.lock"
+    const nixCommandArgs: string[] = makeNixCommandArgs(
+      this.nixOptions,
+      this.flakeInputs,
+      this.commitMessage,
+    );
+
+    actionsCore.debug(
+      JSON.stringify({
+        options: this.nixOptions,
+        inputs: this.flakeInputs,
+        message: this.commitMessage,
+        args: nixCommandArgs,
+      }),
+    );
+
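+    // If a flake directory was specified, run Nix from there; otherwise use the default working directory.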
+    const execOptions: actionsExec.ExecOptions = {
+      cwd: this.pathToFlakeDir !== null ? this.pathToFlakeDir : undefined,
+    };
+
+    const exitCode = await actionsExec.exec("nix", nixCommandArgs, execOptions);
+
+    if (exitCode !== 0) {
+      this.recordEvent(EVENT_EXECUTION_FAILURE, {
+        exitCode,
+      });
+      actionsCore.setFailed(`non-zero exit code of ${exitCode} detected`);
+    } else {
+      actionsCore.info(`flake.lock file was successfully updated`);
+    }
+  }
+}
+
+function main(): void {
+  new UpdateFlakeLockAction().execute();
+}
+
+main();
diff --git a/src/nix.test.ts b/src/nix.test.ts
new file mode 100644
index 0000000..ebc7640
--- /dev/null
+++ b/src/nix.test.ts
@@ -0,0 +1,77 @@
+import { makeNixCommandArgs } from "./nix.js";
+import { expect, test } from "vitest";
+
+type TestCase = {
+  inputs: {
+    nixOptions: string[];
+    flakeInputs: string[];
+    commitMessage: string;
+  };
+  expected: string[];
+};
+
+test("Nix command arguments", () => {
+  const testCases: TestCase[] = [
+    {
+      inputs: {
+        nixOptions: ["--log-format", "raw"],
+        flakeInputs: [],
+        commitMessage: "just testing",
+      },
+      expected: [
+        "--log-format",
+        "raw",
+        "flake",
+        "update",
+        "--commit-lock-file",
+        "--option",
+        "commit-lockfile-summary",
+        "just testing",
+      ],
+    },
+    {
+      inputs: {
+        nixOptions: [],
+        flakeInputs: ["nixpkgs", "rust-overlay"],
+        commitMessage: "just testing",
+      },
+      expected: [
+        "flake",
+        "lock",
+        "--update-input",
+        "nixpkgs",
+        "--update-input",
+        "rust-overlay",
+        "--commit-lock-file",
+        "--option",
+        "commit-lockfile-summary",
+        "just testing",
+      ],
+    },
+    {
+      inputs: {
+        nixOptions: ["--debug"],
+        flakeInputs: [],
+        commitMessage: "just testing",
+      },
+      expected: [
+        "--debug",
+        "flake",
+        "update",
+        "--commit-lock-file",
+        "--option",
+        "commit-lockfile-summary",
+        "just testing",
+      ],
+    },
+  ];
+
+  testCases.forEach(({ inputs, expected }) => {
+    const args = makeNixCommandArgs(
+      inputs.nixOptions,
+      inputs.flakeInputs,
+      inputs.commitMessage,
+    );
+    expect(args).toStrictEqual(expected);
+  });
+});
diff --git a/src/nix.ts b/src/nix.ts
new file mode 100644
index 0000000..e3d829a
--- /dev/null
+++ b/src/nix.ts
@@ -0,0 +1,31 @@
+// Build the Nix args out of inputs from the Actions environment
+export function makeNixCommandArgs(
+  nixOptions: string[],
+  flakeInputs: string[],
+  commitMessage: string,
+): string[] {
+  const flakeInputFlags = flakeInputs.flatMap((input) => [
+    "--update-input",
+    input,
+  ]);
+
+  // NOTE(cole-h): In Nix 2.23.0 and later, `commit-lockfile-summary` became an alias for the
+  // setting `commit-lock-file-summary` (https://github.com/NixOS/nix/pull/10691). Nix does not
+  // treat aliases the same as their "real" settings: alias settings must be passed via
+  // `--option <alias name> <option value>` rather than as a dedicated flag
+  // (https://github.com/NixOS/nix/issues/10989).
+  // So we take the long way around in order to support Nix versions both before and after 2.23.0.
+  const lockfileSummaryFlags = [
+    "--option",
+    "commit-lockfile-summary",
+    commitMessage,
+  ];
+
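+  // With no specific inputs, `nix flake update` refreshes every input; otherwise
+  // `nix flake lock --update-input <name> ...` updates only the named inputs.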
+  const updateLockMechanism = flakeInputFlags.length === 0 ? "update" : "lock";
+
+  return nixOptions
+    .concat(["flake", updateLockMechanism])
+    .concat(flakeInputFlags)
+    .concat(["--commit-lock-file"])
+    .concat(lockfileSummaryFlags);
+}
diff --git a/test b/test
new file mode 100644
index 0000000..e69de29
diff --git a/tsconfig.json b/tsconfig.json
new file mode 100644
index 0000000..370cfa1
--- /dev/null
+++ b/tsconfig.json
@@ -0,0 +1,15 @@
+{
+  "compilerOptions": {
+    "target": "ES2020" /* Specify ECMAScript target version: 'ES3' (default), 'ES5', 'ES2015', 'ES2016', 'ES2017', 'ES2018', 'ES2019' or 'ESNEXT'. */,
+    "module": "Node16",
+    "moduleResolution": "NodeNext",
+    "outDir": "./dist",
+    "rootDir": "./src",
+    "strict": true /* Enable all strict type-checking options. */,
+    "noImplicitAny": true /* Raise error on expressions and declarations with an implied 'any' type. */,
+    "esModuleInterop": true /* Enables emit interoperability between CommonJS and ES Modules via creation of namespace objects for all imports. Implies 'allowSyntheticDefaultImports'. */,
+    "resolveJsonModule": true,
+    "declaration": true
+  },
+  "exclude": ["node_modules", "**/*.test.ts", "dist"]
+}
diff --git a/tsup.config.ts b/tsup.config.ts
new file mode 100644
index 0000000..b05748a
--- /dev/null
+++ b/tsup.config.ts
@@ -0,0 +1,16 @@
+import { name } from "./package.json";
+import { defineConfig } from "tsup";
+
+export default defineConfig({
+  name,
+  entry: ["src/index.ts"],
+  format: ["esm"],
+  target: "node20",
+  bundle: true,
+  splitting: false,
+  sourcemap: true,
+  clean: true,
+  dts: {
+    resolve: true,
+  },
+});