diff --git a/.github/workflows/test-special-chars.yml b/.github/workflows/test-special-chars.yml
new file mode 100644
index 00000000..40001565
--- /dev/null
+++ b/.github/workflows/test-special-chars.yml
@@ -0,0 +1,115 @@
+name: Test special chars
+on:
+# # Uncomment when test added first time to register workflow and comment it back after workflow would be registered
+# #
+# # Added pull_request to register workflow from the PR.
+# # Read more https://stackoverflow.com/questions/63362126/github-actions-how-to-run-a-workflow-created-on-a-non-master-branch-from-the-wo
+# pull_request: {}
+  workflow_dispatch:
+    inputs:
+      paths:
+        description: 'A multiline yaml string of key/value pairs for each Name and Path to include in the comment'
+        required: false
+        default: |
+          Application: application
+          Dashboard: /dashboard
+
+jobs:
+  setup:
+    runs-on: ubuntu-latest
+    strategy:
+      matrix:
+        target: ["one", "two"]
+    steps:
+      - name: Setup
+        run: echo "Do setup"
+
+      - name: Checkout
+        uses: actions/checkout@v3
+
+      - uses: ./
+        id: writer
+        with:
+          matrix-step-name: ${{ github.job }}
+          matrix-key: ${{ matrix.target }}
+          outputs: |-
+            result2: ${{ matrix.target }}
+            test: "comment: URL for deploy: https://frontend-docs-259.website.dev"
+            test2: "comment: URL for deploy: ' https://frontend-docs-259.website.dev"
+            test3: ${{ toJson(inputs.paths) }}
+
+      - uses: nick-fields/assert-action@v1
+        with:
+          expected: ${{ matrix.target }}
+          actual: ${{ fromJson(steps.writer.outputs.result).result2 }}
+
+      - uses: nick-fields/assert-action@v1
+        with:
+          expected: ${{ matrix.target }}
+          actual: ${{ steps.writer.outputs.result2 }}
+
+      - uses: nick-fields/assert-action@v1
+        with:
+          expected: "comment: URL for deploy: https://frontend-docs-259.website.dev"
+          actual: ${{ fromJson(steps.writer.outputs.result).test }}
+
+      - uses: nick-fields/assert-action@v1
+        with:
+          expected: "comment: URL for deploy: https://frontend-docs-259.website.dev"
+          actual: ${{ steps.writer.outputs.test }}
+
+      - uses: nick-fields/assert-action@v1
+        with:
+          expected: "comment: URL for deploy: ' https://frontend-docs-259.website.dev"
+          actual: ${{ fromJson(steps.writer.outputs.result).test2 }}
+
+      - uses: nick-fields/assert-action@v1
+        with:
+          expected: "comment: URL for deploy: ' https://frontend-docs-259.website.dev"
+          actual: ${{ steps.writer.outputs.test2 }}
+
+      - uses: nick-fields/assert-action@v1
+        with:
+          expected: |
+            Application: application
+            Dashboard: /dashboard
+          actual: ${{ fromJson(steps.writer.outputs.result).test3 }}
+
+      - uses: nick-fields/assert-action@v1
+        with:
+          expected: |
+            Application: application
+            Dashboard: /dashboard
+          actual: ${{ steps.writer.outputs.test3 }}
+
+  test:
+    runs-on: ubuntu-latest
+    continue-on-error: true
+    needs: [setup]
+    steps:
+      - uses: actions/download-artifact@v3
+
+      - id: current
+        run: |-
+          echo "result=$(ls | wc -l)" >> $GITHUB_OUTPUT
+
+    outputs:
+      result: "${{ steps.current.outputs.result }}"
+
+  assert:
+    runs-on: ubuntu-latest
+    needs: [test]
+    steps:
+      - uses: nick-fields/assert-action@v1
+        with:
+          expected: '2'
+          actual: "${{ needs.test.outputs.result }}"
+
+
+  teardown:
+    runs-on: ubuntu-latest
+    needs: [assert]
+    if: ${{ always() }}
+    steps:
+      - name: Tear down
+        run: echo "Do Tear down"
diff --git a/.github/workflows/test-validation.yml b/.github/workflows/test-validation.yml
new file mode 100644
index 00000000..f68fd49f
--- /dev/null
+++ b/.github/workflows/test-validation.yml
@@ -0,0 +1,66 @@
+name: Test validation
+on:
+# # Uncomment when test added first time to register workflow and comment it back after workflow would be registered
+# #
+# # Added pull_request to register workflow from the PR.
+# # Read more https://stackoverflow.com/questions/63362126/github-actions-how-to-run-a-workflow-created-on-a-non-master-branch-from-the-wo
+# pull_request: {}
+  workflow_dispatch: {}
+
+jobs:
+  setup:
+    runs-on: ubuntu-latest
+    steps:
+      - name: Setup
+        run: echo "Do setup"
+
+  test:
+    runs-on: ubuntu-latest
+    strategy:
+      matrix:
+        target: ["one", "two"]
+    continue-on-error: true
+    needs: [setup]
+    steps:
+      - name: Checkout
+        uses: actions/checkout@v3
+
+      - uses: ./
+        id: current
+        with:
+          matrix-step-name: ${{ github.job }}
+          matrix-key: ${{ matrix.target }}
+          outputs: |-
+            result: ${{ matrix.target }}
+            test: comment: URL for deploy: https://frontend-docs-259.website.dev
+
+      - uses: nick-fields/assert-action@v1
+        with:
+          expected: ${{ matrix.target }}
+          actual: ${{ fromJson(steps.writer.outputs.result).result }}
+
+      - uses: nick-fields/assert-action@v1
+        with:
+          expected: "comment: URL for deploy: https://frontend-docs-259.website.dev"
+          actual: ${{ fromJson(steps.writer.outputs.result).test }}
+
+    outputs:
+      result: "${{ steps.current.outcome }}"
+
+  assert:
+    runs-on: ubuntu-latest
+    needs: [test]
+    steps:
+      - uses: nick-fields/assert-action@v1
+        with:
+          expected: 'failure'
+          actual: "${{ needs.test.outputs.result }}"
+
+
+  teardown:
+    runs-on: ubuntu-latest
+    needs: [assert]
+    if: ${{ always() }}
+    steps:
+      - name: Tear down
+        run: echo "Do Tear down"
diff --git a/README.yaml b/README.yaml
index 76bce588..4111ae3e 100644
--- a/README.yaml
+++ b/README.yaml
@@ -95,6 +95,8 @@ usage: |-
           matrix-key: ${{ matrix.platform }}
           outputs: |-
             image: ${{ steps.build.outputs.image }}:${{ steps.build.outputs.tag }}
+            ## Multiline string
+            tags: ${{ toJson(steps.build.outputs.image) }}
 
     ## Read matrix outputs
     read:
@@ -195,6 +197,7 @@ usage: |-
       outputs:
         image: ${{ fromJson(steps.out.outputs.result).image }}
+        image_alternative: ${{ steps.out.outputs.image }}
 
    ```
 
    Then you can use the workflow with matrix
diff --git a/action.yml b/action.yml
index c5529018..423a7164 100644
--- a/action.yml
+++ b/action.yml
@@ -16,69 +16,7 @@ inputs:
     description: "YAML structured map of outputs"
 outputs:
   result:
-    description: "Outputs result"
-    value: "${{ steps.proxy.outputs.result }}"
+    description: "Outputs result (Deprecated!!!)"
 runs:
-  using: "composite"
-  steps:
-    - uses: nick-fields/assert-action@v1
-      if: ${{ inputs.matrix-key != '' }}
-      with:
-        expected: ''
-        actual: "${{ inputs.matrix-step-name }}"
-        comparison: notEqual
-
-    - uses: nick-fields/assert-action@v1
-      if: ${{ inputs.matrix-step-name != '' }}
-      with:
-        expected: ''
-        actual: "${{ inputs.matrix-key }}"
-        comparison: notEqual
-
-    - name: 'Install jq 1.6'
-      uses: dcarbone/install-jq-action@v1.0.1
-      with:
-        version: 1.6
-        force: 'true'
-
-    - name: 'Setup yq'
-      uses: dcarbone/install-yq-action@v1.0.1
-      with:
-        version: v4.28.1
-        download-compressed: true
-        force: true
-
-    - shell: bash
-      id: proxy
-      run: |-
-        JSON=$(echo "${{ inputs.outputs }}" | yq '. + {}' -o json | jq . -c -M -e)
-        echo "result=${JSON}" >> $GITHUB_OUTPUT
-
-    - uses: cloudposse/github-action-yaml-config-query@0.1.1
-      id: mask
-      with:
-        query: .${{ inputs.matrix-key == '' }}
-        config: |-
-          true:
-            mask: ". + {}"
-          false:
-            mask: "{\"${{ inputs.matrix-key}}\": . + {}}"
-
-    - shell: bash
-      id: write
-      run: |-
-        JSON=$(echo "${{ inputs.outputs }}" | yq '${{ steps.mask.outputs.mask }}' -o json | jq . -c -M -e)
-        echo "result=${JSON}" >> $GITHUB_OUTPUT
-
-    - shell: bash
-      if: ${{ inputs.outputs != '' && inputs.matrix-step-name != '' }}
-      run: |
-        echo '${{ steps.write.outputs.result }}' > ${{ inputs.matrix-step-name }}
-      id: matrix
-
-    - uses: actions/upload-artifact@v3
-      if: ${{ inputs.outputs != '' && inputs.matrix-step-name != '' }}
-      with:
-        name: ${{ hashFiles( inputs.matrix-step-name ) || 'none' }}
-        path: ${{ inputs.matrix-step-name }}
-        if-no-files-found: warn
+  using: "node16"
+  main: "index.js"
diff --git a/index.js b/index.js
new file mode 100644
index 00000000..2b29b969
--- /dev/null
+++ b/index.js
@@ -0,0 +1,89 @@
+const core = require('@actions/core');
+const yaml = require('yaml')
+const artifact = require('@actions/artifact');
+const crypto = require('crypto');
+const fs = require('fs');
+
+try {
+  const step_name = core.getInput('matrix-step-name');
+  const matrix_key = core.getInput('matrix-key');
+  const outputs = core.getInput('outputs');
+
+  core.debug("step_name:")
+  core.debug(step_name)
+
+  core.debug("matrix_key:")
+  core.debug(matrix_key)
+
+  core.debug("outputs:")
+  core.debug(outputs)
+
+  function isEmptyInput(value) {
+    return value === null || value.trim() === "";
+  }
+
+  if (isEmptyInput(step_name) && !isEmptyInput(matrix_key)) {
+    core.setFailed("`matrix-step-name` can not be empty when `matrix-key` specified");
+    return
+  }
+
+  if (!isEmptyInput(step_name) && isEmptyInput(matrix_key)) {
+    core.setFailed("`matrix-key` can not be empty when `matrix-step-name` specified");
+    return
+  }
+
+  if (!isEmptyInput(outputs)) {
+    try {
+      yaml.parse(outputs)
+    }
+    catch (error) {
+      message = `Outputs should be valid YAML
+---------------------
+${outputs}
+---------------------
+${error}`;
+      core.setFailed(message);
+      return
+    }
+  }
+
+  const outputs_struct = !isEmptyInput(outputs) ? yaml.parse(outputs) : {}
+
+  Object.keys(outputs_struct).forEach(function(key, index) {
+    core.setOutput(key, outputs_struct[key])
+  });
+
+  core.debug("outputs_struct:")
+  core.debug(outputs_struct)
+
+  core.debug("JSON.stringify(outputs_struct):")
+  core.debug(JSON.stringify(outputs_struct))
+
+  core.setOutput('result', JSON.stringify(outputs_struct))
+
+  if (!isEmptyInput(outputs)) {
+    const artifact_content = isEmptyInput(matrix_key) ? outputs_struct : { matrix_key: outputs_struct }
+
+    fs.writeFileSync("./" + step_name, JSON.stringify(artifact_content));
+    const fileBuffer = fs.readFileSync("./" + step_name);
+    const hashSum = crypto.createHash('sha256');
+    hashSum.update(fileBuffer);
+
+    const hex = hashSum.digest('hex');
+
+    const artifactClient = artifact.create()
+    const artifactName = hex;
+    const files = [
+      "./" + step_name,
+    ]
+
+    const rootDirectory = '.'
// Also possible to use __dirname + const options = { + continueOnError: false + } + + const uploadResponse = artifactClient.uploadArtifact(artifactName, files, rootDirectory, options) + } +} catch (error) { + core.setFailed(error.message); +} \ No newline at end of file diff --git a/node_modules/.bin/rimraf b/node_modules/.bin/rimraf new file mode 120000 index 00000000..4cd49a49 --- /dev/null +++ b/node_modules/.bin/rimraf @@ -0,0 +1 @@ +../rimraf/bin.js \ No newline at end of file diff --git a/node_modules/.bin/uuid b/node_modules/.bin/uuid new file mode 120000 index 00000000..588f70ec --- /dev/null +++ b/node_modules/.bin/uuid @@ -0,0 +1 @@ +../uuid/dist/bin/uuid \ No newline at end of file diff --git a/node_modules/.package-lock.json b/node_modules/.package-lock.json new file mode 100644 index 00000000..3d4e01cb --- /dev/null +++ b/node_modules/.package-lock.json @@ -0,0 +1,193 @@ +{ + "name": "matrix-outputs-write", + "version": "1.0.0", + "lockfileVersion": 3, + "requires": true, + "packages": { + "node_modules/@actions/artifact": { + "version": "1.1.1", + "resolved": "https://registry.npmjs.org/@actions/artifact/-/artifact-1.1.1.tgz", + "integrity": "sha512-Vv4y0EW0ptEkU+Pjs5RGS/0EryTvI6s79LjSV9Gg/h+O3H/ddpjhuX/Bi/HZE4pbNPyjGtQjbdFWphkZhmgabA==", + "dependencies": { + "@actions/core": "^1.9.1", + "@actions/http-client": "^2.0.1", + "tmp": "^0.2.1", + "tmp-promise": "^3.0.2" + } + }, + "node_modules/@actions/core": { + "version": "1.10.0", + "resolved": "https://registry.npmjs.org/@actions/core/-/core-1.10.0.tgz", + "integrity": "sha512-2aZDDa3zrrZbP5ZYg159sNoLRb61nQ7awl5pSvIq5Qpj81vwDzdMRKzkWJGJuwVvWpvZKx7vspJALyvaaIQyug==", + "dependencies": { + "@actions/http-client": "^2.0.1", + "uuid": "^8.3.2" + } + }, + "node_modules/@actions/http-client": { + "version": "2.1.0", + "resolved": "https://registry.npmjs.org/@actions/http-client/-/http-client-2.1.0.tgz", + "integrity": "sha512-BonhODnXr3amchh4qkmjPMUO8mFi/zLaaCeCAJZqch8iQqyDnVIkySjB38VHAC8IJ+bnlgfOqlhpyCUZHlQsqw==", + "dependencies": { + "tunnel": "^0.0.6" + } + }, + "node_modules/balanced-match": { + "version": "1.0.2", + "resolved": "https://registry.npmjs.org/balanced-match/-/balanced-match-1.0.2.tgz", + "integrity": "sha512-3oSeUO0TMV67hN1AmbXsK4yaqU7tjiHlbxRDZOpH0KW9+CeX4bRAaX0Anxt0tx2MrpRpWwQaPwIlISEJhYU5Pw==" + }, + "node_modules/brace-expansion": { + "version": "1.1.11", + "resolved": "https://registry.npmjs.org/brace-expansion/-/brace-expansion-1.1.11.tgz", + "integrity": "sha512-iCuPHDFgrHX7H2vEI/5xpz07zSHB00TpugqhmYtVmMO6518mCuRMoOYFldEBl0g187ufozdaHgWKcYFb61qGiA==", + "dependencies": { + "balanced-match": "^1.0.0", + "concat-map": "0.0.1" + } + }, + "node_modules/concat-map": { + "version": "0.0.1", + "resolved": "https://registry.npmjs.org/concat-map/-/concat-map-0.0.1.tgz", + "integrity": "sha512-/Srv4dswyQNBfohGpz9o6Yb3Gz3SrUDqBH5rTuhGR7ahtlbYKnVxw2bCFMRljaA7EXHaXZ8wsHdodFvbkhKmqg==" + }, + "node_modules/crypto": { + "version": "1.0.1", + "resolved": "https://registry.npmjs.org/crypto/-/crypto-1.0.1.tgz", + "integrity": "sha512-VxBKmeNcqQdiUQUW2Tzq0t377b54N2bMtXO/qiLa+6eRRmmC4qT3D4OnTGoT/U6O9aklQ/jTwbOtRMTTY8G0Ig==", + "deprecated": "This package is no longer supported. It's now a built-in Node module. If you've depended on crypto, you should switch to the one that's built-in." 
+ }, + "node_modules/fs": { + "version": "0.0.1-security", + "resolved": "https://registry.npmjs.org/fs/-/fs-0.0.1-security.tgz", + "integrity": "sha512-3XY9e1pP0CVEUCdj5BmfIZxRBTSDycnbqhIOGec9QYtmVH2fbLpj86CFWkrNOkt/Fvty4KZG5lTglL9j/gJ87w==" + }, + "node_modules/fs.realpath": { + "version": "1.0.0", + "resolved": "https://registry.npmjs.org/fs.realpath/-/fs.realpath-1.0.0.tgz", + "integrity": "sha512-OO0pH2lK6a0hZnAdau5ItzHPI6pUlvI7jMVnxUQRtw4owF2wk8lOSabtGDCTP4Ggrg2MbGnWO9X8K1t4+fGMDw==" + }, + "node_modules/glob": { + "version": "7.2.3", + "resolved": "https://registry.npmjs.org/glob/-/glob-7.2.3.tgz", + "integrity": "sha512-nFR0zLpU2YCaRxwoCJvL6UvCH2JFyFVIvwTLsIf21AuHlMskA1hhTdk+LlYJtOlYt9v6dvszD2BGRqBL+iQK9Q==", + "dependencies": { + "fs.realpath": "^1.0.0", + "inflight": "^1.0.4", + "inherits": "2", + "minimatch": "^3.1.1", + "once": "^1.3.0", + "path-is-absolute": "^1.0.0" + }, + "engines": { + "node": "*" + }, + "funding": { + "url": "https://github.com/sponsors/isaacs" + } + }, + "node_modules/inflight": { + "version": "1.0.6", + "resolved": "https://registry.npmjs.org/inflight/-/inflight-1.0.6.tgz", + "integrity": "sha512-k92I/b08q4wvFscXCLvqfsHCrjrF7yiXsQuIVvVE7N82W3+aqpzuUdBbfhWcy/FZR3/4IgflMgKLOsvPDrGCJA==", + "dependencies": { + "once": "^1.3.0", + "wrappy": "1" + } + }, + "node_modules/inherits": { + "version": "2.0.4", + "resolved": "https://registry.npmjs.org/inherits/-/inherits-2.0.4.tgz", + "integrity": "sha512-k/vGaX4/Yla3WzyMCvTQOXYeIHvqOKtnqBduzTHpzpQZzAskKMhZ2K+EnBiSM9zGSoIFeMpXKxa4dYeZIQqewQ==" + }, + "node_modules/minimatch": { + "version": "3.1.2", + "resolved": "https://registry.npmjs.org/minimatch/-/minimatch-3.1.2.tgz", + "integrity": "sha512-J7p63hRiAjw1NDEww1W7i37+ByIrOWO5XQQAzZ3VOcL0PNybwpfmV/N05zFAzwQ9USyEcX6t3UO+K5aqBQOIHw==", + "dependencies": { + "brace-expansion": "^1.1.7" + }, + "engines": { + "node": "*" + } + }, + "node_modules/once": { + "version": "1.4.0", + "resolved": "https://registry.npmjs.org/once/-/once-1.4.0.tgz", + "integrity": "sha512-lNaJgI+2Q5URQBkccEKHTQOPaXdUxnZZElQTZY0MFUAuaEqe1E+Nyvgdz/aIyNi6Z9MzO5dv1H8n58/GELp3+w==", + "dependencies": { + "wrappy": "1" + } + }, + "node_modules/path-is-absolute": { + "version": "1.0.1", + "resolved": "https://registry.npmjs.org/path-is-absolute/-/path-is-absolute-1.0.1.tgz", + "integrity": "sha512-AVbw3UJ2e9bq64vSaS9Am0fje1Pa8pbGqTTsmXfaIiMpnr5DlDhfJOuLj9Sf95ZPVDAUerDfEk88MPmPe7UCQg==", + "engines": { + "node": ">=0.10.0" + } + }, + "node_modules/rimraf": { + "version": "3.0.2", + "resolved": "https://registry.npmjs.org/rimraf/-/rimraf-3.0.2.tgz", + "integrity": "sha512-JZkJMZkAGFFPP2YqXZXPbMlMBgsxzE8ILs4lMIX/2o0L9UBw9O/Y3o6wFw/i9YLapcUJWwqbi3kdxIPdC62TIA==", + "dependencies": { + "glob": "^7.1.3" + }, + "bin": { + "rimraf": "bin.js" + }, + "funding": { + "url": "https://github.com/sponsors/isaacs" + } + }, + "node_modules/tmp": { + "version": "0.2.1", + "resolved": "https://registry.npmjs.org/tmp/-/tmp-0.2.1.tgz", + "integrity": "sha512-76SUhtfqR2Ijn+xllcI5P1oyannHNHByD80W1q447gU3mp9G9PSpGdWmjUOHRDPiHYacIk66W7ubDTuPF3BEtQ==", + "dependencies": { + "rimraf": "^3.0.0" + }, + "engines": { + "node": ">=8.17.0" + } + }, + "node_modules/tmp-promise": { + "version": "3.0.3", + "resolved": "https://registry.npmjs.org/tmp-promise/-/tmp-promise-3.0.3.tgz", + "integrity": "sha512-RwM7MoPojPxsOBYnyd2hy0bxtIlVrihNs9pj5SUvY8Zz1sQcQG2tG1hSr8PDxfgEB8RNKDhqbIlroIarSNDNsQ==", + "dependencies": { + "tmp": "^0.2.0" + } + }, + "node_modules/tunnel": { + "version": "0.0.6", + "resolved": 
"https://registry.npmjs.org/tunnel/-/tunnel-0.0.6.tgz", + "integrity": "sha512-1h/Lnq9yajKY2PEbBadPXj3VxsDDu844OnaAo52UVmIzIvwwtBPIuNvkjuzBlTWpfJyUbG3ez0KSBibQkj4ojg==", + "engines": { + "node": ">=0.6.11 <=0.7.0 || >=0.7.3" + } + }, + "node_modules/uuid": { + "version": "8.3.2", + "resolved": "https://registry.npmjs.org/uuid/-/uuid-8.3.2.tgz", + "integrity": "sha512-+NYs2QeMWy+GWFOEm9xnn6HCDp0l7QBD7ml8zLUmJ+93Q5NF0NocErnwkTkXVFNiX3/fpC6afS8Dhb/gz7R7eg==", + "bin": { + "uuid": "dist/bin/uuid" + } + }, + "node_modules/wrappy": { + "version": "1.0.2", + "resolved": "https://registry.npmjs.org/wrappy/-/wrappy-1.0.2.tgz", + "integrity": "sha512-l4Sp/DRseor9wL6EvV2+TuQn63dMkPjZ/sp9XkghTEbV9KlPS1xUsZ3u7/IQO4wxtcFB4bgpQPRcR3QCvezPcQ==" + }, + "node_modules/yaml": { + "version": "2.2.1", + "resolved": "https://registry.npmjs.org/yaml/-/yaml-2.2.1.tgz", + "integrity": "sha512-e0WHiYql7+9wr4cWMx3TVQrNwejKaEe7/rHNmQmqRjazfOP5W8PB6Jpebb5o6fIapbz9o9+2ipcaTM2ZwDI6lw==", + "engines": { + "node": ">= 14" + } + } + } +} diff --git a/node_modules/@actions/artifact/LICENSE.md b/node_modules/@actions/artifact/LICENSE.md new file mode 100644 index 00000000..dbae2edb --- /dev/null +++ b/node_modules/@actions/artifact/LICENSE.md @@ -0,0 +1,9 @@ +The MIT License (MIT) + +Copyright 2019 GitHub + +Permission is hereby granted, free of charge, to any person obtaining a copy of this software and associated documentation files (the "Software"), to deal in the Software without restriction, including without limitation the rights to use, copy, modify, merge, publish, distribute, sublicense, and/or sell copies of the Software, and to permit persons to whom the Software is furnished to do so, subject to the following conditions: + +The above copyright notice and this permission notice shall be included in all copies or substantial portions of the Software. + +THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE SOFTWARE. \ No newline at end of file diff --git a/node_modules/@actions/artifact/README.md b/node_modules/@actions/artifact/README.md new file mode 100644 index 00000000..cdab7fdf --- /dev/null +++ b/node_modules/@actions/artifact/README.md @@ -0,0 +1,213 @@ +# `@actions/artifact` + +## Usage + +You can use this package to interact with the actions artifacts. +- [Upload an Artifact](#Upload-an-Artifact) +- [Download a Single Artifact](#Download-a-Single-Artifact) +- [Download All Artifacts](#Download-all-Artifacts) +- [Additional Documentation](#Additional-Documentation) +- [Contributions](#Contributions) + +Relative paths and absolute paths are both allowed. Relative paths are rooted against the current working directory. + +## Upload an Artifact + +Method Name: `uploadArtifact` + +#### Inputs + - `name` + - The name of the artifact that is being uploaded + - Required + - `files` + - A list of file paths that describe what should be uploaded as part of the artifact + - If a path is provided that does not exist, an error will be thrown + - Can be absolute or relative. Internally everything is normalized and resolved + - Required + - `rootDirectory` + - A file path that denotes the root directory of the files being uploaded. 
This path is used to strip the paths provided in `files` to control how they are uploaded and structured + - If a file specified in `files` is not in the `rootDirectory`, an error will be thrown + - Required + - `options` + - Extra options that allow for the customization of the upload behavior + - Optional + +#### Available Options + + - `continueOnError` + - Indicates if the artifact upload should continue in the event a file fails to upload. If there is a error during upload, a partial artifact will always be created and available for download at the end. The `size` reported will be the amount of storage that the user or org will be charged for the partial artifact. + - If set to `false`, and an error is encountered, all other uploads will stop and any files that were queued will not be attempted to be uploaded. The partial artifact available will only include files up until the failure. + - If set to `true` and an error is encountered, the failed file will be skipped and ignored and all other queued files will be attempted to be uploaded. There will be an artifact available for download at the end with everything excluding the file that failed to upload + - Optional, defaults to `true` if not specified +- `retentionDays` + - Duration after which artifact will expire in days + - Minimum value: 1 + - Maximum value: 90 unless changed by repository setting + - If this is set to a greater value than the retention settings allowed, the retention on artifacts will be reduced to match the max value allowed on the server, and the upload process will continue. An input of 0 assumes default retention value. + +#### Example using Absolute File Paths + +```js +const artifact = require('@actions/artifact'); +const artifactClient = artifact.create() +const artifactName = 'my-artifact'; +const files = [ + '/home/user/files/plz-upload/file1.txt', + '/home/user/files/plz-upload/file2.txt', + '/home/user/files/plz-upload/dir/file3.txt' +] +const rootDirectory = '/home/user/files/plz-upload' +const options = { + continueOnError: true +} + +const uploadResult = await artifactClient.uploadArtifact(artifactName, files, rootDirectory, options) +``` + +#### Example using Relative File Paths +```js +// Assuming the current working directory is /home/user/files/plz-upload +const artifact = require('@actions/artifact'); +const artifactClient = artifact.create() +const artifactName = 'my-artifact'; +const files = [ + 'file1.txt', + 'file2.txt', + 'dir/file3.txt' +] + +const rootDirectory = '.' // Also possible to use __dirname +const options = { + continueOnError: false +} + +const uploadResponse = await artifactClient.uploadArtifact(artifactName, files, rootDirectory, options) +``` + +#### Upload Result + +The returned `UploadResponse` will contain the following information + +- `artifactName` + - The name of the artifact that was uploaded +- `artifactItems` + - A list of all files that describe what is uploaded if there are no errors encountered. Usually this will be equal to the inputted `files` with the exception of empty directories (will not be uploaded) +- `size` + - Total size of the artifact that was uploaded in bytes +- `failedItems` + - A list of items that were not uploaded successfully (this will include queued items that were not uploaded if `continueOnError` is set to false). 
This is a subset of `artifactItems` + +## Download a Single Artifact + +Method Name: `downloadArtifact` + +#### Inputs + - `name` + - The name of the artifact to download + - Required + - `path` + - Path that denotes where the artifact will be downloaded to + - Optional. Defaults to the GitHub workspace directory(`$GITHUB_WORKSPACE`) if not specified + - `options` + - Extra options that allow for the customization of the download behavior + - Optional + + +#### Available Options + + - `createArtifactFolder` + - Specifies if a folder (the artifact name) is created for the artifact that is downloaded (contents downloaded into this folder), + - Optional. Defaults to false if not specified + +#### Example + +```js +const artifact = require('@actions/artifact'); +const artifactClient = artifact.create() +const artifactName = 'my-artifact'; +const path = 'some/directory' +const options = { + createArtifactFolder: false +} + +const downloadResponse = await artifactClient.downloadArtifact(artifactName, path, options) + +// Post download, the directory structure will look like this +/some + /directory + /file1.txt + /file2.txt + /dir + /file3.txt + +// If createArtifactFolder is set to true, the directory structure will look like this +/some + /directory + /my-artifact + /file1.txt + /file2.txt + /dir + /file3.txt +``` + +#### Download Response + +The returned `DownloadResponse` will contain the following information + + - `artifactName` + - The name of the artifact that was downloaded + - `downloadPath` + - The full Path to where the artifact was downloaded + + +## Download All Artifacts + +Method Name: `downloadAllArtifacts` + +#### Inputs + - `path` + - Path that denotes where the artifact will be downloaded to + - Optional. Defaults to the GitHub workspace directory(`$GITHUB_WORKSPACE`) if not specified + +```js +const artifact = require('@actions/artifact'); +const artifactClient = artifact.create(); +const downloadResponse = await artifactClient.downloadAllArtifacts(); + +// output result +for (response in downloadResponse) { + console.log(response.artifactName); + console.log(response.downloadPath); +} +``` + +Because there are multiple artifacts, an extra directory (denoted by the name of the artifact) will be created for each artifact in the path. With 2 artifacts(`my-artifact-1` and `my-artifact-2` for example) and the default path, the directory structure will be as follows: +```js +/GITHUB_WORKSPACE + /my-artifact-1 + / .. contents of `my-artifact-1` + /my-artifact-2 + / .. contents of `my-artifact-2` +``` + +#### Download Result + +An array will be returned that describes the results for downloading all artifacts. The number of items in the array indicates the number of artifacts that were downloaded. + +Each artifact will have the same `DownloadResponse` as if it was individually downloaded + - `artifactName` + - The name of the artifact that was downloaded + - `downloadPath` + - The full Path to where the artifact was downloaded + +## Additional Documentation + +Check out [additional-information](docs/additional-information.md) for extra documentation around usage, restrictions and behavior. + +Check out [implementation-details](docs/implementation-details.md) for extra information about the implementation of this package. + +## Contributions + +See [contributor guidelines](https://github.com/actions/toolkit/blob/main/.github/CONTRIBUTING.md) for general guidelines and information about toolkit contributions. 
+ +For contributions related to this package, see [artifact contributions](CONTRIBUTIONS.md) for more information. diff --git a/node_modules/@actions/artifact/lib/artifact-client.d.ts b/node_modules/@actions/artifact/lib/artifact-client.d.ts new file mode 100644 index 00000000..133f1778 --- /dev/null +++ b/node_modules/@actions/artifact/lib/artifact-client.d.ts @@ -0,0 +1,10 @@ +import { UploadOptions } from './internal/upload-options'; +import { UploadResponse } from './internal/upload-response'; +import { DownloadOptions } from './internal/download-options'; +import { DownloadResponse } from './internal/download-response'; +import { ArtifactClient } from './internal/artifact-client'; +export { ArtifactClient, UploadResponse, UploadOptions, DownloadResponse, DownloadOptions }; +/** + * Constructs an ArtifactClient + */ +export declare function create(): ArtifactClient; diff --git a/node_modules/@actions/artifact/lib/artifact-client.js b/node_modules/@actions/artifact/lib/artifact-client.js new file mode 100644 index 00000000..3374f4bd --- /dev/null +++ b/node_modules/@actions/artifact/lib/artifact-client.js @@ -0,0 +1,12 @@ +"use strict"; +Object.defineProperty(exports, "__esModule", { value: true }); +exports.create = void 0; +const artifact_client_1 = require("./internal/artifact-client"); +/** + * Constructs an ArtifactClient + */ +function create() { + return artifact_client_1.DefaultArtifactClient.create(); +} +exports.create = create; +//# sourceMappingURL=artifact-client.js.map \ No newline at end of file diff --git a/node_modules/@actions/artifact/lib/artifact-client.js.map b/node_modules/@actions/artifact/lib/artifact-client.js.map new file mode 100644 index 00000000..6aadc1b0 --- /dev/null +++ b/node_modules/@actions/artifact/lib/artifact-client.js.map @@ -0,0 +1 @@ +{"version":3,"file":"artifact-client.js","sourceRoot":"","sources":["../src/artifact-client.ts"],"names":[],"mappings":";;;AAIA,gEAAgF;AAUhF;;GAEG;AACH,SAAgB,MAAM;IACpB,OAAO,uCAAqB,CAAC,MAAM,EAAE,CAAA;AACvC,CAAC;AAFD,wBAEC"} \ No newline at end of file diff --git a/node_modules/@actions/artifact/lib/internal/artifact-client.d.ts b/node_modules/@actions/artifact/lib/internal/artifact-client.d.ts new file mode 100644 index 00000000..eb0c26b3 --- /dev/null +++ b/node_modules/@actions/artifact/lib/internal/artifact-client.d.ts @@ -0,0 +1,41 @@ +import { UploadResponse } from './upload-response'; +import { UploadOptions } from './upload-options'; +import { DownloadOptions } from './download-options'; +import { DownloadResponse } from './download-response'; +export interface ArtifactClient { + /** + * Uploads an artifact + * + * @param name the name of the artifact, required + * @param files a list of absolute or relative paths that denote what files should be uploaded + * @param rootDirectory an absolute or relative file path that denotes the root parent directory of the files being uploaded + * @param options extra options for customizing the upload behavior + * @returns single UploadInfo object + */ + uploadArtifact(name: string, files: string[], rootDirectory: string, options?: UploadOptions): Promise; + /** + * Downloads a single artifact associated with a run + * + * @param name the name of the artifact being downloaded + * @param path optional path that denotes where the artifact will be downloaded to + * @param options extra options that allow for the customization of the download behavior + */ + downloadArtifact(name: string, path?: string, options?: DownloadOptions): Promise; + /** + * Downloads all artifacts 
associated with a run. Because there are multiple artifacts being downloaded, a folder will be created for each one in the specified or default directory + * @param path optional path that denotes where the artifacts will be downloaded to + */ + downloadAllArtifacts(path?: string): Promise; +} +export declare class DefaultArtifactClient implements ArtifactClient { + /** + * Constructs a DefaultArtifactClient + */ + static create(): DefaultArtifactClient; + /** + * Uploads an artifact + */ + uploadArtifact(name: string, files: string[], rootDirectory: string, options?: UploadOptions | undefined): Promise; + downloadArtifact(name: string, path?: string | undefined, options?: DownloadOptions | undefined): Promise; + downloadAllArtifacts(path?: string | undefined): Promise; +} diff --git a/node_modules/@actions/artifact/lib/internal/artifact-client.js b/node_modules/@actions/artifact/lib/internal/artifact-client.js new file mode 100644 index 00000000..d937578a --- /dev/null +++ b/node_modules/@actions/artifact/lib/internal/artifact-client.js @@ -0,0 +1,178 @@ +"use strict"; +var __createBinding = (this && this.__createBinding) || (Object.create ? (function(o, m, k, k2) { + if (k2 === undefined) k2 = k; + Object.defineProperty(o, k2, { enumerable: true, get: function() { return m[k]; } }); +}) : (function(o, m, k, k2) { + if (k2 === undefined) k2 = k; + o[k2] = m[k]; +})); +var __setModuleDefault = (this && this.__setModuleDefault) || (Object.create ? (function(o, v) { + Object.defineProperty(o, "default", { enumerable: true, value: v }); +}) : function(o, v) { + o["default"] = v; +}); +var __importStar = (this && this.__importStar) || function (mod) { + if (mod && mod.__esModule) return mod; + var result = {}; + if (mod != null) for (var k in mod) if (k !== "default" && Object.hasOwnProperty.call(mod, k)) __createBinding(result, mod, k); + __setModuleDefault(result, mod); + return result; +}; +var __awaiter = (this && this.__awaiter) || function (thisArg, _arguments, P, generator) { + function adopt(value) { return value instanceof P ? value : new P(function (resolve) { resolve(value); }); } + return new (P || (P = Promise))(function (resolve, reject) { + function fulfilled(value) { try { step(generator.next(value)); } catch (e) { reject(e); } } + function rejected(value) { try { step(generator["throw"](value)); } catch (e) { reject(e); } } + function step(result) { result.done ? 
resolve(result.value) : adopt(result.value).then(fulfilled, rejected); } + step((generator = generator.apply(thisArg, _arguments || [])).next()); + }); +}; +Object.defineProperty(exports, "__esModule", { value: true }); +exports.DefaultArtifactClient = void 0; +const core = __importStar(require("@actions/core")); +const upload_specification_1 = require("./upload-specification"); +const upload_http_client_1 = require("./upload-http-client"); +const utils_1 = require("./utils"); +const path_and_artifact_name_validation_1 = require("./path-and-artifact-name-validation"); +const download_http_client_1 = require("./download-http-client"); +const download_specification_1 = require("./download-specification"); +const config_variables_1 = require("./config-variables"); +const path_1 = require("path"); +class DefaultArtifactClient { + /** + * Constructs a DefaultArtifactClient + */ + static create() { + return new DefaultArtifactClient(); + } + /** + * Uploads an artifact + */ + uploadArtifact(name, files, rootDirectory, options) { + return __awaiter(this, void 0, void 0, function* () { + core.info(`Starting artifact upload +For more detailed logs during the artifact upload process, enable step-debugging: https://docs.github.com/actions/monitoring-and-troubleshooting-workflows/enabling-debug-logging#enabling-step-debug-logging`); + path_and_artifact_name_validation_1.checkArtifactName(name); + // Get specification for the files being uploaded + const uploadSpecification = upload_specification_1.getUploadSpecification(name, rootDirectory, files); + const uploadResponse = { + artifactName: name, + artifactItems: [], + size: 0, + failedItems: [] + }; + const uploadHttpClient = new upload_http_client_1.UploadHttpClient(); + if (uploadSpecification.length === 0) { + core.warning(`No files found that can be uploaded`); + } + else { + // Create an entry for the artifact in the file container + const response = yield uploadHttpClient.createArtifactInFileContainer(name, options); + if (!response.fileContainerResourceUrl) { + core.debug(response.toString()); + throw new Error('No URL provided by the Artifact Service to upload an artifact to'); + } + core.debug(`Upload Resource URL: ${response.fileContainerResourceUrl}`); + core.info(`Container for artifact "${name}" successfully created. Starting upload of file(s)`); + // Upload each of the files that were found concurrently + const uploadResult = yield uploadHttpClient.uploadArtifactToFileContainer(response.fileContainerResourceUrl, uploadSpecification, options); + // Update the size of the artifact to indicate we are done uploading + // The uncompressed size is used for display when downloading a zip of the artifact from the UI + core.info(`File upload process has finished. Finalizing the artifact upload`); + yield uploadHttpClient.patchArtifactSize(uploadResult.totalSize, name); + if (uploadResult.failedItems.length > 0) { + core.info(`Upload finished. There were ${uploadResult.failedItems.length} items that failed to upload`); + } + else { + core.info(`Artifact has been finalized. All files have been successfully uploaded!`); + } + core.info(` +The raw size of all the files that were specified for upload is ${uploadResult.totalSize} bytes +The size of all the files that were uploaded is ${uploadResult.uploadSize} bytes. This takes into account any gzip compression used to reduce the upload size, time and storage + +Note: The size of downloaded zips can differ significantly from the reported size. 
For more information see: https://github.com/actions/upload-artifact#zipped-artifact-downloads \r\n`); + uploadResponse.artifactItems = uploadSpecification.map(item => item.absoluteFilePath); + uploadResponse.size = uploadResult.uploadSize; + uploadResponse.failedItems = uploadResult.failedItems; + } + return uploadResponse; + }); + } + downloadArtifact(name, path, options) { + return __awaiter(this, void 0, void 0, function* () { + const downloadHttpClient = new download_http_client_1.DownloadHttpClient(); + const artifacts = yield downloadHttpClient.listArtifacts(); + if (artifacts.count === 0) { + throw new Error(`Unable to find any artifacts for the associated workflow`); + } + const artifactToDownload = artifacts.value.find(artifact => { + return artifact.name === name; + }); + if (!artifactToDownload) { + throw new Error(`Unable to find an artifact with the name: ${name}`); + } + const items = yield downloadHttpClient.getContainerItems(artifactToDownload.name, artifactToDownload.fileContainerResourceUrl); + if (!path) { + path = config_variables_1.getWorkSpaceDirectory(); + } + path = path_1.normalize(path); + path = path_1.resolve(path); + // During upload, empty directories are rejected by the remote server so there should be no artifacts that consist of only empty directories + const downloadSpecification = download_specification_1.getDownloadSpecification(name, items.value, path, (options === null || options === void 0 ? void 0 : options.createArtifactFolder) || false); + if (downloadSpecification.filesToDownload.length === 0) { + core.info(`No downloadable files were found for the artifact: ${artifactToDownload.name}`); + } + else { + // Create all necessary directories recursively before starting any download + yield utils_1.createDirectoriesForArtifact(downloadSpecification.directoryStructure); + core.info('Directory structure has been setup for the artifact'); + yield utils_1.createEmptyFilesForArtifact(downloadSpecification.emptyFilesToCreate); + yield downloadHttpClient.downloadSingleArtifact(downloadSpecification.filesToDownload); + } + return { + artifactName: name, + downloadPath: downloadSpecification.rootDownloadLocation + }; + }); + } + downloadAllArtifacts(path) { + return __awaiter(this, void 0, void 0, function* () { + const downloadHttpClient = new download_http_client_1.DownloadHttpClient(); + const response = []; + const artifacts = yield downloadHttpClient.listArtifacts(); + if (artifacts.count === 0) { + core.info('Unable to find any artifacts for the associated workflow'); + return response; + } + if (!path) { + path = config_variables_1.getWorkSpaceDirectory(); + } + path = path_1.normalize(path); + path = path_1.resolve(path); + let downloadedArtifacts = 0; + while (downloadedArtifacts < artifacts.count) { + const currentArtifactToDownload = artifacts.value[downloadedArtifacts]; + downloadedArtifacts += 1; + core.info(`starting download of artifact ${currentArtifactToDownload.name} : ${downloadedArtifacts}/${artifacts.count}`); + // Get container entries for the specific artifact + const items = yield downloadHttpClient.getContainerItems(currentArtifactToDownload.name, currentArtifactToDownload.fileContainerResourceUrl); + const downloadSpecification = download_specification_1.getDownloadSpecification(currentArtifactToDownload.name, items.value, path, true); + if (downloadSpecification.filesToDownload.length === 0) { + core.info(`No downloadable files were found for any artifact ${currentArtifactToDownload.name}`); + } + else { + yield 
utils_1.createDirectoriesForArtifact(downloadSpecification.directoryStructure); + yield utils_1.createEmptyFilesForArtifact(downloadSpecification.emptyFilesToCreate); + yield downloadHttpClient.downloadSingleArtifact(downloadSpecification.filesToDownload); + } + response.push({ + artifactName: currentArtifactToDownload.name, + downloadPath: downloadSpecification.rootDownloadLocation + }); + } + return response; + }); + } +} +exports.DefaultArtifactClient = DefaultArtifactClient; +//# sourceMappingURL=artifact-client.js.map \ No newline at end of file diff --git a/node_modules/@actions/artifact/lib/internal/artifact-client.js.map b/node_modules/@actions/artifact/lib/internal/artifact-client.js.map new file mode 100644 index 00000000..00240ef2 --- /dev/null +++ b/node_modules/@actions/artifact/lib/internal/artifact-client.js.map @@ -0,0 +1 @@ +{"version":3,"file":"artifact-client.js","sourceRoot":"","sources":["../../src/internal/artifact-client.ts"],"names":[],"mappings":";;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;AAAA,oDAAqC;AACrC,iEAG+B;AAC/B,6DAAqD;AAKrD,mCAGgB;AAChB,2FAAqE;AACrE,iEAAyD;AACzD,qEAAiE;AACjE,yDAAwD;AACxD,+BAAuC;AAuCvC,MAAa,qBAAqB;IAChC;;OAEG;IACH,MAAM,CAAC,MAAM;QACX,OAAO,IAAI,qBAAqB,EAAE,CAAA;IACpC,CAAC;IAED;;OAEG;IACG,cAAc,CAClB,IAAY,EACZ,KAAe,EACf,aAAqB,EACrB,OAAmC;;YAEnC,IAAI,CAAC,IAAI,CACP;8MACwM,CACzM,CAAA;YACD,qDAAiB,CAAC,IAAI,CAAC,CAAA;YAEvB,iDAAiD;YACjD,MAAM,mBAAmB,GAA0B,6CAAsB,CACvE,IAAI,EACJ,aAAa,EACb,KAAK,CACN,CAAA;YACD,MAAM,cAAc,GAAmB;gBACrC,YAAY,EAAE,IAAI;gBAClB,aAAa,EAAE,EAAE;gBACjB,IAAI,EAAE,CAAC;gBACP,WAAW,EAAE,EAAE;aAChB,CAAA;YAED,MAAM,gBAAgB,GAAG,IAAI,qCAAgB,EAAE,CAAA;YAE/C,IAAI,mBAAmB,CAAC,MAAM,KAAK,CAAC,EAAE;gBACpC,IAAI,CAAC,OAAO,CAAC,qCAAqC,CAAC,CAAA;aACpD;iBAAM;gBACL,yDAAyD;gBACzD,MAAM,QAAQ,GAAG,MAAM,gBAAgB,CAAC,6BAA6B,CACnE,IAAI,EACJ,OAAO,CACR,CAAA;gBACD,IAAI,CAAC,QAAQ,CAAC,wBAAwB,EAAE;oBACtC,IAAI,CAAC,KAAK,CAAC,QAAQ,CAAC,QAAQ,EAAE,CAAC,CAAA;oBAC/B,MAAM,IAAI,KAAK,CACb,kEAAkE,CACnE,CAAA;iBACF;gBAED,IAAI,CAAC,KAAK,CAAC,wBAAwB,QAAQ,CAAC,wBAAwB,EAAE,CAAC,CAAA;gBACvE,IAAI,CAAC,IAAI,CACP,2BAA2B,IAAI,oDAAoD,CACpF,CAAA;gBAED,wDAAwD;gBACxD,MAAM,YAAY,GAAG,MAAM,gBAAgB,CAAC,6BAA6B,CACvE,QAAQ,CAAC,wBAAwB,EACjC,mBAAmB,EACnB,OAAO,CACR,CAAA;gBAED,oEAAoE;gBACpE,+FAA+F;gBAC/F,IAAI,CAAC,IAAI,CACP,kEAAkE,CACnE,CAAA;gBACD,MAAM,gBAAgB,CAAC,iBAAiB,CAAC,YAAY,CAAC,SAAS,EAAE,IAAI,CAAC,CAAA;gBAEtE,IAAI,YAAY,CAAC,WAAW,CAAC,MAAM,GAAG,CAAC,EAAE;oBACvC,IAAI,CAAC,IAAI,CACP,+BAA+B,YAAY,CAAC,WAAW,CAAC,MAAM,8BAA8B,CAC7F,CAAA;iBACF;qBAAM;oBACL,IAAI,CAAC,IAAI,CACP,yEAAyE,CAC1E,CAAA;iBACF;gBAED,IAAI,CAAC,IAAI,CACP;kEAC0D,YAAY,CAAC,SAAS;kDACtC,YAAY,CAAC,UAAU;;uLAE8G,CAChL,CAAA;gBAED,cAAc,CAAC,aAAa,GAAG,mBAAmB,CAAC,GAAG,CACpD,IAAI,CAAC,EAAE,CAAC,IAAI,CAAC,gBAAgB,CAC9B,CAAA;gBACD,cAAc,CAAC,IAAI,GAAG,YAAY,CAAC,UAAU,CAAA;gBAC7C,cAAc,CAAC,WAAW,GAAG,YAAY,CAAC,WAAW,CAAA;aACtD;YACD,OAAO,cAAc,CAAA;QACvB,CAAC;KAAA;IAEK,gBAAgB,CACpB,IAAY,EACZ,IAAyB,EACzB,OAAqC;;YAErC,MAAM,kBAAkB,GAAG,IAAI,yCAAkB,EAAE,CAAA;YAEnD,MAAM,SAAS,GAAG,MAAM,kBAAkB,CAAC,aAAa,EAAE,CAAA;YAC1D,IAAI,SAAS,CAAC,KAAK,KAAK,CAAC,EAAE;gBACzB,MAAM,IAAI,KAAK,CACb,0DAA0D,CAC3D,CAAA;aACF;YAED,MAAM,kBAAkB,GAAG,SAAS,CAAC,KAAK,CAAC,IAAI,CAAC,QAAQ,CAAC,EAAE;gBACzD,OAAO,QAAQ,CAAC,IAAI,KAAK,IAAI,CAAA;YAC/B,CAAC,CAAC,CAAA;YACF,IAAI,CAAC,kBAAkB,EAAE;gBACvB,MAAM,IAAI,KAAK,CAAC,6CAA6C,IAAI,EAAE,CAAC,CAAA;aACrE;YAED,MAAM,KAAK,GAAG,MAAM,kBAAkB,CAAC,iBAAiB,CACtD,kBAAkB,CAAC,IAAI,EACvB,kBAAkB,CAAC,wBAAwB,CAC5C,CAAA;YAED,IAAI,CAAC,IAAI,EAAE;gBACT,IAAI,GAAG,wCAAqB,EAAE,CAAA;aAC/B;YACD,IAAI,GAAG,gBAAS,CAAC,IAAI,CAAC,CAAA;YACtB,IAAI,GAAG,cAAO,CAAC,IAAI,CAAC,CAAA;YAEpB,4IA
A4I;YAC5I,MAAM,qBAAqB,GAAG,iDAAwB,CACpD,IAAI,EACJ,KAAK,CAAC,KAAK,EACX,IAAI,EACJ,CAAA,OAAO,aAAP,OAAO,uBAAP,OAAO,CAAE,oBAAoB,KAAI,KAAK,CACvC,CAAA;YAED,IAAI,qBAAqB,CAAC,eAAe,CAAC,MAAM,KAAK,CAAC,EAAE;gBACtD,IAAI,CAAC,IAAI,CACP,sDAAsD,kBAAkB,CAAC,IAAI,EAAE,CAChF,CAAA;aACF;iBAAM;gBACL,4EAA4E;gBAC5E,MAAM,oCAA4B,CAChC,qBAAqB,CAAC,kBAAkB,CACzC,CAAA;gBACD,IAAI,CAAC,IAAI,CAAC,qDAAqD,CAAC,CAAA;gBAChE,MAAM,mCAA2B,CAC/B,qBAAqB,CAAC,kBAAkB,CACzC,CAAA;gBACD,MAAM,kBAAkB,CAAC,sBAAsB,CAC7C,qBAAqB,CAAC,eAAe,CACtC,CAAA;aACF;YAED,OAAO;gBACL,YAAY,EAAE,IAAI;gBAClB,YAAY,EAAE,qBAAqB,CAAC,oBAAoB;aACzD,CAAA;QACH,CAAC;KAAA;IAEK,oBAAoB,CACxB,IAAyB;;YAEzB,MAAM,kBAAkB,GAAG,IAAI,yCAAkB,EAAE,CAAA;YAEnD,MAAM,QAAQ,GAAuB,EAAE,CAAA;YACvC,MAAM,SAAS,GAAG,MAAM,kBAAkB,CAAC,aAAa,EAAE,CAAA;YAC1D,IAAI,SAAS,CAAC,KAAK,KAAK,CAAC,EAAE;gBACzB,IAAI,CAAC,IAAI,CAAC,0DAA0D,CAAC,CAAA;gBACrE,OAAO,QAAQ,CAAA;aAChB;YAED,IAAI,CAAC,IAAI,EAAE;gBACT,IAAI,GAAG,wCAAqB,EAAE,CAAA;aAC/B;YACD,IAAI,GAAG,gBAAS,CAAC,IAAI,CAAC,CAAA;YACtB,IAAI,GAAG,cAAO,CAAC,IAAI,CAAC,CAAA;YAEpB,IAAI,mBAAmB,GAAG,CAAC,CAAA;YAC3B,OAAO,mBAAmB,GAAG,SAAS,CAAC,KAAK,EAAE;gBAC5C,MAAM,yBAAyB,GAAG,SAAS,CAAC,KAAK,CAAC,mBAAmB,CAAC,CAAA;gBACtE,mBAAmB,IAAI,CAAC,CAAA;gBACxB,IAAI,CAAC,IAAI,CACP,iCAAiC,yBAAyB,CAAC,IAAI,MAAM,mBAAmB,IAAI,SAAS,CAAC,KAAK,EAAE,CAC9G,CAAA;gBAED,kDAAkD;gBAClD,MAAM,KAAK,GAAG,MAAM,kBAAkB,CAAC,iBAAiB,CACtD,yBAAyB,CAAC,IAAI,EAC9B,yBAAyB,CAAC,wBAAwB,CACnD,CAAA;gBAED,MAAM,qBAAqB,GAAG,iDAAwB,CACpD,yBAAyB,CAAC,IAAI,EAC9B,KAAK,CAAC,KAAK,EACX,IAAI,EACJ,IAAI,CACL,CAAA;gBACD,IAAI,qBAAqB,CAAC,eAAe,CAAC,MAAM,KAAK,CAAC,EAAE;oBACtD,IAAI,CAAC,IAAI,CACP,qDAAqD,yBAAyB,CAAC,IAAI,EAAE,CACtF,CAAA;iBACF;qBAAM;oBACL,MAAM,oCAA4B,CAChC,qBAAqB,CAAC,kBAAkB,CACzC,CAAA;oBACD,MAAM,mCAA2B,CAC/B,qBAAqB,CAAC,kBAAkB,CACzC,CAAA;oBACD,MAAM,kBAAkB,CAAC,sBAAsB,CAC7C,qBAAqB,CAAC,eAAe,CACtC,CAAA;iBACF;gBAED,QAAQ,CAAC,IAAI,CAAC;oBACZ,YAAY,EAAE,yBAAyB,CAAC,IAAI;oBAC5C,YAAY,EAAE,qBAAqB,CAAC,oBAAoB;iBACzD,CAAC,CAAA;aACH;YACD,OAAO,QAAQ,CAAA;QACjB,CAAC;KAAA;CACF;AAhOD,sDAgOC"} \ No newline at end of file diff --git a/node_modules/@actions/artifact/lib/internal/config-variables.d.ts b/node_modules/@actions/artifact/lib/internal/config-variables.d.ts new file mode 100644 index 00000000..8e66a0d1 --- /dev/null +++ b/node_modules/@actions/artifact/lib/internal/config-variables.d.ts @@ -0,0 +1,11 @@ +export declare function getUploadFileConcurrency(): number; +export declare function getUploadChunkSize(): number; +export declare function getRetryLimit(): number; +export declare function getRetryMultiplier(): number; +export declare function getInitialRetryIntervalInMilliseconds(): number; +export declare function getDownloadFileConcurrency(): number; +export declare function getRuntimeToken(): string; +export declare function getRuntimeUrl(): string; +export declare function getWorkFlowRunId(): string; +export declare function getWorkSpaceDirectory(): string; +export declare function getRetentionDays(): string | undefined; diff --git a/node_modules/@actions/artifact/lib/internal/config-variables.js b/node_modules/@actions/artifact/lib/internal/config-variables.js new file mode 100644 index 00000000..f8a1828e --- /dev/null +++ b/node_modules/@actions/artifact/lib/internal/config-variables.js @@ -0,0 +1,72 @@ +"use strict"; +Object.defineProperty(exports, "__esModule", { value: true }); +exports.getRetentionDays = exports.getWorkSpaceDirectory = exports.getWorkFlowRunId = exports.getRuntimeUrl = exports.getRuntimeToken = exports.getDownloadFileConcurrency = exports.getInitialRetryIntervalInMilliseconds = 
exports.getRetryMultiplier = exports.getRetryLimit = exports.getUploadChunkSize = exports.getUploadFileConcurrency = void 0; +// The number of concurrent uploads that happens at the same time +function getUploadFileConcurrency() { + return 2; +} +exports.getUploadFileConcurrency = getUploadFileConcurrency; +// When uploading large files that can't be uploaded with a single http call, this controls +// the chunk size that is used during upload +function getUploadChunkSize() { + return 8 * 1024 * 1024; // 8 MB Chunks +} +exports.getUploadChunkSize = getUploadChunkSize; +// The maximum number of retries that can be attempted before an upload or download fails +function getRetryLimit() { + return 5; +} +exports.getRetryLimit = getRetryLimit; +// With exponential backoff, the larger the retry count, the larger the wait time before another attempt +// The retry multiplier controls by how much the backOff time increases depending on the number of retries +function getRetryMultiplier() { + return 1.5; +} +exports.getRetryMultiplier = getRetryMultiplier; +// The initial wait time if an upload or download fails and a retry is being attempted for the first time +function getInitialRetryIntervalInMilliseconds() { + return 3000; +} +exports.getInitialRetryIntervalInMilliseconds = getInitialRetryIntervalInMilliseconds; +// The number of concurrent downloads that happens at the same time +function getDownloadFileConcurrency() { + return 2; +} +exports.getDownloadFileConcurrency = getDownloadFileConcurrency; +function getRuntimeToken() { + const token = process.env['ACTIONS_RUNTIME_TOKEN']; + if (!token) { + throw new Error('Unable to get ACTIONS_RUNTIME_TOKEN env variable'); + } + return token; +} +exports.getRuntimeToken = getRuntimeToken; +function getRuntimeUrl() { + const runtimeUrl = process.env['ACTIONS_RUNTIME_URL']; + if (!runtimeUrl) { + throw new Error('Unable to get ACTIONS_RUNTIME_URL env variable'); + } + return runtimeUrl; +} +exports.getRuntimeUrl = getRuntimeUrl; +function getWorkFlowRunId() { + const workFlowRunId = process.env['GITHUB_RUN_ID']; + if (!workFlowRunId) { + throw new Error('Unable to get GITHUB_RUN_ID env variable'); + } + return workFlowRunId; +} +exports.getWorkFlowRunId = getWorkFlowRunId; +function getWorkSpaceDirectory() { + const workspaceDirectory = process.env['GITHUB_WORKSPACE']; + if (!workspaceDirectory) { + throw new Error('Unable to get GITHUB_WORKSPACE env variable'); + } + return workspaceDirectory; +} +exports.getWorkSpaceDirectory = getWorkSpaceDirectory; +function getRetentionDays() { + return process.env['GITHUB_RETENTION_DAYS']; +} +exports.getRetentionDays = getRetentionDays; +//# sourceMappingURL=config-variables.js.map \ No newline at end of file diff --git a/node_modules/@actions/artifact/lib/internal/config-variables.js.map b/node_modules/@actions/artifact/lib/internal/config-variables.js.map new file mode 100644 index 00000000..3032dd4e --- /dev/null +++ b/node_modules/@actions/artifact/lib/internal/config-variables.js.map @@ -0,0 +1 @@ 
+{"version":3,"file":"config-variables.js","sourceRoot":"","sources":["../../src/internal/config-variables.ts"],"names":[],"mappings":";;;AAAA,iEAAiE;AACjE,SAAgB,wBAAwB;IACtC,OAAO,CAAC,CAAA;AACV,CAAC;AAFD,4DAEC;AAED,2FAA2F;AAC3F,4CAA4C;AAC5C,SAAgB,kBAAkB;IAChC,OAAO,CAAC,GAAG,IAAI,GAAG,IAAI,CAAA,CAAC,cAAc;AACvC,CAAC;AAFD,gDAEC;AAED,yFAAyF;AACzF,SAAgB,aAAa;IAC3B,OAAO,CAAC,CAAA;AACV,CAAC;AAFD,sCAEC;AAED,wGAAwG;AACxG,0GAA0G;AAC1G,SAAgB,kBAAkB;IAChC,OAAO,GAAG,CAAA;AACZ,CAAC;AAFD,gDAEC;AAED,yGAAyG;AACzG,SAAgB,qCAAqC;IACnD,OAAO,IAAI,CAAA;AACb,CAAC;AAFD,sFAEC;AAED,mEAAmE;AACnE,SAAgB,0BAA0B;IACxC,OAAO,CAAC,CAAA;AACV,CAAC;AAFD,gEAEC;AAED,SAAgB,eAAe;IAC7B,MAAM,KAAK,GAAG,OAAO,CAAC,GAAG,CAAC,uBAAuB,CAAC,CAAA;IAClD,IAAI,CAAC,KAAK,EAAE;QACV,MAAM,IAAI,KAAK,CAAC,kDAAkD,CAAC,CAAA;KACpE;IACD,OAAO,KAAK,CAAA;AACd,CAAC;AAND,0CAMC;AAED,SAAgB,aAAa;IAC3B,MAAM,UAAU,GAAG,OAAO,CAAC,GAAG,CAAC,qBAAqB,CAAC,CAAA;IACrD,IAAI,CAAC,UAAU,EAAE;QACf,MAAM,IAAI,KAAK,CAAC,gDAAgD,CAAC,CAAA;KAClE;IACD,OAAO,UAAU,CAAA;AACnB,CAAC;AAND,sCAMC;AAED,SAAgB,gBAAgB;IAC9B,MAAM,aAAa,GAAG,OAAO,CAAC,GAAG,CAAC,eAAe,CAAC,CAAA;IAClD,IAAI,CAAC,aAAa,EAAE;QAClB,MAAM,IAAI,KAAK,CAAC,0CAA0C,CAAC,CAAA;KAC5D;IACD,OAAO,aAAa,CAAA;AACtB,CAAC;AAND,4CAMC;AAED,SAAgB,qBAAqB;IACnC,MAAM,kBAAkB,GAAG,OAAO,CAAC,GAAG,CAAC,kBAAkB,CAAC,CAAA;IAC1D,IAAI,CAAC,kBAAkB,EAAE;QACvB,MAAM,IAAI,KAAK,CAAC,6CAA6C,CAAC,CAAA;KAC/D;IACD,OAAO,kBAAkB,CAAA;AAC3B,CAAC;AAND,sDAMC;AAED,SAAgB,gBAAgB;IAC9B,OAAO,OAAO,CAAC,GAAG,CAAC,uBAAuB,CAAC,CAAA;AAC7C,CAAC;AAFD,4CAEC"} \ No newline at end of file diff --git a/node_modules/@actions/artifact/lib/internal/contracts.d.ts b/node_modules/@actions/artifact/lib/internal/contracts.d.ts new file mode 100644 index 00000000..943f689f --- /dev/null +++ b/node_modules/@actions/artifact/lib/internal/contracts.d.ts @@ -0,0 +1,67 @@ +export interface ArtifactResponse { + containerId: string; + size: number; + signedContent: string; + fileContainerResourceUrl: string; + type: string; + name: string; + url: string; +} +export interface CreateArtifactParameters { + Type: string; + Name: string; + RetentionDays?: number; +} +export interface PatchArtifactSize { + Size: number; +} +export interface PatchArtifactSizeSuccessResponse { + containerId: number; + size: number; + signedContent: string; + type: string; + name: string; + url: string; + uploadUrl: string; +} +export interface UploadResults { + /** + * The size in bytes of data that was transferred during the upload process to the actions backend service. 
This takes into account possible + * gzip compression to reduce the amount of data that needs to be transferred + */ + uploadSize: number; + /** + * The raw size of the files that were specified for upload + */ + totalSize: number; + /** + * An array of files that failed to upload + */ + failedItems: string[]; +} +export interface ListArtifactsResponse { + count: number; + value: ArtifactResponse[]; +} +export interface QueryArtifactResponse { + count: number; + value: ContainerEntry[]; +} +export interface ContainerEntry { + containerId: number; + scopeIdentifier: string; + path: string; + itemType: string; + status: string; + fileLength?: number; + fileEncoding?: number; + fileType?: number; + dateCreated: string; + dateLastModified: string; + createdBy: string; + lastModifiedBy: string; + itemLocation: string; + contentLocation: string; + fileId?: number; + contentId: string; +} diff --git a/node_modules/@actions/artifact/lib/internal/contracts.js b/node_modules/@actions/artifact/lib/internal/contracts.js new file mode 100644 index 00000000..705cd38a --- /dev/null +++ b/node_modules/@actions/artifact/lib/internal/contracts.js @@ -0,0 +1,3 @@ +"use strict"; +Object.defineProperty(exports, "__esModule", { value: true }); +//# sourceMappingURL=contracts.js.map \ No newline at end of file diff --git a/node_modules/@actions/artifact/lib/internal/contracts.js.map b/node_modules/@actions/artifact/lib/internal/contracts.js.map new file mode 100644 index 00000000..4bd17e2a --- /dev/null +++ b/node_modules/@actions/artifact/lib/internal/contracts.js.map @@ -0,0 +1 @@ +{"version":3,"file":"contracts.js","sourceRoot":"","sources":["../../src/internal/contracts.ts"],"names":[],"mappings":""} \ No newline at end of file diff --git a/node_modules/@actions/artifact/lib/internal/crc64.d.ts b/node_modules/@actions/artifact/lib/internal/crc64.d.ts new file mode 100644 index 00000000..bab5caed --- /dev/null +++ b/node_modules/@actions/artifact/lib/internal/crc64.d.ts @@ -0,0 +1,21 @@ +/** + * CRC64: cyclic redundancy check, 64-bits + * + * In order to validate that artifacts are not being corrupted over the wire, this redundancy check allows us to + * validate that there was no corruption during transmission. The implementation here is based on Go's hash/crc64 pkg, + * but without the slicing-by-8 optimization: https://cs.opensource.google/go/go/+/master:src/hash/crc64/crc64.go + * + * This implementation uses a pregenerated table based on 0x9A6C9329AC4BC9B5 as the polynomial, the same polynomial that + * is used for Azure Storage: https://github.com/Azure/azure-storage-net/blob/cbe605f9faa01bfc3003d75fc5a16b2eaccfe102/Lib/Common/Core/Util/Crc64.cs#L27 + */ +/// +export declare type CRC64DigestEncoding = 'hex' | 'base64' | 'buffer'; +declare class CRC64 { + private _crc; + constructor(); + update(data: Buffer | string): void; + digest(encoding?: CRC64DigestEncoding): string | Buffer; + private toBuffer; + static flip64Bits(n: bigint): bigint; +} +export default CRC64; diff --git a/node_modules/@actions/artifact/lib/internal/crc64.js b/node_modules/@actions/artifact/lib/internal/crc64.js new file mode 100644 index 00000000..56971fd1 --- /dev/null +++ b/node_modules/@actions/artifact/lib/internal/crc64.js @@ -0,0 +1,303 @@ +"use strict"; +/** + * CRC64: cyclic redundancy check, 64-bits + * + * In order to validate that artifacts are not being corrupted over the wire, this redundancy check allows us to + * validate that there was no corruption during transmission. 
The implementation here is based on Go's hash/crc64 pkg, + * but without the slicing-by-8 optimization: https://cs.opensource.google/go/go/+/master:src/hash/crc64/crc64.go + * + * This implementation uses a pregenerated table based on 0x9A6C9329AC4BC9B5 as the polynomial, the same polynomial that + * is used for Azure Storage: https://github.com/Azure/azure-storage-net/blob/cbe605f9faa01bfc3003d75fc5a16b2eaccfe102/Lib/Common/Core/Util/Crc64.cs#L27 + */ +Object.defineProperty(exports, "__esModule", { value: true }); +// when transpile target is >= ES2020 (after dropping node 12) these can be changed to bigint literals - ts(2737) +const PREGEN_POLY_TABLE = [ + BigInt('0x0000000000000000'), + BigInt('0x7F6EF0C830358979'), + BigInt('0xFEDDE190606B12F2'), + BigInt('0x81B31158505E9B8B'), + BigInt('0xC962E5739841B68F'), + BigInt('0xB60C15BBA8743FF6'), + BigInt('0x37BF04E3F82AA47D'), + BigInt('0x48D1F42BC81F2D04'), + BigInt('0xA61CECB46814FE75'), + BigInt('0xD9721C7C5821770C'), + BigInt('0x58C10D24087FEC87'), + BigInt('0x27AFFDEC384A65FE'), + BigInt('0x6F7E09C7F05548FA'), + BigInt('0x1010F90FC060C183'), + BigInt('0x91A3E857903E5A08'), + BigInt('0xEECD189FA00BD371'), + BigInt('0x78E0FF3B88BE6F81'), + BigInt('0x078E0FF3B88BE6F8'), + BigInt('0x863D1EABE8D57D73'), + BigInt('0xF953EE63D8E0F40A'), + BigInt('0xB1821A4810FFD90E'), + BigInt('0xCEECEA8020CA5077'), + BigInt('0x4F5FFBD87094CBFC'), + BigInt('0x30310B1040A14285'), + BigInt('0xDEFC138FE0AA91F4'), + BigInt('0xA192E347D09F188D'), + BigInt('0x2021F21F80C18306'), + BigInt('0x5F4F02D7B0F40A7F'), + BigInt('0x179EF6FC78EB277B'), + BigInt('0x68F0063448DEAE02'), + BigInt('0xE943176C18803589'), + BigInt('0x962DE7A428B5BCF0'), + BigInt('0xF1C1FE77117CDF02'), + BigInt('0x8EAF0EBF2149567B'), + BigInt('0x0F1C1FE77117CDF0'), + BigInt('0x7072EF2F41224489'), + BigInt('0x38A31B04893D698D'), + BigInt('0x47CDEBCCB908E0F4'), + BigInt('0xC67EFA94E9567B7F'), + BigInt('0xB9100A5CD963F206'), + BigInt('0x57DD12C379682177'), + BigInt('0x28B3E20B495DA80E'), + BigInt('0xA900F35319033385'), + BigInt('0xD66E039B2936BAFC'), + BigInt('0x9EBFF7B0E12997F8'), + BigInt('0xE1D10778D11C1E81'), + BigInt('0x606216208142850A'), + BigInt('0x1F0CE6E8B1770C73'), + BigInt('0x8921014C99C2B083'), + BigInt('0xF64FF184A9F739FA'), + BigInt('0x77FCE0DCF9A9A271'), + BigInt('0x08921014C99C2B08'), + BigInt('0x4043E43F0183060C'), + BigInt('0x3F2D14F731B68F75'), + BigInt('0xBE9E05AF61E814FE'), + BigInt('0xC1F0F56751DD9D87'), + BigInt('0x2F3DEDF8F1D64EF6'), + BigInt('0x50531D30C1E3C78F'), + BigInt('0xD1E00C6891BD5C04'), + BigInt('0xAE8EFCA0A188D57D'), + BigInt('0xE65F088B6997F879'), + BigInt('0x9931F84359A27100'), + BigInt('0x1882E91B09FCEA8B'), + BigInt('0x67EC19D339C963F2'), + BigInt('0xD75ADABD7A6E2D6F'), + BigInt('0xA8342A754A5BA416'), + BigInt('0x29873B2D1A053F9D'), + BigInt('0x56E9CBE52A30B6E4'), + BigInt('0x1E383FCEE22F9BE0'), + BigInt('0x6156CF06D21A1299'), + BigInt('0xE0E5DE5E82448912'), + BigInt('0x9F8B2E96B271006B'), + BigInt('0x71463609127AD31A'), + BigInt('0x0E28C6C1224F5A63'), + BigInt('0x8F9BD7997211C1E8'), + BigInt('0xF0F5275142244891'), + BigInt('0xB824D37A8A3B6595'), + BigInt('0xC74A23B2BA0EECEC'), + BigInt('0x46F932EAEA507767'), + BigInt('0x3997C222DA65FE1E'), + BigInt('0xAFBA2586F2D042EE'), + BigInt('0xD0D4D54EC2E5CB97'), + BigInt('0x5167C41692BB501C'), + BigInt('0x2E0934DEA28ED965'), + BigInt('0x66D8C0F56A91F461'), + BigInt('0x19B6303D5AA47D18'), + BigInt('0x980521650AFAE693'), + BigInt('0xE76BD1AD3ACF6FEA'), + BigInt('0x09A6C9329AC4BC9B'), + BigInt('0x76C839FAAAF135E2'), + 
BigInt('0xF77B28A2FAAFAE69'), + BigInt('0x8815D86ACA9A2710'), + BigInt('0xC0C42C4102850A14'), + BigInt('0xBFAADC8932B0836D'), + BigInt('0x3E19CDD162EE18E6'), + BigInt('0x41773D1952DB919F'), + BigInt('0x269B24CA6B12F26D'), + BigInt('0x59F5D4025B277B14'), + BigInt('0xD846C55A0B79E09F'), + BigInt('0xA72835923B4C69E6'), + BigInt('0xEFF9C1B9F35344E2'), + BigInt('0x90973171C366CD9B'), + BigInt('0x1124202993385610'), + BigInt('0x6E4AD0E1A30DDF69'), + BigInt('0x8087C87E03060C18'), + BigInt('0xFFE938B633338561'), + BigInt('0x7E5A29EE636D1EEA'), + BigInt('0x0134D92653589793'), + BigInt('0x49E52D0D9B47BA97'), + BigInt('0x368BDDC5AB7233EE'), + BigInt('0xB738CC9DFB2CA865'), + BigInt('0xC8563C55CB19211C'), + BigInt('0x5E7BDBF1E3AC9DEC'), + BigInt('0x21152B39D3991495'), + BigInt('0xA0A63A6183C78F1E'), + BigInt('0xDFC8CAA9B3F20667'), + BigInt('0x97193E827BED2B63'), + BigInt('0xE877CE4A4BD8A21A'), + BigInt('0x69C4DF121B863991'), + BigInt('0x16AA2FDA2BB3B0E8'), + BigInt('0xF86737458BB86399'), + BigInt('0x8709C78DBB8DEAE0'), + BigInt('0x06BAD6D5EBD3716B'), + BigInt('0x79D4261DDBE6F812'), + BigInt('0x3105D23613F9D516'), + BigInt('0x4E6B22FE23CC5C6F'), + BigInt('0xCFD833A67392C7E4'), + BigInt('0xB0B6C36E43A74E9D'), + BigInt('0x9A6C9329AC4BC9B5'), + BigInt('0xE50263E19C7E40CC'), + BigInt('0x64B172B9CC20DB47'), + BigInt('0x1BDF8271FC15523E'), + BigInt('0x530E765A340A7F3A'), + BigInt('0x2C608692043FF643'), + BigInt('0xADD397CA54616DC8'), + BigInt('0xD2BD67026454E4B1'), + BigInt('0x3C707F9DC45F37C0'), + BigInt('0x431E8F55F46ABEB9'), + BigInt('0xC2AD9E0DA4342532'), + BigInt('0xBDC36EC59401AC4B'), + BigInt('0xF5129AEE5C1E814F'), + BigInt('0x8A7C6A266C2B0836'), + BigInt('0x0BCF7B7E3C7593BD'), + BigInt('0x74A18BB60C401AC4'), + BigInt('0xE28C6C1224F5A634'), + BigInt('0x9DE29CDA14C02F4D'), + BigInt('0x1C518D82449EB4C6'), + BigInt('0x633F7D4A74AB3DBF'), + BigInt('0x2BEE8961BCB410BB'), + BigInt('0x548079A98C8199C2'), + BigInt('0xD53368F1DCDF0249'), + BigInt('0xAA5D9839ECEA8B30'), + BigInt('0x449080A64CE15841'), + BigInt('0x3BFE706E7CD4D138'), + BigInt('0xBA4D61362C8A4AB3'), + BigInt('0xC52391FE1CBFC3CA'), + BigInt('0x8DF265D5D4A0EECE'), + BigInt('0xF29C951DE49567B7'), + BigInt('0x732F8445B4CBFC3C'), + BigInt('0x0C41748D84FE7545'), + BigInt('0x6BAD6D5EBD3716B7'), + BigInt('0x14C39D968D029FCE'), + BigInt('0x95708CCEDD5C0445'), + BigInt('0xEA1E7C06ED698D3C'), + BigInt('0xA2CF882D2576A038'), + BigInt('0xDDA178E515432941'), + BigInt('0x5C1269BD451DB2CA'), + BigInt('0x237C997575283BB3'), + BigInt('0xCDB181EAD523E8C2'), + BigInt('0xB2DF7122E51661BB'), + BigInt('0x336C607AB548FA30'), + BigInt('0x4C0290B2857D7349'), + BigInt('0x04D364994D625E4D'), + BigInt('0x7BBD94517D57D734'), + BigInt('0xFA0E85092D094CBF'), + BigInt('0x856075C11D3CC5C6'), + BigInt('0x134D926535897936'), + BigInt('0x6C2362AD05BCF04F'), + BigInt('0xED9073F555E26BC4'), + BigInt('0x92FE833D65D7E2BD'), + BigInt('0xDA2F7716ADC8CFB9'), + BigInt('0xA54187DE9DFD46C0'), + BigInt('0x24F29686CDA3DD4B'), + BigInt('0x5B9C664EFD965432'), + BigInt('0xB5517ED15D9D8743'), + BigInt('0xCA3F8E196DA80E3A'), + BigInt('0x4B8C9F413DF695B1'), + BigInt('0x34E26F890DC31CC8'), + BigInt('0x7C339BA2C5DC31CC'), + BigInt('0x035D6B6AF5E9B8B5'), + BigInt('0x82EE7A32A5B7233E'), + BigInt('0xFD808AFA9582AA47'), + BigInt('0x4D364994D625E4DA'), + BigInt('0x3258B95CE6106DA3'), + BigInt('0xB3EBA804B64EF628'), + BigInt('0xCC8558CC867B7F51'), + BigInt('0x8454ACE74E645255'), + BigInt('0xFB3A5C2F7E51DB2C'), + BigInt('0x7A894D772E0F40A7'), + BigInt('0x05E7BDBF1E3AC9DE'), + BigInt('0xEB2AA520BE311AAF'), + 
BigInt('0x944455E88E0493D6'), + BigInt('0x15F744B0DE5A085D'), + BigInt('0x6A99B478EE6F8124'), + BigInt('0x224840532670AC20'), + BigInt('0x5D26B09B16452559'), + BigInt('0xDC95A1C3461BBED2'), + BigInt('0xA3FB510B762E37AB'), + BigInt('0x35D6B6AF5E9B8B5B'), + BigInt('0x4AB846676EAE0222'), + BigInt('0xCB0B573F3EF099A9'), + BigInt('0xB465A7F70EC510D0'), + BigInt('0xFCB453DCC6DA3DD4'), + BigInt('0x83DAA314F6EFB4AD'), + BigInt('0x0269B24CA6B12F26'), + BigInt('0x7D0742849684A65F'), + BigInt('0x93CA5A1B368F752E'), + BigInt('0xECA4AAD306BAFC57'), + BigInt('0x6D17BB8B56E467DC'), + BigInt('0x12794B4366D1EEA5'), + BigInt('0x5AA8BF68AECEC3A1'), + BigInt('0x25C64FA09EFB4AD8'), + BigInt('0xA4755EF8CEA5D153'), + BigInt('0xDB1BAE30FE90582A'), + BigInt('0xBCF7B7E3C7593BD8'), + BigInt('0xC399472BF76CB2A1'), + BigInt('0x422A5673A732292A'), + BigInt('0x3D44A6BB9707A053'), + BigInt('0x759552905F188D57'), + BigInt('0x0AFBA2586F2D042E'), + BigInt('0x8B48B3003F739FA5'), + BigInt('0xF42643C80F4616DC'), + BigInt('0x1AEB5B57AF4DC5AD'), + BigInt('0x6585AB9F9F784CD4'), + BigInt('0xE436BAC7CF26D75F'), + BigInt('0x9B584A0FFF135E26'), + BigInt('0xD389BE24370C7322'), + BigInt('0xACE74EEC0739FA5B'), + BigInt('0x2D545FB4576761D0'), + BigInt('0x523AAF7C6752E8A9'), + BigInt('0xC41748D84FE75459'), + BigInt('0xBB79B8107FD2DD20'), + BigInt('0x3ACAA9482F8C46AB'), + BigInt('0x45A459801FB9CFD2'), + BigInt('0x0D75ADABD7A6E2D6'), + BigInt('0x721B5D63E7936BAF'), + BigInt('0xF3A84C3BB7CDF024'), + BigInt('0x8CC6BCF387F8795D'), + BigInt('0x620BA46C27F3AA2C'), + BigInt('0x1D6554A417C62355'), + BigInt('0x9CD645FC4798B8DE'), + BigInt('0xE3B8B53477AD31A7'), + BigInt('0xAB69411FBFB21CA3'), + BigInt('0xD407B1D78F8795DA'), + BigInt('0x55B4A08FDFD90E51'), + BigInt('0x2ADA5047EFEC8728') +]; +class CRC64 { + constructor() { + this._crc = BigInt(0); + } + update(data) { + const buffer = typeof data === 'string' ? 
Buffer.from(data) : data; + let crc = CRC64.flip64Bits(this._crc); + for (const dataByte of buffer) { + const crcByte = Number(crc & BigInt(0xff)); + crc = PREGEN_POLY_TABLE[crcByte ^ dataByte] ^ (crc >> BigInt(8)); + } + this._crc = CRC64.flip64Bits(crc); + } + digest(encoding) { + switch (encoding) { + case 'hex': + return this._crc.toString(16).toUpperCase(); + case 'base64': + return this.toBuffer().toString('base64'); + default: + return this.toBuffer(); + } + } + toBuffer() { + return Buffer.from([0, 8, 16, 24, 32, 40, 48, 56].map(s => Number((this._crc >> BigInt(s)) & BigInt(0xff)))); + } + static flip64Bits(n) { + return (BigInt(1) << BigInt(64)) - BigInt(1) - n; + } +} +exports.default = CRC64; +//# sourceMappingURL=crc64.js.map \ No newline at end of file diff --git a/node_modules/@actions/artifact/lib/internal/crc64.js.map b/node_modules/@actions/artifact/lib/internal/crc64.js.map new file mode 100644 index 00000000..97a221ba --- /dev/null +++ b/node_modules/@actions/artifact/lib/internal/crc64.js.map @@ -0,0 +1 @@ +{"version":3,"file":"crc64.js","sourceRoot":"","sources":["../../src/internal/crc64.ts"],"names":[],"mappings":";AAAA;;;;;;;;;GASG;;AAEH,iHAAiH;AACjH,MAAM,iBAAiB,GAAG;IACxB,MAAM,CAAC,oBAAoB,CAAC;IAC5B,MAAM,CAAC,oBAAoB,CAAC;IAC5B,MAAM,CAAC,oBAAoB,CAAC;IAC5B,MAAM,CAAC,oBAAoB,CAAC;IAC5B,MAAM,CAAC,oBAAoB,CAAC;IAC5B,MAAM,CAAC,oBAAoB,CAAC;IAC5B,MAAM,CAAC,oBAAoB,CAAC;IAC5B,MAAM,CAAC,oBAAoB,CAAC;IAC5B,MAAM,CAAC,oBAAoB,CAAC;IAC5B,MAAM,CAAC,oBAAoB,CAAC;IAC5B,MAAM,CAAC,oBAAoB,CAAC;IAC5B,MAAM,CAAC,oBAAoB,CAAC;IAC5B,MAAM,CAAC,oBAAoB,CAAC;IAC5B,MAAM,CAAC,oBAAoB,CAAC;IAC5B,MAAM,CAAC,oBAAoB,CAAC;IAC5B,MAAM,CAAC,oBAAoB,CAAC;IAC5B,MAAM,CAAC,oBAAoB,CAAC;IAC5B,MAAM,CAAC,oBAAoB,CAAC;IAC5B,MAAM,CAAC,oBAAoB,CAAC;IAC5B,MAAM,CAAC,oBAAoB,CAAC;IAC5B,MAAM,CAAC,oBAAoB,CAAC;IAC5B,MAAM,CAAC,oBAAoB,CAAC;IAC5B,MAAM,CAAC,oBAAoB,CAAC;IAC5B,MAAM,CAAC,oBAAoB,CAAC;IAC5B,MAAM,CAAC,oBAAoB,CAAC;IAC5B,MAAM,CAAC,oBAAoB,CAAC;IAC5B,MAAM,CAAC,oBAAoB,CAAC;IAC5B,MAAM,CAAC,oBAAoB,CAAC;IAC5B,MAAM,CAAC,oBAAoB,CAAC;IAC5B,MAAM,CAAC,oBAAoB,CAAC;IAC5B,MAAM,CAAC,oBAAoB,CAAC;IAC5B,MAAM,CAAC,oBAAoB,CAAC;IAC5B,MAAM,CAAC,oBAAoB,CAAC;IAC5B,MAAM,CAAC,oBAAoB,CAAC;IAC5B,MAAM,CAAC,oBAAoB,CAAC;IAC5B,MAAM,CAAC,oBAAoB,CAAC;IAC5B,MAAM,CAAC,oBAAoB,CAAC;IAC5B,MAAM,CAAC,oBAAoB,CAAC;IAC5B,MAAM,CAAC,oBAAoB,CAAC;IAC5B,MAAM,CAAC,oBAAoB,CAAC;IAC5B,MAAM,CAAC,oBAAoB,CAAC;IAC5B,MAAM,CAAC,oBAAoB,CAAC;IAC5B,MAAM,CAAC,oBAAoB,CAAC;IAC5B,MAAM,CAAC,oBAAoB,CAAC;IAC5B,MAAM,CAAC,oBAAoB,CAAC;IAC5B,MAAM,CAAC,oBAAoB,CAAC;IAC5B,MAAM,CAAC,oBAAoB,CAAC;IAC5B,MAAM,CAAC,oBAAoB,CAAC;IAC5B,MAAM,CAAC,oBAAoB,CAAC;IAC5B,MAAM,CAAC,oBAAoB,CAAC;IAC5B,MAAM,CAAC,oBAAoB,CAAC;IAC5B,MAAM,CAAC,oBAAoB,CAAC;IAC5B,MAAM,CAAC,oBAAoB,CAAC;IAC5B,MAAM,CAAC,oBAAoB,CAAC;IAC5B,MAAM,CAAC,oBAAoB,CAAC;IAC5B,MAAM,CAAC,oBAAoB,CAAC;IAC5B,MAAM,CAAC,oBAAoB,CAAC;IAC5B,MAAM,CAAC,oBAAoB,CAAC;IAC5B,MAAM,CAAC,oBAAoB,CAAC;IAC5B,MAAM,CAAC,oBAAoB,CAAC;IAC5B,MAAM,CAAC,oBAAoB,CAAC;IAC5B,MAAM,CAAC,oBAAoB,CAAC;IAC5B,MAAM,CAAC,oBAAoB,CAAC;IAC5B,MAAM,CAAC,oBAAoB,CAAC;IAC5B,MAAM,CAAC,oBAAoB,CAAC;IAC5B,MAAM,CAAC,oBAAoB,CAAC;IAC5B,MAAM,CAAC,oBAAoB,CAAC;IAC5B,MAAM,CAAC,oBAAoB,CAAC;IAC5B,MAAM,CAAC,oBAAoB,CAAC;IAC5B,MAAM,CAAC,oBAAoB,CAAC;IAC5B,MAAM,CAAC,oBAAoB,CAAC;IAC5B,MAAM,CAAC,oBAAoB,CAAC;IAC5B,MAAM,CAAC,oBAAoB,CAAC;IAC5B,MAAM,CAAC,oBAAoB,CAAC;IAC5B,MAAM,CAAC,oBAAoB,CAAC;IAC5B,MAAM,CAAC,oBAAoB,CAAC;IAC5B,MAAM,CAAC,oBAAoB,CAAC;IAC5B,MAAM,CAAC,oBAAoB,CAAC;IAC5B,MAAM,CAAC,oBAAoB,CAAC;IAC5B,MAAM,CAAC,oBAAoB,CAAC;IAC5B,MAAM,CAAC,oBAAoB,CAAC;IAC5B,MAAM,CAAC,oBAAoB,CAAC;IAC5B,MAAM,CAAC,oBAAoB,CAAC;IAC5B,MAAM,CAAC,oBAAo
B,CAAC;IAC5B,MAAM,CAAC,oBAAoB,CAAC;IAC5B,MAAM,CAAC,oBAAoB,CAAC;IAC5B,MAAM,CAAC,oBAAoB,CAAC;IAC5B,MAAM,CAAC,oBAAoB,CAAC;IAC5B,MAAM,CAAC,oBAAoB,CAAC;IAC5B,MAAM,CAAC,oBAAoB,CAAC;IAC5B,MAAM,CAAC,oBAAoB,CAAC;IAC5B,MAAM,CAAC,oBAAoB,CAAC;IAC5B,MAAM,CAAC,oBAAoB,CAAC;IAC5B,MAAM,CAAC,oBAAoB,CAAC;IAC5B,MAAM,CAAC,oBAAoB,CAAC;IAC5B,MAAM,CAAC,oBAAoB,CAAC;IAC5B,MAAM,CAAC,oBAAoB,CAAC;IAC5B,MAAM,CAAC,oBAAoB,CAAC;IAC5B,MAAM,CAAC,oBAAoB,CAAC;IAC5B,MAAM,CAAC,oBAAoB,CAAC;IAC5B,MAAM,CAAC,oBAAoB,CAAC;IAC5B,MAAM,CAAC,oBAAoB,CAAC;IAC5B,MAAM,CAAC,oBAAoB,CAAC;IAC5B,MAAM,CAAC,oBAAoB,CAAC;IAC5B,MAAM,CAAC,oBAAoB,CAAC;IAC5B,MAAM,CAAC,oBAAoB,CAAC;IAC5B,MAAM,CAAC,oBAAoB,CAAC;IAC5B,MAAM,CAAC,oBAAoB,CAAC;IAC5B,MAAM,CAAC,oBAAoB,CAAC;IAC5B,MAAM,CAAC,oBAAoB,CAAC;IAC5B,MAAM,CAAC,oBAAoB,CAAC;IAC5B,MAAM,CAAC,oBAAoB,CAAC;IAC5B,MAAM,CAAC,oBAAoB,CAAC;IAC5B,MAAM,CAAC,oBAAoB,CAAC;IAC5B,MAAM,CAAC,oBAAoB,CAAC;IAC5B,MAAM,CAAC,oBAAoB,CAAC;IAC5B,MAAM,CAAC,oBAAoB,CAAC;IAC5B,MAAM,CAAC,oBAAoB,CAAC;IAC5B,MAAM,CAAC,oBAAoB,CAAC;IAC5B,MAAM,CAAC,oBAAoB,CAAC;IAC5B,MAAM,CAAC,oBAAoB,CAAC;IAC5B,MAAM,CAAC,oBAAoB,CAAC;IAC5B,MAAM,CAAC,oBAAoB,CAAC;IAC5B,MAAM,CAAC,oBAAoB,CAAC;IAC5B,MAAM,CAAC,oBAAoB,CAAC;IAC5B,MAAM,CAAC,oBAAoB,CAAC;IAC5B,MAAM,CAAC,oBAAoB,CAAC;IAC5B,MAAM,CAAC,oBAAoB,CAAC;IAC5B,MAAM,CAAC,oBAAoB,CAAC;IAC5B,MAAM,CAAC,oBAAoB,CAAC;IAC5B,MAAM,CAAC,oBAAoB,CAAC;IAC5B,MAAM,CAAC,oBAAoB,CAAC;IAC5B,MAAM,CAAC,oBAAoB,CAAC;IAC5B,MAAM,CAAC,oBAAoB,CAAC;IAC5B,MAAM,CAAC,oBAAoB,CAAC;IAC5B,MAAM,CAAC,oBAAoB,CAAC;IAC5B,MAAM,CAAC,oBAAoB,CAAC;IAC5B,MAAM,CAAC,oBAAoB,CAAC;IAC5B,MAAM,CAAC,oBAAoB,CAAC;IAC5B,MAAM,CAAC,oBAAoB,CAAC;IAC5B,MAAM,CAAC,oBAAoB,CAAC;IAC5B,MAAM,CAAC,oBAAoB,CAAC;IAC5B,MAAM,CAAC,oBAAoB,CAAC;IAC5B,MAAM,CAAC,oBAAoB,CAAC;IAC5B,MAAM,CAAC,oBAAoB,CAAC;IAC5B,MAAM,CAAC,oBAAoB,CAAC;IAC5B,MAAM,CAAC,oBAAoB,CAAC;IAC5B,MAAM,CAAC,oBAAoB,CAAC;IAC5B,MAAM,CAAC,oBAAoB,CAAC;IAC5B,MAAM,CAAC,oBAAoB,CAAC;IAC5B,MAAM,CAAC,oBAAoB,CAAC;IAC5B,MAAM,CAAC,oBAAoB,CAAC;IAC5B,MAAM,CAAC,oBAAoB,CAAC;IAC5B,MAAM,CAAC,oBAAoB,CAAC;IAC5B,MAAM,CAAC,oBAAoB,CAAC;IAC5B,MAAM,CAAC,oBAAoB,CAAC;IAC5B,MAAM,CAAC,oBAAoB,CAAC;IAC5B,MAAM,CAAC,oBAAoB,CAAC;IAC5B,MAAM,CAAC,oBAAoB,CAAC;IAC5B,MAAM,CAAC,oBAAoB,CAAC;IAC5B,MAAM,CAAC,oBAAoB,CAAC;IAC5B,MAAM,CAAC,oBAAoB,CAAC;IAC5B,MAAM,CAAC,oBAAoB,CAAC;IAC5B,MAAM,CAAC,oBAAoB,CAAC;IAC5B,MAAM,CAAC,oBAAoB,CAAC;IAC5B,MAAM,CAAC,oBAAoB,CAAC;IAC5B,MAAM,CAAC,oBAAoB,CAAC;IAC5B,MAAM,CAAC,oBAAoB,CAAC;IAC5B,MAAM,CAAC,oBAAoB,CAAC;IAC5B,MAAM,CAAC,oBAAoB,CAAC;IAC5B,MAAM,CAAC,oBAAoB,CAAC;IAC5B,MAAM,CAAC,oBAAoB,CAAC;IAC5B,MAAM,CAAC,oBAAoB,CAAC;IAC5B,MAAM,CAAC,oBAAoB,CAAC;IAC5B,MAAM,CAAC,oBAAoB,CAAC;IAC5B,MAAM,CAAC,oBAAoB,CAAC;IAC5B,MAAM,CAAC,oBAAoB,CAAC;IAC5B,MAAM,CAAC,oBAAoB,CAAC;IAC5B,MAAM,CAAC,oBAAoB,CAAC;IAC5B,MAAM,CAAC,oBAAoB,CAAC;IAC5B,MAAM,CAAC,oBAAoB,CAAC;IAC5B,MAAM,CAAC,oBAAoB,CAAC;IAC5B,MAAM,CAAC,oBAAoB,CAAC;IAC5B,MAAM,CAAC,oBAAoB,CAAC;IAC5B,MAAM,CAAC,oBAAoB,CAAC;IAC5B,MAAM,CAAC,oBAAoB,CAAC;IAC5B,MAAM,CAAC,oBAAoB,CAAC;IAC5B,MAAM,CAAC,oBAAoB,CAAC;IAC5B,MAAM,CAAC,oBAAoB,CAAC;IAC5B,MAAM,CAAC,oBAAoB,CAAC;IAC5B,MAAM,CAAC,oBAAoB,CAAC;IAC5B,MAAM,CAAC,oBAAoB,CAAC;IAC5B,MAAM,CAAC,oBAAoB,CAAC;IAC5B,MAAM,CAAC,oBAAoB,CAAC;IAC5B,MAAM,CAAC,oBAAoB,CAAC;IAC5B,MAAM,CAAC,oBAAoB,CAAC;IAC5B,MAAM,CAAC,oBAAoB,CAAC;IAC5B,MAAM,CAAC,oBAAoB,CAAC;IAC5B,MAAM,CAAC,oBAAoB,CAAC;IAC5B,MAAM,CAAC,oBAAoB,CAAC;IAC5B,MAAM,CAAC,oBAAoB,CAAC;IAC5B,MAAM,CAAC,oBAAoB,CAAC;IAC5B,MAAM,CAAC,oBAAoB,CAAC;IAC5B,MAAM,CAAC,oBAAoB,CAAC;IAC5B,MAAM,CAAC,oBAAoB,CAAC;IAC5B,MAAM,CAAC,oBAAoB,CAAC;IAC5B,MAAM,CAAC,oBAAoB,CAAC;IAC5B,MAAM,CAAC,oBAAoB,CAAC;IAC5B,MAAM,CAAC,oBAAoB,CAAC;IAC5B,MAAM,CAAC,oBAAoB,CAAC;IAC5B,MAAM,CAAC,oBAA
oB,CAAC;IAC5B,MAAM,CAAC,oBAAoB,CAAC;IAC5B,MAAM,CAAC,oBAAoB,CAAC;IAC5B,MAAM,CAAC,oBAAoB,CAAC;IAC5B,MAAM,CAAC,oBAAoB,CAAC;IAC5B,MAAM,CAAC,oBAAoB,CAAC;IAC5B,MAAM,CAAC,oBAAoB,CAAC;IAC5B,MAAM,CAAC,oBAAoB,CAAC;IAC5B,MAAM,CAAC,oBAAoB,CAAC;IAC5B,MAAM,CAAC,oBAAoB,CAAC;IAC5B,MAAM,CAAC,oBAAoB,CAAC;IAC5B,MAAM,CAAC,oBAAoB,CAAC;IAC5B,MAAM,CAAC,oBAAoB,CAAC;IAC5B,MAAM,CAAC,oBAAoB,CAAC;IAC5B,MAAM,CAAC,oBAAoB,CAAC;IAC5B,MAAM,CAAC,oBAAoB,CAAC;IAC5B,MAAM,CAAC,oBAAoB,CAAC;IAC5B,MAAM,CAAC,oBAAoB,CAAC;IAC5B,MAAM,CAAC,oBAAoB,CAAC;IAC5B,MAAM,CAAC,oBAAoB,CAAC;IAC5B,MAAM,CAAC,oBAAoB,CAAC;IAC5B,MAAM,CAAC,oBAAoB,CAAC;IAC5B,MAAM,CAAC,oBAAoB,CAAC;IAC5B,MAAM,CAAC,oBAAoB,CAAC;IAC5B,MAAM,CAAC,oBAAoB,CAAC;IAC5B,MAAM,CAAC,oBAAoB,CAAC;IAC5B,MAAM,CAAC,oBAAoB,CAAC;IAC5B,MAAM,CAAC,oBAAoB,CAAC;IAC5B,MAAM,CAAC,oBAAoB,CAAC;IAC5B,MAAM,CAAC,oBAAoB,CAAC;IAC5B,MAAM,CAAC,oBAAoB,CAAC;IAC5B,MAAM,CAAC,oBAAoB,CAAC;IAC5B,MAAM,CAAC,oBAAoB,CAAC;IAC5B,MAAM,CAAC,oBAAoB,CAAC;IAC5B,MAAM,CAAC,oBAAoB,CAAC;IAC5B,MAAM,CAAC,oBAAoB,CAAC;IAC5B,MAAM,CAAC,oBAAoB,CAAC;IAC5B,MAAM,CAAC,oBAAoB,CAAC;IAC5B,MAAM,CAAC,oBAAoB,CAAC;IAC5B,MAAM,CAAC,oBAAoB,CAAC;IAC5B,MAAM,CAAC,oBAAoB,CAAC;IAC5B,MAAM,CAAC,oBAAoB,CAAC;IAC5B,MAAM,CAAC,oBAAoB,CAAC;IAC5B,MAAM,CAAC,oBAAoB,CAAC;IAC5B,MAAM,CAAC,oBAAoB,CAAC;IAC5B,MAAM,CAAC,oBAAoB,CAAC;CAC7B,CAAA;AAID,MAAM,KAAK;IAGT;QACE,IAAI,CAAC,IAAI,GAAG,MAAM,CAAC,CAAC,CAAC,CAAA;IACvB,CAAC;IAED,MAAM,CAAC,IAAqB;QAC1B,MAAM,MAAM,GAAG,OAAO,IAAI,KAAK,QAAQ,CAAC,CAAC,CAAC,MAAM,CAAC,IAAI,CAAC,IAAI,CAAC,CAAC,CAAC,CAAC,IAAI,CAAA;QAClE,IAAI,GAAG,GAAG,KAAK,CAAC,UAAU,CAAC,IAAI,CAAC,IAAI,CAAC,CAAA;QAErC,KAAK,MAAM,QAAQ,IAAI,MAAM,EAAE;YAC7B,MAAM,OAAO,GAAG,MAAM,CAAC,GAAG,GAAG,MAAM,CAAC,IAAI,CAAC,CAAC,CAAA;YAC1C,GAAG,GAAG,iBAAiB,CAAC,OAAO,GAAG,QAAQ,CAAC,GAAG,CAAC,GAAG,IAAI,MAAM,CAAC,CAAC,CAAC,CAAC,CAAA;SACjE;QAED,IAAI,CAAC,IAAI,GAAG,KAAK,CAAC,UAAU,CAAC,GAAG,CAAC,CAAA;IACnC,CAAC;IAED,MAAM,CAAC,QAA8B;QACnC,QAAQ,QAAQ,EAAE;YAChB,KAAK,KAAK;gBACR,OAAO,IAAI,CAAC,IAAI,CAAC,QAAQ,CAAC,EAAE,CAAC,CAAC,WAAW,EAAE,CAAA;YAC7C,KAAK,QAAQ;gBACX,OAAO,IAAI,CAAC,QAAQ,EAAE,CAAC,QAAQ,CAAC,QAAQ,CAAC,CAAA;YAC3C;gBACE,OAAO,IAAI,CAAC,QAAQ,EAAE,CAAA;SACzB;IACH,CAAC;IAEO,QAAQ;QACd,OAAO,MAAM,CAAC,IAAI,CAChB,CAAC,CAAC,EAAE,CAAC,EAAE,EAAE,EAAE,EAAE,EAAE,EAAE,EAAE,EAAE,EAAE,EAAE,EAAE,EAAE,CAAC,CAAC,GAAG,CAAC,CAAC,CAAC,EAAE,CACrC,MAAM,CAAC,CAAC,IAAI,CAAC,IAAI,IAAI,MAAM,CAAC,CAAC,CAAC,CAAC,GAAG,MAAM,CAAC,IAAI,CAAC,CAAC,CAChD,CACF,CAAA;IACH,CAAC;IAED,MAAM,CAAC,UAAU,CAAC,CAAS;QACzB,OAAO,CAAC,MAAM,CAAC,CAAC,CAAC,IAAI,MAAM,CAAC,EAAE,CAAC,CAAC,GAAG,MAAM,CAAC,CAAC,CAAC,GAAG,CAAC,CAAA;IAClD,CAAC;CACF;AAED,kBAAe,KAAK,CAAA"} \ No newline at end of file diff --git a/node_modules/@actions/artifact/lib/internal/download-http-client.d.ts b/node_modules/@actions/artifact/lib/internal/download-http-client.d.ts new file mode 100644 index 00000000..b68b4318 --- /dev/null +++ b/node_modules/@actions/artifact/lib/internal/download-http-client.d.ts @@ -0,0 +1,39 @@ +/// +import * as fs from 'fs'; +import { ListArtifactsResponse, QueryArtifactResponse } from './contracts'; +import { HttpClientResponse } from '@actions/http-client'; +import { DownloadItem } from './download-specification'; +export declare class DownloadHttpClient { + private downloadHttpManager; + private statusReporter; + constructor(); + /** + * Gets a list of all artifacts that are in a specific container + */ + listArtifacts(): Promise; + /** + * Fetches a set of container items that describe the contents of an artifact + * @param artifactName the name of the artifact + * @param containerUrl the artifact container URL for the run + */ + 
getContainerItems(artifactName: string, containerUrl: string): Promise; + /** + * Concurrently downloads all the files that are part of an artifact + * @param downloadItems information about what items to download and where to save them + */ + downloadSingleArtifact(downloadItems: DownloadItem[]): Promise; + /** + * Downloads an individual file + * @param httpClientIndex the index of the http client that is used to make all of the calls + * @param artifactLocation origin location where a file will be downloaded from + * @param downloadPath destination location for the file being downloaded + */ + private downloadIndividualFile; + /** + * Pipes the response from downloading an individual file to the appropriate destination stream while decoding gzip content if necessary + * @param response the http response received when downloading a file + * @param destinationStream the stream where the file should be written to + * @param isGzip a boolean denoting if the content is compressed using gzip and if we need to decode it + */ + pipeResponseToFile(response: HttpClientResponse, destinationStream: fs.WriteStream, isGzip: boolean): Promise; +} diff --git a/node_modules/@actions/artifact/lib/internal/download-http-client.js b/node_modules/@actions/artifact/lib/internal/download-http-client.js new file mode 100644 index 00000000..d01b73eb --- /dev/null +++ b/node_modules/@actions/artifact/lib/internal/download-http-client.js @@ -0,0 +1,292 @@ +"use strict"; +var __createBinding = (this && this.__createBinding) || (Object.create ? (function(o, m, k, k2) { + if (k2 === undefined) k2 = k; + Object.defineProperty(o, k2, { enumerable: true, get: function() { return m[k]; } }); +}) : (function(o, m, k, k2) { + if (k2 === undefined) k2 = k; + o[k2] = m[k]; +})); +var __setModuleDefault = (this && this.__setModuleDefault) || (Object.create ? (function(o, v) { + Object.defineProperty(o, "default", { enumerable: true, value: v }); +}) : function(o, v) { + o["default"] = v; +}); +var __importStar = (this && this.__importStar) || function (mod) { + if (mod && mod.__esModule) return mod; + var result = {}; + if (mod != null) for (var k in mod) if (k !== "default" && Object.hasOwnProperty.call(mod, k)) __createBinding(result, mod, k); + __setModuleDefault(result, mod); + return result; +}; +var __awaiter = (this && this.__awaiter) || function (thisArg, _arguments, P, generator) { + function adopt(value) { return value instanceof P ? value : new P(function (resolve) { resolve(value); }); } + return new (P || (P = Promise))(function (resolve, reject) { + function fulfilled(value) { try { step(generator.next(value)); } catch (e) { reject(e); } } + function rejected(value) { try { step(generator["throw"](value)); } catch (e) { reject(e); } } + function step(result) { result.done ? 
resolve(result.value) : adopt(result.value).then(fulfilled, rejected); } + step((generator = generator.apply(thisArg, _arguments || [])).next()); + }); +}; +Object.defineProperty(exports, "__esModule", { value: true }); +exports.DownloadHttpClient = void 0; +const fs = __importStar(require("fs")); +const core = __importStar(require("@actions/core")); +const zlib = __importStar(require("zlib")); +const utils_1 = require("./utils"); +const url_1 = require("url"); +const status_reporter_1 = require("./status-reporter"); +const perf_hooks_1 = require("perf_hooks"); +const http_manager_1 = require("./http-manager"); +const config_variables_1 = require("./config-variables"); +const requestUtils_1 = require("./requestUtils"); +class DownloadHttpClient { + constructor() { + this.downloadHttpManager = new http_manager_1.HttpManager(config_variables_1.getDownloadFileConcurrency(), '@actions/artifact-download'); + // downloads are usually significantly faster than uploads so display status information every second + this.statusReporter = new status_reporter_1.StatusReporter(1000); + } + /** + * Gets a list of all artifacts that are in a specific container + */ + listArtifacts() { + return __awaiter(this, void 0, void 0, function* () { + const artifactUrl = utils_1.getArtifactUrl(); + // use the first client from the httpManager, `keep-alive` is not used so the connection will close immediately + const client = this.downloadHttpManager.getClient(0); + const headers = utils_1.getDownloadHeaders('application/json'); + const response = yield requestUtils_1.retryHttpClientRequest('List Artifacts', () => __awaiter(this, void 0, void 0, function* () { return client.get(artifactUrl, headers); })); + const body = yield response.readBody(); + return JSON.parse(body); + }); + } + /** + * Fetches a set of container items that describe the contents of an artifact + * @param artifactName the name of the artifact + * @param containerUrl the artifact container URL for the run + */ + getContainerItems(artifactName, containerUrl) { + return __awaiter(this, void 0, void 0, function* () { + // the itemPath search parameter controls which containers will be returned + const resourceUrl = new url_1.URL(containerUrl); + resourceUrl.searchParams.append('itemPath', artifactName); + // use the first client from the httpManager, `keep-alive` is not used so the connection will close immediately + const client = this.downloadHttpManager.getClient(0); + const headers = utils_1.getDownloadHeaders('application/json'); + const response = yield requestUtils_1.retryHttpClientRequest('Get Container Items', () => __awaiter(this, void 0, void 0, function* () { return client.get(resourceUrl.toString(), headers); })); + const body = yield response.readBody(); + return JSON.parse(body); + }); + } + /** + * Concurrently downloads all the files that are part of an artifact + * @param downloadItems information about what items to download and where to save them + */ + downloadSingleArtifact(downloadItems) { + return __awaiter(this, void 0, void 0, function* () { + const DOWNLOAD_CONCURRENCY = config_variables_1.getDownloadFileConcurrency(); + // limit the number of files downloaded at a single time + core.debug(`Download file concurrency is set to ${DOWNLOAD_CONCURRENCY}`); + const parallelDownloads = [...new Array(DOWNLOAD_CONCURRENCY).keys()]; + let currentFile = 0; + let downloadedFiles = 0; + core.info(`Total number of files that will be downloaded: ${downloadItems.length}`); + 
this.statusReporter.setTotalNumberOfFilesToProcess(downloadItems.length); + this.statusReporter.start(); + yield Promise.all(parallelDownloads.map((index) => __awaiter(this, void 0, void 0, function* () { + while (currentFile < downloadItems.length) { + const currentFileToDownload = downloadItems[currentFile]; + currentFile += 1; + const startTime = perf_hooks_1.performance.now(); + yield this.downloadIndividualFile(index, currentFileToDownload.sourceLocation, currentFileToDownload.targetPath); + if (core.isDebug()) { + core.debug(`File: ${++downloadedFiles}/${downloadItems.length}. ${currentFileToDownload.targetPath} took ${(perf_hooks_1.performance.now() - startTime).toFixed(3)} milliseconds to finish downloading`); + } + this.statusReporter.incrementProcessedCount(); + } + }))) + .catch(error => { + throw new Error(`Unable to download the artifact: ${error}`); + }) + .finally(() => { + this.statusReporter.stop(); + // safety dispose all connections + this.downloadHttpManager.disposeAndReplaceAllClients(); + }); + }); + } + /** + * Downloads an individual file + * @param httpClientIndex the index of the http client that is used to make all of the calls + * @param artifactLocation origin location where a file will be downloaded from + * @param downloadPath destination location for the file being downloaded + */ + downloadIndividualFile(httpClientIndex, artifactLocation, downloadPath) { + return __awaiter(this, void 0, void 0, function* () { + let retryCount = 0; + const retryLimit = config_variables_1.getRetryLimit(); + let destinationStream = fs.createWriteStream(downloadPath); + const headers = utils_1.getDownloadHeaders('application/json', true, true); + // a single GET request is used to download a file + const makeDownloadRequest = () => __awaiter(this, void 0, void 0, function* () { + const client = this.downloadHttpManager.getClient(httpClientIndex); + return yield client.get(artifactLocation, headers); + }); + // check the response headers to determine if the file was compressed using gzip + const isGzip = (incomingHeaders) => { + return ('content-encoding' in incomingHeaders && + incomingHeaders['content-encoding'] === 'gzip'); + }; + // Increments the current retry count and then checks if the retry limit has been reached + // If there have been too many retries, fail so the download stops. If there is a retryAfterValue value provided, + // it will be used + const backOff = (retryAfterValue) => __awaiter(this, void 0, void 0, function* () { + retryCount++; + if (retryCount > retryLimit) { + return Promise.reject(new Error(`Retry limit has been reached. Unable to download ${artifactLocation}`)); + } + else { + this.downloadHttpManager.disposeAndReplaceClient(httpClientIndex); + if (retryAfterValue) { + // Back off by waiting the specified time denoted by the retry-after header + core.info(`Backoff due to too many requests, retry #${retryCount}. Waiting for ${retryAfterValue} milliseconds before continuing the download`); + yield utils_1.sleep(retryAfterValue); + } + else { + // Back off using an exponential value that depends on the retry count + const backoffTime = utils_1.getExponentialRetryTimeInMilliseconds(retryCount); + core.info(`Exponential backoff for retry #${retryCount}. 
Waiting for ${backoffTime} milliseconds before continuing the download`); + yield utils_1.sleep(backoffTime); + } + core.info(`Finished backoff for retry #${retryCount}, continuing with download`); + } + }); + const isAllBytesReceived = (expected, received) => { + // be lenient, if any input is missing, assume success, i.e. not truncated + if (!expected || + !received || + process.env['ACTIONS_ARTIFACT_SKIP_DOWNLOAD_VALIDATION']) { + core.info('Skipping download validation.'); + return true; + } + return parseInt(expected) === received; + }; + const resetDestinationStream = (fileDownloadPath) => __awaiter(this, void 0, void 0, function* () { + destinationStream.close(); + // await until file is created at downloadpath; node15 and up fs.createWriteStream had not created a file yet + yield new Promise(resolve => { + destinationStream.on('close', resolve); + if (destinationStream.writableFinished) { + resolve(); + } + }); + yield utils_1.rmFile(fileDownloadPath); + destinationStream = fs.createWriteStream(fileDownloadPath); + }); + // keep trying to download a file until a retry limit has been reached + while (retryCount <= retryLimit) { + let response; + try { + response = yield makeDownloadRequest(); + } + catch (error) { + // if an error is caught, it is usually indicative of a timeout so retry the download + core.info('An error occurred while attempting to download a file'); + // eslint-disable-next-line no-console + console.log(error); + // increment the retryCount and use exponential backoff to wait before making the next request + yield backOff(); + continue; + } + let forceRetry = false; + if (utils_1.isSuccessStatusCode(response.message.statusCode)) { + // The body contains the contents of the file however calling response.readBody() causes all the content to be converted to a string + // which can cause some gzip encoded data to be lost + // Instead of using response.readBody(), response.message is a readableStream that can be directly used to get the raw body contents + try { + const isGzipped = isGzip(response.message.headers); + yield this.pipeResponseToFile(response, destinationStream, isGzipped); + if (isGzipped || + isAllBytesReceived(response.message.headers['content-length'], yield utils_1.getFileSize(downloadPath))) { + return; + } + else { + forceRetry = true; + } + } + catch (error) { + // retry on error, most likely streams were corrupted + forceRetry = true; + } + } + if (forceRetry || utils_1.isRetryableStatusCode(response.message.statusCode)) { + core.info(`A ${response.message.statusCode} response code has been received while attempting to download an artifact`); + resetDestinationStream(downloadPath); + // if a throttled status code is received, try to get the retryAfter header value, else differ to standard exponential backoff + utils_1.isThrottledStatusCode(response.message.statusCode) + ? 
yield backOff(utils_1.tryGetRetryAfterValueTimeInMilliseconds(response.message.headers)) + : yield backOff(); + } + else { + // Some unexpected response code, fail immediately and stop the download + utils_1.displayHttpDiagnostics(response); + return Promise.reject(new Error(`Unexpected http ${response.message.statusCode} during download for ${artifactLocation}`)); + } + } + }); + } + /** + * Pipes the response from downloading an individual file to the appropriate destination stream while decoding gzip content if necessary + * @param response the http response received when downloading a file + * @param destinationStream the stream where the file should be written to + * @param isGzip a boolean denoting if the content is compressed using gzip and if we need to decode it + */ + pipeResponseToFile(response, destinationStream, isGzip) { + return __awaiter(this, void 0, void 0, function* () { + yield new Promise((resolve, reject) => { + if (isGzip) { + const gunzip = zlib.createGunzip(); + response.message + .on('error', error => { + core.error(`An error occurred while attempting to read the response stream`); + gunzip.close(); + destinationStream.close(); + reject(error); + }) + .pipe(gunzip) + .on('error', error => { + core.error(`An error occurred while attempting to decompress the response stream`); + destinationStream.close(); + reject(error); + }) + .pipe(destinationStream) + .on('close', () => { + resolve(); + }) + .on('error', error => { + core.error(`An error occurred while writing a downloaded file to ${destinationStream.path}`); + reject(error); + }); + } + else { + response.message + .on('error', error => { + core.error(`An error occurred while attempting to read the response stream`); + destinationStream.close(); + reject(error); + }) + .pipe(destinationStream) + .on('close', () => { + resolve(); + }) + .on('error', error => { + core.error(`An error occurred while writing a downloaded file to ${destinationStream.path}`); + reject(error); + }); + } + }); + return; + }); + } +} +exports.DownloadHttpClient = DownloadHttpClient; +//# sourceMappingURL=download-http-client.js.map \ No newline at end of file diff --git a/node_modules/@actions/artifact/lib/internal/download-http-client.js.map b/node_modules/@actions/artifact/lib/internal/download-http-client.js.map new file mode 100644 index 00000000..c82645f1 --- /dev/null +++ b/node_modules/@actions/artifact/lib/internal/download-http-client.js.map @@ -0,0 +1 @@ 
+{"version":3,"file":"download-http-client.js","sourceRoot":"","sources":["../../src/internal/download-http-client.ts"],"names":[],"mappings":";;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;AAAA,uCAAwB;AACxB,oDAAqC;AACrC,2CAA4B;AAC5B,mCAYgB;AAChB,6BAAuB;AACvB,uDAAgD;AAChD,2CAAsC;AAGtC,iDAA0C;AAE1C,yDAA4E;AAE5E,iDAAqD;AAErD,MAAa,kBAAkB;IAK7B;QACE,IAAI,CAAC,mBAAmB,GAAG,IAAI,0BAAW,CACxC,6CAA0B,EAAE,EAC5B,4BAA4B,CAC7B,CAAA;QACD,qGAAqG;QACrG,IAAI,CAAC,cAAc,GAAG,IAAI,gCAAc,CAAC,IAAI,CAAC,CAAA;IAChD,CAAC;IAED;;OAEG;IACG,aAAa;;YACjB,MAAM,WAAW,GAAG,sBAAc,EAAE,CAAA;YAEpC,+GAA+G;YAC/G,MAAM,MAAM,GAAG,IAAI,CAAC,mBAAmB,CAAC,SAAS,CAAC,CAAC,CAAC,CAAA;YACpD,MAAM,OAAO,GAAG,0BAAkB,CAAC,kBAAkB,CAAC,CAAA;YACtD,MAAM,QAAQ,GAAG,MAAM,qCAAsB,CAAC,gBAAgB,EAAE,GAAS,EAAE,gDACzE,OAAA,MAAM,CAAC,GAAG,CAAC,WAAW,EAAE,OAAO,CAAC,CAAA,GAAA,CACjC,CAAA;YACD,MAAM,IAAI,GAAW,MAAM,QAAQ,CAAC,QAAQ,EAAE,CAAA;YAC9C,OAAO,IAAI,CAAC,KAAK,CAAC,IAAI,CAAC,CAAA;QACzB,CAAC;KAAA;IAED;;;;OAIG;IACG,iBAAiB,CACrB,YAAoB,EACpB,YAAoB;;YAEpB,2EAA2E;YAC3E,MAAM,WAAW,GAAG,IAAI,SAAG,CAAC,YAAY,CAAC,CAAA;YACzC,WAAW,CAAC,YAAY,CAAC,MAAM,CAAC,UAAU,EAAE,YAAY,CAAC,CAAA;YAEzD,+GAA+G;YAC/G,MAAM,MAAM,GAAG,IAAI,CAAC,mBAAmB,CAAC,SAAS,CAAC,CAAC,CAAC,CAAA;YACpD,MAAM,OAAO,GAAG,0BAAkB,CAAC,kBAAkB,CAAC,CAAA;YACtD,MAAM,QAAQ,GAAG,MAAM,qCAAsB,CAC3C,qBAAqB,EACrB,GAAS,EAAE,gDAAC,OAAA,MAAM,CAAC,GAAG,CAAC,WAAW,CAAC,QAAQ,EAAE,EAAE,OAAO,CAAC,CAAA,GAAA,CACxD,CAAA;YACD,MAAM,IAAI,GAAW,MAAM,QAAQ,CAAC,QAAQ,EAAE,CAAA;YAC9C,OAAO,IAAI,CAAC,KAAK,CAAC,IAAI,CAAC,CAAA;QACzB,CAAC;KAAA;IAED;;;OAGG;IACG,sBAAsB,CAAC,aAA6B;;YACxD,MAAM,oBAAoB,GAAG,6CAA0B,EAAE,CAAA;YACzD,wDAAwD;YACxD,IAAI,CAAC,KAAK,CAAC,uCAAuC,oBAAoB,EAAE,CAAC,CAAA;YACzE,MAAM,iBAAiB,GAAG,CAAC,GAAG,IAAI,KAAK,CAAC,oBAAoB,CAAC,CAAC,IAAI,EAAE,CAAC,CAAA;YACrE,IAAI,WAAW,GAAG,CAAC,CAAA;YACnB,IAAI,eAAe,GAAG,CAAC,CAAA;YAEvB,IAAI,CAAC,IAAI,CACP,kDAAkD,aAAa,CAAC,MAAM,EAAE,CACzE,CAAA;YAED,IAAI,CAAC,cAAc,CAAC,8BAA8B,CAAC,aAAa,CAAC,MAAM,CAAC,CAAA;YACxE,IAAI,CAAC,cAAc,CAAC,KAAK,EAAE,CAAA;YAE3B,MAAM,OAAO,CAAC,GAAG,CACf,iBAAiB,CAAC,GAAG,CAAC,CAAM,KAAK,EAAC,EAAE;gBAClC,OAAO,WAAW,GAAG,aAAa,CAAC,MAAM,EAAE;oBACzC,MAAM,qBAAqB,GAAG,aAAa,CAAC,WAAW,CAAC,CAAA;oBACxD,WAAW,IAAI,CAAC,CAAA;oBAEhB,MAAM,SAAS,GAAG,wBAAW,CAAC,GAAG,EAAE,CAAA;oBACnC,MAAM,IAAI,CAAC,sBAAsB,CAC/B,KAAK,EACL,qBAAqB,CAAC,cAAc,EACpC,qBAAqB,CAAC,UAAU,CACjC,CAAA;oBAED,IAAI,IAAI,CAAC,OAAO,EAAE,EAAE;wBAClB,IAAI,CAAC,KAAK,CACR,SAAS,EAAE,eAAe,IAAI,aAAa,CAAC,MAAM,KAChD,qBAAqB,CAAC,UACxB,SAAS,CAAC,wBAAW,CAAC,GAAG,EAAE,GAAG,SAAS,CAAC,CAAC,OAAO,CAC9C,CAAC,CACF,qCAAqC,CACvC,CAAA;qBACF;oBAED,IAAI,CAAC,cAAc,CAAC,uBAAuB,EAAE,CAAA;iBAC9C;YACH,CAAC,CAAA,CAAC,CACH;iBACE,KAAK,CAAC,KAAK,CAAC,EAAE;gBACb,MAAM,IAAI,KAAK,CAAC,oCAAoC,KAAK,EAAE,CAAC,CAAA;YAC9D,CAAC,CAAC;iBACD,OAAO,CAAC,GAAG,EAAE;gBACZ,IAAI,CAAC,cAAc,CAAC,IAAI,EAAE,CAAA;gBAC1B,iCAAiC;gBACjC,IAAI,CAAC,mBAAmB,CAAC,2BAA2B,EAAE,CAAA;YACxD,CAAC,CAAC,CAAA;QACN,CAAC;KAAA;IAED;;;;;OAKG;IACW,sBAAsB,CAClC,eAAuB,EACvB,gBAAwB,EACxB,YAAoB;;YAEpB,IAAI,UAAU,GAAG,CAAC,CAAA;YAClB,MAAM,UAAU,GAAG,gCAAa,EAAE,CAAA;YAClC,IAAI,iBAAiB,GAAG,EAAE,CAAC,iBAAiB,CAAC,YAAY,CAAC,CAAA;YAC1D,MAAM,OAAO,GAAG,0BAAkB,CAAC,kBAAkB,EAAE,IAAI,EAAE,IAAI,CAAC,CAAA;YAElE,kDAAkD;YAClD,MAAM,mBAAmB,GAAG,GAAsC,EAAE;gBAClE,MAAM,MAAM,GAAG,IAAI,CAAC,mBAAmB,CAAC,SAAS,CAAC,eAAe,CAAC,CAAA;gBAClE,OAAO,MAAM,MAAM,CAAC,GAAG,CAAC,gBAAgB,EAAE,OAAO,CAAC,CAAA;YACpD,CAAC,CAAA,CAAA;YAED,gFAAgF;YAChF,MAAM,MAAM,GAAG,CAAC,eAAoC,EAAW,EAAE;gBAC/D,OAAO,CACL,kBAAkB,IAAI,eAAe;oBACrC,eAAe,CAAC,kBAAkB,CAAC,KAAK,MAAM,CAC/C,CAAA;YACH,CAAC,CAAA;YAED,yFAAyF;YACzF,iHAAiH;YACjH,kBAAkB;YAClB,MAAM,OAAO,GAAG,CAAO,eAAwB,EAAiB,EAAE;gBAChE,UAAU,EAAE,CAA
A;gBACZ,IAAI,UAAU,GAAG,UAAU,EAAE;oBAC3B,OAAO,OAAO,CAAC,MAAM,CACnB,IAAI,KAAK,CACP,oDAAoD,gBAAgB,EAAE,CACvE,CACF,CAAA;iBACF;qBAAM;oBACL,IAAI,CAAC,mBAAmB,CAAC,uBAAuB,CAAC,eAAe,CAAC,CAAA;oBACjE,IAAI,eAAe,EAAE;wBACnB,2EAA2E;wBAC3E,IAAI,CAAC,IAAI,CACP,4CAA4C,UAAU,iBAAiB,eAAe,8CAA8C,CACrI,CAAA;wBACD,MAAM,aAAK,CAAC,eAAe,CAAC,CAAA;qBAC7B;yBAAM;wBACL,sEAAsE;wBACtE,MAAM,WAAW,GAAG,6CAAqC,CAAC,UAAU,CAAC,CAAA;wBACrE,IAAI,CAAC,IAAI,CACP,kCAAkC,UAAU,iBAAiB,WAAW,8CAA8C,CACvH,CAAA;wBACD,MAAM,aAAK,CAAC,WAAW,CAAC,CAAA;qBACzB;oBACD,IAAI,CAAC,IAAI,CACP,+BAA+B,UAAU,4BAA4B,CACtE,CAAA;iBACF;YACH,CAAC,CAAA,CAAA;YAED,MAAM,kBAAkB,GAAG,CACzB,QAAiB,EACjB,QAAiB,EACR,EAAE;gBACX,0EAA0E;gBAC1E,IACE,CAAC,QAAQ;oBACT,CAAC,QAAQ;oBACT,OAAO,CAAC,GAAG,CAAC,2CAA2C,CAAC,EACxD;oBACA,IAAI,CAAC,IAAI,CAAC,+BAA+B,CAAC,CAAA;oBAC1C,OAAO,IAAI,CAAA;iBACZ;gBAED,OAAO,QAAQ,CAAC,QAAQ,CAAC,KAAK,QAAQ,CAAA;YACxC,CAAC,CAAA;YAED,MAAM,sBAAsB,GAAG,CAC7B,gBAAwB,EACT,EAAE;gBACjB,iBAAiB,CAAC,KAAK,EAAE,CAAA;gBACzB,6GAA6G;gBAC7G,MAAM,IAAI,OAAO,CAAO,OAAO,CAAC,EAAE;oBAChC,iBAAiB,CAAC,EAAE,CAAC,OAAO,EAAE,OAAO,CAAC,CAAA;oBACtC,IAAI,iBAAiB,CAAC,gBAAgB,EAAE;wBACtC,OAAO,EAAE,CAAA;qBACV;gBACH,CAAC,CAAC,CAAA;gBACF,MAAM,cAAM,CAAC,gBAAgB,CAAC,CAAA;gBAC9B,iBAAiB,GAAG,EAAE,CAAC,iBAAiB,CAAC,gBAAgB,CAAC,CAAA;YAC5D,CAAC,CAAA,CAAA;YAED,sEAAsE;YACtE,OAAO,UAAU,IAAI,UAAU,EAAE;gBAC/B,IAAI,QAA4B,CAAA;gBAChC,IAAI;oBACF,QAAQ,GAAG,MAAM,mBAAmB,EAAE,CAAA;iBACvC;gBAAC,OAAO,KAAK,EAAE;oBACd,qFAAqF;oBACrF,IAAI,CAAC,IAAI,CAAC,uDAAuD,CAAC,CAAA;oBAClE,sCAAsC;oBACtC,OAAO,CAAC,GAAG,CAAC,KAAK,CAAC,CAAA;oBAElB,8FAA8F;oBAC9F,MAAM,OAAO,EAAE,CAAA;oBACf,SAAQ;iBACT;gBAED,IAAI,UAAU,GAAG,KAAK,CAAA;gBACtB,IAAI,2BAAmB,CAAC,QAAQ,CAAC,OAAO,CAAC,UAAU,CAAC,EAAE;oBACpD,oIAAoI;oBACpI,oDAAoD;oBACpD,oIAAoI;oBACpI,IAAI;wBACF,MAAM,SAAS,GAAG,MAAM,CAAC,QAAQ,CAAC,OAAO,CAAC,OAAO,CAAC,CAAA;wBAClD,MAAM,IAAI,CAAC,kBAAkB,CAAC,QAAQ,EAAE,iBAAiB,EAAE,SAAS,CAAC,CAAA;wBAErE,IACE,SAAS;4BACT,kBAAkB,CAChB,QAAQ,CAAC,OAAO,CAAC,OAAO,CAAC,gBAAgB,CAAC,EAC1C,MAAM,mBAAW,CAAC,YAAY,CAAC,CAChC,EACD;4BACA,OAAM;yBACP;6BAAM;4BACL,UAAU,GAAG,IAAI,CAAA;yBAClB;qBACF;oBAAC,OAAO,KAAK,EAAE;wBACd,qDAAqD;wBACrD,UAAU,GAAG,IAAI,CAAA;qBAClB;iBACF;gBAED,IAAI,UAAU,IAAI,6BAAqB,CAAC,QAAQ,CAAC,OAAO,CAAC,UAAU,CAAC,EAAE;oBACpE,IAAI,CAAC,IAAI,CACP,KAAK,QAAQ,CAAC,OAAO,CAAC,UAAU,2EAA2E,CAC5G,CAAA;oBACD,sBAAsB,CAAC,YAAY,CAAC,CAAA;oBACpC,8HAA8H;oBAC9H,6BAAqB,CAAC,QAAQ,CAAC,OAAO,CAAC,UAAU,CAAC;wBAChD,CAAC,CAAC,MAAM,OAAO,CACX,+CAAuC,CAAC,QAAQ,CAAC,OAAO,CAAC,OAAO,CAAC,CAClE;wBACH,CAAC,CAAC,MAAM,OAAO,EAAE,CAAA;iBACpB;qBAAM;oBACL,wEAAwE;oBACxE,8BAAsB,CAAC,QAAQ,CAAC,CAAA;oBAChC,OAAO,OAAO,CAAC,MAAM,CACnB,IAAI,KAAK,CACP,mBAAmB,QAAQ,CAAC,OAAO,CAAC,UAAU,wBAAwB,gBAAgB,EAAE,CACzF,CACF,CAAA;iBACF;aACF;QACH,CAAC;KAAA;IAED;;;;;OAKG;IACG,kBAAkB,CACtB,QAA4B,EAC5B,iBAAiC,EACjC,MAAe;;YAEf,MAAM,IAAI,OAAO,CAAC,CAAC,OAAO,EAAE,MAAM,EAAE,EAAE;gBACpC,IAAI,MAAM,EAAE;oBACV,MAAM,MAAM,GAAG,IAAI,CAAC,YAAY,EAAE,CAAA;oBAClC,QAAQ,CAAC,OAAO;yBACb,EAAE,CAAC,OAAO,EAAE,KAAK,CAAC,EAAE;wBACnB,IAAI,CAAC,KAAK,CACR,gEAAgE,CACjE,CAAA;wBACD,MAAM,CAAC,KAAK,EAAE,CAAA;wBACd,iBAAiB,CAAC,KAAK,EAAE,CAAA;wBACzB,MAAM,CAAC,KAAK,CAAC,CAAA;oBACf,CAAC,CAAC;yBACD,IAAI,CAAC,MAAM,CAAC;yBACZ,EAAE,CAAC,OAAO,EAAE,KAAK,CAAC,EAAE;wBACnB,IAAI,CAAC,KAAK,CACR,sEAAsE,CACvE,CAAA;wBACD,iBAAiB,CAAC,KAAK,EAAE,CAAA;wBACzB,MAAM,CAAC,KAAK,CAAC,CAAA;oBACf,CAAC,CAAC;yBACD,IAAI,CAAC,iBAAiB,CAAC;yBACvB,EAAE,CAAC,OAAO,EAAE,GAAG,EAAE;wBAChB,OAAO,EAAE,CAAA;oBACX,CAAC,CAAC;yBACD,EAAE,CAAC,OAAO,EAAE,KAAK,CAAC,EAAE;wBACnB,IAAI,CAAC,KAAK,CACR,wDAAwD,iBAAiB,CAAC,IAAI,EAAE,CACjF,CAAA;wBACD,MAAM,CAAC,KAAK,CAAC,CAAA;oBACf,CAAC,CAAC,CAAA;iBACL;qBAAM;oBAC
L,QAAQ,CAAC,OAAO;yBACb,EAAE,CAAC,OAAO,EAAE,KAAK,CAAC,EAAE;wBACnB,IAAI,CAAC,KAAK,CACR,gEAAgE,CACjE,CAAA;wBACD,iBAAiB,CAAC,KAAK,EAAE,CAAA;wBACzB,MAAM,CAAC,KAAK,CAAC,CAAA;oBACf,CAAC,CAAC;yBACD,IAAI,CAAC,iBAAiB,CAAC;yBACvB,EAAE,CAAC,OAAO,EAAE,GAAG,EAAE;wBAChB,OAAO,EAAE,CAAA;oBACX,CAAC,CAAC;yBACD,EAAE,CAAC,OAAO,EAAE,KAAK,CAAC,EAAE;wBACnB,IAAI,CAAC,KAAK,CACR,wDAAwD,iBAAiB,CAAC,IAAI,EAAE,CACjF,CAAA;wBACD,MAAM,CAAC,KAAK,CAAC,CAAA;oBACf,CAAC,CAAC,CAAA;iBACL;YACH,CAAC,CAAC,CAAA;YACF,OAAM;QACR,CAAC;KAAA;CACF;AA9UD,gDA8UC"} \ No newline at end of file diff --git a/node_modules/@actions/artifact/lib/internal/download-options.d.ts b/node_modules/@actions/artifact/lib/internal/download-options.d.ts new file mode 100644 index 00000000..5ad88af3 --- /dev/null +++ b/node_modules/@actions/artifact/lib/internal/download-options.d.ts @@ -0,0 +1,7 @@ +export interface DownloadOptions { + /** + * Specifies if a folder is created for the artifact that is downloaded (contents downloaded into this folder), + * defaults to false if not specified + * */ + createArtifactFolder?: boolean; +} diff --git a/node_modules/@actions/artifact/lib/internal/download-options.js b/node_modules/@actions/artifact/lib/internal/download-options.js new file mode 100644 index 00000000..8860312d --- /dev/null +++ b/node_modules/@actions/artifact/lib/internal/download-options.js @@ -0,0 +1,3 @@ +"use strict"; +Object.defineProperty(exports, "__esModule", { value: true }); +//# sourceMappingURL=download-options.js.map \ No newline at end of file diff --git a/node_modules/@actions/artifact/lib/internal/download-options.js.map b/node_modules/@actions/artifact/lib/internal/download-options.js.map new file mode 100644 index 00000000..a6ba1e54 --- /dev/null +++ b/node_modules/@actions/artifact/lib/internal/download-options.js.map @@ -0,0 +1 @@ +{"version":3,"file":"download-options.js","sourceRoot":"","sources":["../../src/internal/download-options.ts"],"names":[],"mappings":""} \ No newline at end of file diff --git a/node_modules/@actions/artifact/lib/internal/download-response.d.ts b/node_modules/@actions/artifact/lib/internal/download-response.d.ts new file mode 100644 index 00000000..d4ac5a22 --- /dev/null +++ b/node_modules/@actions/artifact/lib/internal/download-response.d.ts @@ -0,0 +1,10 @@ +export interface DownloadResponse { + /** + * The name of the artifact that was downloaded + */ + artifactName: string; + /** + * The full Path to where the artifact was downloaded + */ + downloadPath: string; +} diff --git a/node_modules/@actions/artifact/lib/internal/download-response.js b/node_modules/@actions/artifact/lib/internal/download-response.js new file mode 100644 index 00000000..d1d13f23 --- /dev/null +++ b/node_modules/@actions/artifact/lib/internal/download-response.js @@ -0,0 +1,3 @@ +"use strict"; +Object.defineProperty(exports, "__esModule", { value: true }); +//# sourceMappingURL=download-response.js.map \ No newline at end of file diff --git a/node_modules/@actions/artifact/lib/internal/download-response.js.map b/node_modules/@actions/artifact/lib/internal/download-response.js.map new file mode 100644 index 00000000..3602c0d7 --- /dev/null +++ b/node_modules/@actions/artifact/lib/internal/download-response.js.map @@ -0,0 +1 @@ +{"version":3,"file":"download-response.js","sourceRoot":"","sources":["../../src/internal/download-response.ts"],"names":[],"mappings":""} \ No newline at end of file diff --git a/node_modules/@actions/artifact/lib/internal/download-specification.d.ts 
b/node_modules/@actions/artifact/lib/internal/download-specification.d.ts new file mode 100644 index 00000000..ee5d5ec6 --- /dev/null +++ b/node_modules/@actions/artifact/lib/internal/download-specification.d.ts @@ -0,0 +1,19 @@ +import { ContainerEntry } from './contracts'; +export interface DownloadSpecification { + rootDownloadLocation: string; + directoryStructure: string[]; + emptyFilesToCreate: string[]; + filesToDownload: DownloadItem[]; +} +export interface DownloadItem { + sourceLocation: string; + targetPath: string; +} +/** + * Creates a specification for a set of files that will be downloaded + * @param artifactName the name of the artifact + * @param artifactEntries a set of container entries that describe that files that make up an artifact + * @param downloadPath the path where the artifact will be downloaded to + * @param includeRootDirectory specifies if there should be an extra directory (denoted by the artifact name) where the artifact files should be downloaded to + */ +export declare function getDownloadSpecification(artifactName: string, artifactEntries: ContainerEntry[], downloadPath: string, includeRootDirectory: boolean): DownloadSpecification; diff --git a/node_modules/@actions/artifact/lib/internal/download-specification.js b/node_modules/@actions/artifact/lib/internal/download-specification.js new file mode 100644 index 00000000..8981abe9 --- /dev/null +++ b/node_modules/@actions/artifact/lib/internal/download-specification.js @@ -0,0 +1,74 @@ +"use strict"; +var __createBinding = (this && this.__createBinding) || (Object.create ? (function(o, m, k, k2) { + if (k2 === undefined) k2 = k; + Object.defineProperty(o, k2, { enumerable: true, get: function() { return m[k]; } }); +}) : (function(o, m, k, k2) { + if (k2 === undefined) k2 = k; + o[k2] = m[k]; +})); +var __setModuleDefault = (this && this.__setModuleDefault) || (Object.create ? (function(o, v) { + Object.defineProperty(o, "default", { enumerable: true, value: v }); +}) : function(o, v) { + o["default"] = v; +}); +var __importStar = (this && this.__importStar) || function (mod) { + if (mod && mod.__esModule) return mod; + var result = {}; + if (mod != null) for (var k in mod) if (k !== "default" && Object.hasOwnProperty.call(mod, k)) __createBinding(result, mod, k); + __setModuleDefault(result, mod); + return result; +}; +Object.defineProperty(exports, "__esModule", { value: true }); +exports.getDownloadSpecification = void 0; +const path = __importStar(require("path")); +/** + * Creates a specification for a set of files that will be downloaded + * @param artifactName the name of the artifact + * @param artifactEntries a set of container entries that describe that files that make up an artifact + * @param downloadPath the path where the artifact will be downloaded to + * @param includeRootDirectory specifies if there should be an extra directory (denoted by the artifact name) where the artifact files should be downloaded to + */ +function getDownloadSpecification(artifactName, artifactEntries, downloadPath, includeRootDirectory) { + // use a set for the directory paths so that there are no duplicates + const directories = new Set(); + const specifications = { + rootDownloadLocation: includeRootDirectory + ? 
path.join(downloadPath, artifactName) + : downloadPath, + directoryStructure: [], + emptyFilesToCreate: [], + filesToDownload: [] + }; + for (const entry of artifactEntries) { + // Ignore artifacts in the container that don't begin with the same name + if (entry.path.startsWith(`${artifactName}/`) || + entry.path.startsWith(`${artifactName}\\`)) { + // normalize all separators to the local OS + const normalizedPathEntry = path.normalize(entry.path); + // entry.path always starts with the artifact name, if includeRootDirectory is false, remove the name from the beginning of the path + const filePath = path.join(downloadPath, includeRootDirectory + ? normalizedPathEntry + : normalizedPathEntry.replace(artifactName, '')); + // Case insensitive folder structure maintained in the backend, not every folder is created so the 'folder' + // itemType cannot be relied upon. The file must be used to determine the directory structure + if (entry.itemType === 'file') { + // Get the directories that we need to create from the filePath for each individual file + directories.add(path.dirname(filePath)); + if (entry.fileLength === 0) { + // An empty file was uploaded, create the empty files locally so that no extra http calls are made + specifications.emptyFilesToCreate.push(filePath); + } + else { + specifications.filesToDownload.push({ + sourceLocation: entry.contentLocation, + targetPath: filePath + }); + } + } + } + } + specifications.directoryStructure = Array.from(directories); + return specifications; +} +exports.getDownloadSpecification = getDownloadSpecification; +//# sourceMappingURL=download-specification.js.map \ No newline at end of file diff --git a/node_modules/@actions/artifact/lib/internal/download-specification.js.map b/node_modules/@actions/artifact/lib/internal/download-specification.js.map new file mode 100644 index 00000000..17e505e1 --- /dev/null +++ b/node_modules/@actions/artifact/lib/internal/download-specification.js.map @@ -0,0 +1 @@ +{"version":3,"file":"download-specification.js","sourceRoot":"","sources":["../../src/internal/download-specification.ts"],"names":[],"mappings":";;;;;;;;;;;;;;;;;;;;;;AAAA,2CAA4B;AAyB5B;;;;;;GAMG;AACH,SAAgB,wBAAwB,CACtC,YAAoB,EACpB,eAAiC,EACjC,YAAoB,EACpB,oBAA6B;IAE7B,oEAAoE;IACpE,MAAM,WAAW,GAAG,IAAI,GAAG,EAAU,CAAA;IAErC,MAAM,cAAc,GAA0B;QAC5C,oBAAoB,EAAE,oBAAoB;YACxC,CAAC,CAAC,IAAI,CAAC,IAAI,CAAC,YAAY,EAAE,YAAY,CAAC;YACvC,CAAC,CAAC,YAAY;QAChB,kBAAkB,EAAE,EAAE;QACtB,kBAAkB,EAAE,EAAE;QACtB,eAAe,EAAE,EAAE;KACpB,CAAA;IAED,KAAK,MAAM,KAAK,IAAI,eAAe,EAAE;QACnC,wEAAwE;QACxE,IACE,KAAK,CAAC,IAAI,CAAC,UAAU,CAAC,GAAG,YAAY,GAAG,CAAC;YACzC,KAAK,CAAC,IAAI,CAAC,UAAU,CAAC,GAAG,YAAY,IAAI,CAAC,EAC1C;YACA,2CAA2C;YAC3C,MAAM,mBAAmB,GAAG,IAAI,CAAC,SAAS,CAAC,KAAK,CAAC,IAAI,CAAC,CAAA;YACtD,oIAAoI;YACpI,MAAM,QAAQ,GAAG,IAAI,CAAC,IAAI,CACxB,YAAY,EACZ,oBAAoB;gBAClB,CAAC,CAAC,mBAAmB;gBACrB,CAAC,CAAC,mBAAmB,CAAC,OAAO,CAAC,YAAY,EAAE,EAAE,CAAC,CAClD,CAAA;YAED,2GAA2G;YAC3G,6FAA6F;YAC7F,IAAI,KAAK,CAAC,QAAQ,KAAK,MAAM,EAAE;gBAC7B,wFAAwF;gBACxF,WAAW,CAAC,GAAG,CAAC,IAAI,CAAC,OAAO,CAAC,QAAQ,CAAC,CAAC,CAAA;gBACvC,IAAI,KAAK,CAAC,UAAU,KAAK,CAAC,EAAE;oBAC1B,kGAAkG;oBAClG,cAAc,CAAC,kBAAkB,CAAC,IAAI,CAAC,QAAQ,CAAC,CAAA;iBACjD;qBAAM;oBACL,cAAc,CAAC,eAAe,CAAC,IAAI,CAAC;wBAClC,cAAc,EAAE,KAAK,CAAC,eAAe;wBACrC,UAAU,EAAE,QAAQ;qBACrB,CAAC,CAAA;iBACH;aACF;SACF;KACF;IAED,cAAc,CAAC,kBAAkB,GAAG,KAAK,CAAC,IAAI,CAAC,WAAW,CAAC,CAAA;IAC3D,OAAO,cAAc,CAAA;AACvB,CAAC;AAtDD,4DAsDC"} \ No newline at end of file diff --git a/node_modules/@actions/artifact/lib/internal/http-manager.d.ts 
b/node_modules/@actions/artifact/lib/internal/http-manager.d.ts new file mode 100644 index 00000000..c6ebc01f --- /dev/null +++ b/node_modules/@actions/artifact/lib/internal/http-manager.d.ts @@ -0,0 +1,12 @@ +import { HttpClient } from '@actions/http-client'; +/** + * Used for managing http clients during either upload or download + */ +export declare class HttpManager { + private clients; + private userAgent; + constructor(clientCount: number, userAgent: string); + getClient(index: number): HttpClient; + disposeAndReplaceClient(index: number): void; + disposeAndReplaceAllClients(): void; +} diff --git a/node_modules/@actions/artifact/lib/internal/http-manager.js b/node_modules/@actions/artifact/lib/internal/http-manager.js new file mode 100644 index 00000000..3c5eb476 --- /dev/null +++ b/node_modules/@actions/artifact/lib/internal/http-manager.js @@ -0,0 +1,32 @@ +"use strict"; +Object.defineProperty(exports, "__esModule", { value: true }); +exports.HttpManager = void 0; +const utils_1 = require("./utils"); +/** + * Used for managing http clients during either upload or download + */ +class HttpManager { + constructor(clientCount, userAgent) { + if (clientCount < 1) { + throw new Error('There must be at least one client'); + } + this.userAgent = userAgent; + this.clients = new Array(clientCount).fill(utils_1.createHttpClient(userAgent)); + } + getClient(index) { + return this.clients[index]; + } + // client disposal is necessary if a keep-alive connection is used to properly close the connection + // for more information see: https://github.com/actions/http-client/blob/04e5ad73cd3fd1f5610a32116b0759eddf6570d2/index.ts#L292 + disposeAndReplaceClient(index) { + this.clients[index].dispose(); + this.clients[index] = utils_1.createHttpClient(this.userAgent); + } + disposeAndReplaceAllClients() { + for (const [index] of this.clients.entries()) { + this.disposeAndReplaceClient(index); + } + } +} +exports.HttpManager = HttpManager; +//# sourceMappingURL=http-manager.js.map \ No newline at end of file diff --git a/node_modules/@actions/artifact/lib/internal/http-manager.js.map b/node_modules/@actions/artifact/lib/internal/http-manager.js.map new file mode 100644 index 00000000..7c7a4acd --- /dev/null +++ b/node_modules/@actions/artifact/lib/internal/http-manager.js.map @@ -0,0 +1 @@ +{"version":3,"file":"http-manager.js","sourceRoot":"","sources":["../../src/internal/http-manager.ts"],"names":[],"mappings":";;;AACA,mCAAwC;AAExC;;GAEG;AACH,MAAa,WAAW;IAItB,YAAY,WAAmB,EAAE,SAAiB;QAChD,IAAI,WAAW,GAAG,CAAC,EAAE;YACnB,MAAM,IAAI,KAAK,CAAC,mCAAmC,CAAC,CAAA;SACrD;QACD,IAAI,CAAC,SAAS,GAAG,SAAS,CAAA;QAC1B,IAAI,CAAC,OAAO,GAAG,IAAI,KAAK,CAAC,WAAW,CAAC,CAAC,IAAI,CAAC,wBAAgB,CAAC,SAAS,CAAC,CAAC,CAAA;IACzE,CAAC;IAED,SAAS,CAAC,KAAa;QACrB,OAAO,IAAI,CAAC,OAAO,CAAC,KAAK,CAAC,CAAA;IAC5B,CAAC;IAED,mGAAmG;IACnG,+HAA+H;IAC/H,uBAAuB,CAAC,KAAa;QACnC,IAAI,CAAC,OAAO,CAAC,KAAK,CAAC,CAAC,OAAO,EAAE,CAAA;QAC7B,IAAI,CAAC,OAAO,CAAC,KAAK,CAAC,GAAG,wBAAgB,CAAC,IAAI,CAAC,SAAS,CAAC,CAAA;IACxD,CAAC;IAED,2BAA2B;QACzB,KAAK,MAAM,CAAC,KAAK,CAAC,IAAI,IAAI,CAAC,OAAO,CAAC,OAAO,EAAE,EAAE;YAC5C,IAAI,CAAC,uBAAuB,CAAC,KAAK,CAAC,CAAA;SACpC;IACH,CAAC;CACF;AA5BD,kCA4BC"} \ No newline at end of file diff --git a/node_modules/@actions/artifact/lib/internal/path-and-artifact-name-validation.d.ts b/node_modules/@actions/artifact/lib/internal/path-and-artifact-name-validation.d.ts new file mode 100644 index 00000000..04aea3d2 --- /dev/null +++ b/node_modules/@actions/artifact/lib/internal/path-and-artifact-name-validation.d.ts @@ -0,0 +1,8 @@ +/** + * 
Scans the name of the artifact to make sure there are no illegal characters + */ +export declare function checkArtifactName(name: string): void; +/** + * Scans the name of the filePath used to make sure there are no illegal characters + */ +export declare function checkArtifactFilePath(path: string): void; diff --git a/node_modules/@actions/artifact/lib/internal/path-and-artifact-name-validation.js b/node_modules/@actions/artifact/lib/internal/path-and-artifact-name-validation.js new file mode 100644 index 00000000..7fb2b02b --- /dev/null +++ b/node_modules/@actions/artifact/lib/internal/path-and-artifact-name-validation.js @@ -0,0 +1,67 @@ +"use strict"; +Object.defineProperty(exports, "__esModule", { value: true }); +exports.checkArtifactFilePath = exports.checkArtifactName = void 0; +const core_1 = require("@actions/core"); +/** + * Invalid characters that cannot be in the artifact name or an uploaded file. Will be rejected + * from the server if attempted to be sent over. These characters are not allowed due to limitations with certain + * file systems such as NTFS. To maintain platform-agnostic behavior, all characters that are not supported by an + * individual filesystem/platform will not be supported on all fileSystems/platforms + * + * FilePaths can include characters such as \ and / which are not permitted in the artifact name alone + */ +const invalidArtifactFilePathCharacters = new Map([ + ['"', ' Double quote "'], + [':', ' Colon :'], + ['<', ' Less than <'], + ['>', ' Greater than >'], + ['|', ' Vertical bar |'], + ['*', ' Asterisk *'], + ['?', ' Question mark ?'], + ['\r', ' Carriage return \\r'], + ['\n', ' Line feed \\n'] +]); +const invalidArtifactNameCharacters = new Map([ + ...invalidArtifactFilePathCharacters, + ['\\', ' Backslash \\'], + ['/', ' Forward slash /'] +]); +/** + * Scans the name of the artifact to make sure there are no illegal characters + */ +function checkArtifactName(name) { + if (!name) { + throw new Error(`Artifact name: ${name}, is incorrectly provided`); + } + for (const [invalidCharacterKey, errorMessageForCharacter] of invalidArtifactNameCharacters) { + if (name.includes(invalidCharacterKey)) { + throw new Error(`Artifact name is not valid: ${name}. Contains the following character: ${errorMessageForCharacter} + +Invalid characters include: ${Array.from(invalidArtifactNameCharacters.values()).toString()} + +These characters are not allowed in the artifact name due to limitations with certain file systems such as NTFS. To maintain file system agnostic behavior, these characters are intentionally not allowed to prevent potential problems with downloads on different file systems.`); + } + } + core_1.info(`Artifact name is valid!`); +} +exports.checkArtifactName = checkArtifactName; +/** + * Scans the name of the filePath used to make sure there are no illegal characters + */ +function checkArtifactFilePath(path) { + if (!path) { + throw new Error(`Artifact path: ${path}, is incorrectly provided`); + } + for (const [invalidCharacterKey, errorMessageForCharacter] of invalidArtifactFilePathCharacters) { + if (path.includes(invalidCharacterKey)) { + throw new Error(`Artifact path is not valid: ${path}. Contains the following character: ${errorMessageForCharacter} + +Invalid characters include: ${Array.from(invalidArtifactFilePathCharacters.values()).toString()} + +The following characters are not allowed in files that are uploaded due to limitations with certain file systems such as NTFS. 
To maintain file system agnostic behavior, these characters are intentionally not allowed to prevent potential problems with downloads on different file systems. + `); + } + } +} +exports.checkArtifactFilePath = checkArtifactFilePath; +//# sourceMappingURL=path-and-artifact-name-validation.js.map \ No newline at end of file diff --git a/node_modules/@actions/artifact/lib/internal/path-and-artifact-name-validation.js.map b/node_modules/@actions/artifact/lib/internal/path-and-artifact-name-validation.js.map new file mode 100644 index 00000000..2070858d --- /dev/null +++ b/node_modules/@actions/artifact/lib/internal/path-and-artifact-name-validation.js.map @@ -0,0 +1 @@ +{"version":3,"file":"path-and-artifact-name-validation.js","sourceRoot":"","sources":["../../src/internal/path-and-artifact-name-validation.ts"],"names":[],"mappings":";;;AAAA,wCAAkC;AAElC;;;;;;;GAOG;AACH,MAAM,iCAAiC,GAAG,IAAI,GAAG,CAAiB;IAChE,CAAC,GAAG,EAAE,iBAAiB,CAAC;IACxB,CAAC,GAAG,EAAE,UAAU,CAAC;IACjB,CAAC,GAAG,EAAE,cAAc,CAAC;IACrB,CAAC,GAAG,EAAE,iBAAiB,CAAC;IACxB,CAAC,GAAG,EAAE,iBAAiB,CAAC;IACxB,CAAC,GAAG,EAAE,aAAa,CAAC;IACpB,CAAC,GAAG,EAAE,kBAAkB,CAAC;IACzB,CAAC,IAAI,EAAE,sBAAsB,CAAC;IAC9B,CAAC,IAAI,EAAE,gBAAgB,CAAC;CACzB,CAAC,CAAA;AAEF,MAAM,6BAA6B,GAAG,IAAI,GAAG,CAAiB;IAC5D,GAAG,iCAAiC;IACpC,CAAC,IAAI,EAAE,eAAe,CAAC;IACvB,CAAC,GAAG,EAAE,kBAAkB,CAAC;CAC1B,CAAC,CAAA;AAEF;;GAEG;AACH,SAAgB,iBAAiB,CAAC,IAAY;IAC5C,IAAI,CAAC,IAAI,EAAE;QACT,MAAM,IAAI,KAAK,CAAC,kBAAkB,IAAI,2BAA2B,CAAC,CAAA;KACnE;IAED,KAAK,MAAM,CACT,mBAAmB,EACnB,wBAAwB,CACzB,IAAI,6BAA6B,EAAE;QAClC,IAAI,IAAI,CAAC,QAAQ,CAAC,mBAAmB,CAAC,EAAE;YACtC,MAAM,IAAI,KAAK,CACb,+BAA+B,IAAI,uCAAuC,wBAAwB;;8BAE5E,KAAK,CAAC,IAAI,CAC9B,6BAA6B,CAAC,MAAM,EAAE,CACvC,CAAC,QAAQ,EAAE;;mRAE+P,CAC5Q,CAAA;SACF;KACF;IAED,WAAI,CAAC,yBAAyB,CAAC,CAAA;AACjC,CAAC;AAvBD,8CAuBC;AAED;;GAEG;AACH,SAAgB,qBAAqB,CAAC,IAAY;IAChD,IAAI,CAAC,IAAI,EAAE;QACT,MAAM,IAAI,KAAK,CAAC,kBAAkB,IAAI,2BAA2B,CAAC,CAAA;KACnE;IAED,KAAK,MAAM,CACT,mBAAmB,EACnB,wBAAwB,CACzB,IAAI,iCAAiC,EAAE;QACtC,IAAI,IAAI,CAAC,QAAQ,CAAC,mBAAmB,CAAC,EAAE;YACtC,MAAM,IAAI,KAAK,CACb,+BAA+B,IAAI,uCAAuC,wBAAwB;;8BAE5E,KAAK,CAAC,IAAI,CAC9B,iCAAiC,CAAC,MAAM,EAAE,CAC3C,CAAC,QAAQ,EAAE;;;WAGT,CACJ,CAAA;SACF;KACF;AACH,CAAC;AAtBD,sDAsBC"} \ No newline at end of file diff --git a/node_modules/@actions/artifact/lib/internal/requestUtils.d.ts b/node_modules/@actions/artifact/lib/internal/requestUtils.d.ts new file mode 100644 index 00000000..0a0fb99b --- /dev/null +++ b/node_modules/@actions/artifact/lib/internal/requestUtils.d.ts @@ -0,0 +1,3 @@ +import { HttpClientResponse } from '@actions/http-client'; +export declare function retry(name: string, operation: () => Promise, customErrorMessages: Map, maxAttempts: number): Promise; +export declare function retryHttpClientRequest(name: string, method: () => Promise, customErrorMessages?: Map, maxAttempts?: number): Promise; diff --git a/node_modules/@actions/artifact/lib/internal/requestUtils.js b/node_modules/@actions/artifact/lib/internal/requestUtils.js new file mode 100644 index 00000000..2d3dfca8 --- /dev/null +++ b/node_modules/@actions/artifact/lib/internal/requestUtils.js @@ -0,0 +1,88 @@ +"use strict"; +var __createBinding = (this && this.__createBinding) || (Object.create ? (function(o, m, k, k2) { + if (k2 === undefined) k2 = k; + Object.defineProperty(o, k2, { enumerable: true, get: function() { return m[k]; } }); +}) : (function(o, m, k, k2) { + if (k2 === undefined) k2 = k; + o[k2] = m[k]; +})); +var __setModuleDefault = (this && this.__setModuleDefault) || (Object.create ? 
(function(o, v) { + Object.defineProperty(o, "default", { enumerable: true, value: v }); +}) : function(o, v) { + o["default"] = v; +}); +var __importStar = (this && this.__importStar) || function (mod) { + if (mod && mod.__esModule) return mod; + var result = {}; + if (mod != null) for (var k in mod) if (k !== "default" && Object.hasOwnProperty.call(mod, k)) __createBinding(result, mod, k); + __setModuleDefault(result, mod); + return result; +}; +var __awaiter = (this && this.__awaiter) || function (thisArg, _arguments, P, generator) { + function adopt(value) { return value instanceof P ? value : new P(function (resolve) { resolve(value); }); } + return new (P || (P = Promise))(function (resolve, reject) { + function fulfilled(value) { try { step(generator.next(value)); } catch (e) { reject(e); } } + function rejected(value) { try { step(generator["throw"](value)); } catch (e) { reject(e); } } + function step(result) { result.done ? resolve(result.value) : adopt(result.value).then(fulfilled, rejected); } + step((generator = generator.apply(thisArg, _arguments || [])).next()); + }); +}; +Object.defineProperty(exports, "__esModule", { value: true }); +exports.retryHttpClientRequest = exports.retry = void 0; +const utils_1 = require("./utils"); +const core = __importStar(require("@actions/core")); +const config_variables_1 = require("./config-variables"); +function retry(name, operation, customErrorMessages, maxAttempts) { + return __awaiter(this, void 0, void 0, function* () { + let response = undefined; + let statusCode = undefined; + let isRetryable = false; + let errorMessage = ''; + let customErrorInformation = undefined; + let attempt = 1; + while (attempt <= maxAttempts) { + try { + response = yield operation(); + statusCode = response.message.statusCode; + if (utils_1.isSuccessStatusCode(statusCode)) { + return response; + } + // Extra error information that we want to display if a particular response code is hit + if (statusCode) { + customErrorInformation = customErrorMessages.get(statusCode); + } + isRetryable = utils_1.isRetryableStatusCode(statusCode); + errorMessage = `Artifact service responded with ${statusCode}`; + } + catch (error) { + isRetryable = true; + errorMessage = error.message; + } + if (!isRetryable) { + core.info(`${name} - Error is not retryable`); + if (response) { + utils_1.displayHttpDiagnostics(response); + } + break; + } + core.info(`${name} - Attempt ${attempt} of ${maxAttempts} failed with error: ${errorMessage}`); + yield utils_1.sleep(utils_1.getExponentialRetryTimeInMilliseconds(attempt)); + attempt++; + } + if (response) { + utils_1.displayHttpDiagnostics(response); + } + if (customErrorInformation) { + throw Error(`${name} failed: ${customErrorInformation}`); + } + throw Error(`${name} failed: ${errorMessage}`); + }); +} +exports.retry = retry; +function retryHttpClientRequest(name, method, customErrorMessages = new Map(), maxAttempts = config_variables_1.getRetryLimit()) { + return __awaiter(this, void 0, void 0, function* () { + return yield retry(name, method, customErrorMessages, maxAttempts); + }); +} +exports.retryHttpClientRequest = retryHttpClientRequest; +//# sourceMappingURL=requestUtils.js.map \ No newline at end of file diff --git a/node_modules/@actions/artifact/lib/internal/requestUtils.js.map b/node_modules/@actions/artifact/lib/internal/requestUtils.js.map new file mode 100644 index 00000000..a09c474e --- /dev/null +++ b/node_modules/@actions/artifact/lib/internal/requestUtils.js.map @@ -0,0 +1 @@ 
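Aside (not part of the vendored files): the `retry` / `retryHttpClientRequest` helpers added above wrap any `@actions/http-client` call with exponential backoff and optional per-status-code error messages. A minimal sketch of how they are typically driven follows; the user agent string, URL, and the custom 403 message are placeholders, and the relative import path assumes code living next to `requestUtils`.

```typescript
// Illustrative sketch only. Placeholder URL and messages; not part of the diff above.
import {HttpClient} from '@actions/http-client'
import {retryHttpClientRequest} from './requestUtils'

async function getContainerInfo(): Promise<string> {
  const client = new HttpClient('artifact-example')

  // Extra context that is surfaced only if this exact status code is returned
  const customErrorMessages = new Map<number, string>([
    [403, 'Artifact storage quota has been hit']
  ])

  // Retries retryable status codes with exponential backoff,
  // up to getRetryLimit() attempts unless maxAttempts is passed explicitly
  const response = await retryHttpClientRequest(
    'Get container info',
    async () => client.get('https://example.invalid/container'),
    customErrorMessages
  )
  return response.readBody()
}
```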
+{"version":3,"file":"requestUtils.js","sourceRoot":"","sources":["../../src/internal/requestUtils.ts"],"names":[],"mappings":";;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;AACA,mCAMgB;AAChB,oDAAqC;AACrC,yDAAgD;AAEhD,SAAsB,KAAK,CACzB,IAAY,EACZ,SAA4C,EAC5C,mBAAwC,EACxC,WAAmB;;QAEnB,IAAI,QAAQ,GAAmC,SAAS,CAAA;QACxD,IAAI,UAAU,GAAuB,SAAS,CAAA;QAC9C,IAAI,WAAW,GAAG,KAAK,CAAA;QACvB,IAAI,YAAY,GAAG,EAAE,CAAA;QACrB,IAAI,sBAAsB,GAAuB,SAAS,CAAA;QAC1D,IAAI,OAAO,GAAG,CAAC,CAAA;QAEf,OAAO,OAAO,IAAI,WAAW,EAAE;YAC7B,IAAI;gBACF,QAAQ,GAAG,MAAM,SAAS,EAAE,CAAA;gBAC5B,UAAU,GAAG,QAAQ,CAAC,OAAO,CAAC,UAAU,CAAA;gBAExC,IAAI,2BAAmB,CAAC,UAAU,CAAC,EAAE;oBACnC,OAAO,QAAQ,CAAA;iBAChB;gBAED,uFAAuF;gBACvF,IAAI,UAAU,EAAE;oBACd,sBAAsB,GAAG,mBAAmB,CAAC,GAAG,CAAC,UAAU,CAAC,CAAA;iBAC7D;gBAED,WAAW,GAAG,6BAAqB,CAAC,UAAU,CAAC,CAAA;gBAC/C,YAAY,GAAG,mCAAmC,UAAU,EAAE,CAAA;aAC/D;YAAC,OAAO,KAAK,EAAE;gBACd,WAAW,GAAG,IAAI,CAAA;gBAClB,YAAY,GAAG,KAAK,CAAC,OAAO,CAAA;aAC7B;YAED,IAAI,CAAC,WAAW,EAAE;gBAChB,IAAI,CAAC,IAAI,CAAC,GAAG,IAAI,2BAA2B,CAAC,CAAA;gBAC7C,IAAI,QAAQ,EAAE;oBACZ,8BAAsB,CAAC,QAAQ,CAAC,CAAA;iBACjC;gBACD,MAAK;aACN;YAED,IAAI,CAAC,IAAI,CACP,GAAG,IAAI,cAAc,OAAO,OAAO,WAAW,uBAAuB,YAAY,EAAE,CACpF,CAAA;YAED,MAAM,aAAK,CAAC,6CAAqC,CAAC,OAAO,CAAC,CAAC,CAAA;YAC3D,OAAO,EAAE,CAAA;SACV;QAED,IAAI,QAAQ,EAAE;YACZ,8BAAsB,CAAC,QAAQ,CAAC,CAAA;SACjC;QAED,IAAI,sBAAsB,EAAE;YAC1B,MAAM,KAAK,CAAC,GAAG,IAAI,YAAY,sBAAsB,EAAE,CAAC,CAAA;SACzD;QACD,MAAM,KAAK,CAAC,GAAG,IAAI,YAAY,YAAY,EAAE,CAAC,CAAA;IAChD,CAAC;CAAA;AA1DD,sBA0DC;AAED,SAAsB,sBAAsB,CAC1C,IAAY,EACZ,MAAyC,EACzC,sBAA2C,IAAI,GAAG,EAAE,EACpD,WAAW,GAAG,gCAAa,EAAE;;QAE7B,OAAO,MAAM,KAAK,CAAC,IAAI,EAAE,MAAM,EAAE,mBAAmB,EAAE,WAAW,CAAC,CAAA;IACpE,CAAC;CAAA;AAPD,wDAOC"} \ No newline at end of file diff --git a/node_modules/@actions/artifact/lib/internal/status-reporter.d.ts b/node_modules/@actions/artifact/lib/internal/status-reporter.d.ts new file mode 100644 index 00000000..e301f7ac --- /dev/null +++ b/node_modules/@actions/artifact/lib/internal/status-reporter.d.ts @@ -0,0 +1,21 @@ +/** + * Status Reporter that displays information about the progress/status of an artifact that is being uploaded or downloaded + * + * Variable display time that can be adjusted using the displayFrequencyInMilliseconds variable + * The total status of the upload/download gets displayed according to this value + * If there is a large file that is being uploaded, extra information about the individual status can also be displayed using the updateLargeFileStatus function + */ +export declare class StatusReporter { + private totalNumberOfFilesToProcess; + private processedCount; + private displayFrequencyInMilliseconds; + private largeFiles; + private totalFileStatus; + constructor(displayFrequencyInMilliseconds: number); + setTotalNumberOfFilesToProcess(fileTotal: number): void; + start(): void; + updateLargeFileStatus(fileName: string, chunkStartIndex: number, chunkEndIndex: number, totalUploadFileSize: number): void; + stop(): void; + incrementProcessedCount(): void; + private formatPercentage; +} diff --git a/node_modules/@actions/artifact/lib/internal/status-reporter.js b/node_modules/@actions/artifact/lib/internal/status-reporter.js new file mode 100644 index 00000000..46b77a42 --- /dev/null +++ b/node_modules/@actions/artifact/lib/internal/status-reporter.js @@ -0,0 +1,52 @@ +"use strict"; +Object.defineProperty(exports, "__esModule", { value: true }); +exports.StatusReporter = void 0; +const core_1 = require("@actions/core"); +/** + * Status Reporter that displays information about the progress/status of an artifact that is being uploaded 
or downloaded + * + * Variable display time that can be adjusted using the displayFrequencyInMilliseconds variable + * The total status of the upload/download gets displayed according to this value + * If there is a large file that is being uploaded, extra information about the individual status can also be displayed using the updateLargeFileStatus function + */ +class StatusReporter { + constructor(displayFrequencyInMilliseconds) { + this.totalNumberOfFilesToProcess = 0; + this.processedCount = 0; + this.largeFiles = new Map(); + this.totalFileStatus = undefined; + this.displayFrequencyInMilliseconds = displayFrequencyInMilliseconds; + } + setTotalNumberOfFilesToProcess(fileTotal) { + this.totalNumberOfFilesToProcess = fileTotal; + this.processedCount = 0; + } + start() { + // displays information about the total upload/download status + this.totalFileStatus = setInterval(() => { + // display 1 decimal place without any rounding + const percentage = this.formatPercentage(this.processedCount, this.totalNumberOfFilesToProcess); + core_1.info(`Total file count: ${this.totalNumberOfFilesToProcess} ---- Processed file #${this.processedCount} (${percentage.slice(0, percentage.indexOf('.') + 2)}%)`); + }, this.displayFrequencyInMilliseconds); + } + // if there is a large file that is being uploaded in chunks, this is used to display extra information about the status of the upload + updateLargeFileStatus(fileName, chunkStartIndex, chunkEndIndex, totalUploadFileSize) { + // display 1 decimal place without any rounding + const percentage = this.formatPercentage(chunkEndIndex, totalUploadFileSize); + core_1.info(`Uploaded ${fileName} (${percentage.slice(0, percentage.indexOf('.') + 2)}%) bytes ${chunkStartIndex}:${chunkEndIndex}`); + } + stop() { + if (this.totalFileStatus) { + clearInterval(this.totalFileStatus); + } + } + incrementProcessedCount() { + this.processedCount++; + } + formatPercentage(numerator, denominator) { + // toFixed() rounds, so use extra precision to display accurate information even though 4 decimal places are not displayed + return ((numerator / denominator) * 100).toFixed(4).toString(); + } +} +exports.StatusReporter = StatusReporter; +//# sourceMappingURL=status-reporter.js.map \ No newline at end of file diff --git a/node_modules/@actions/artifact/lib/internal/status-reporter.js.map b/node_modules/@actions/artifact/lib/internal/status-reporter.js.map new file mode 100644 index 00000000..ce4bb697 --- /dev/null +++ b/node_modules/@actions/artifact/lib/internal/status-reporter.js.map @@ -0,0 +1 @@ 
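Aside (not part of the vendored files): the `StatusReporter` above periodically logs overall progress and, for large chunked uploads, per-chunk progress. A minimal wiring sketch under assumed file counts and sizes; the 10000 ms frequency matches the value the upload client in this package passes to its own reporter.

```typescript
// Illustrative sketch only. File count and byte offsets are placeholders.
import {StatusReporter} from './status-reporter'

const reporter = new StatusReporter(10000)
reporter.setTotalNumberOfFilesToProcess(42)
reporter.start() // logs total progress via @actions/core every 10 seconds

// ...call after each file finishes uploading or downloading:
reporter.incrementProcessedCount()

// ...for a single large file uploaded in chunks, report per-chunk progress:
reporter.updateLargeFileStatus('large-file.bin', 0, 4194303, 67108864)

reporter.stop() // clears the interval timer started by start()
```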
+{"version":3,"file":"status-reporter.js","sourceRoot":"","sources":["../../src/internal/status-reporter.ts"],"names":[],"mappings":";;;AAAA,wCAAkC;AAElC;;;;;;GAMG;AAEH,MAAa,cAAc;IAOzB,YAAY,8BAAsC;QAN1C,gCAA2B,GAAG,CAAC,CAAA;QAC/B,mBAAc,GAAG,CAAC,CAAA;QAElB,eAAU,GAAG,IAAI,GAAG,EAAkB,CAAA;QAI5C,IAAI,CAAC,eAAe,GAAG,SAAS,CAAA;QAChC,IAAI,CAAC,8BAA8B,GAAG,8BAA8B,CAAA;IACtE,CAAC;IAED,8BAA8B,CAAC,SAAiB;QAC9C,IAAI,CAAC,2BAA2B,GAAG,SAAS,CAAA;QAC5C,IAAI,CAAC,cAAc,GAAG,CAAC,CAAA;IACzB,CAAC;IAED,KAAK;QACH,8DAA8D;QAC9D,IAAI,CAAC,eAAe,GAAG,WAAW,CAAC,GAAG,EAAE;YACtC,+CAA+C;YAC/C,MAAM,UAAU,GAAG,IAAI,CAAC,gBAAgB,CACtC,IAAI,CAAC,cAAc,EACnB,IAAI,CAAC,2BAA2B,CACjC,CAAA;YACD,WAAI,CACF,qBACE,IAAI,CAAC,2BACP,yBAAyB,IAAI,CAAC,cAAc,KAAK,UAAU,CAAC,KAAK,CAC/D,CAAC,EACD,UAAU,CAAC,OAAO,CAAC,GAAG,CAAC,GAAG,CAAC,CAC5B,IAAI,CACN,CAAA;QACH,CAAC,EAAE,IAAI,CAAC,8BAA8B,CAAC,CAAA;IACzC,CAAC;IAED,sIAAsI;IACtI,qBAAqB,CACnB,QAAgB,EAChB,eAAuB,EACvB,aAAqB,EACrB,mBAA2B;QAE3B,+CAA+C;QAC/C,MAAM,UAAU,GAAG,IAAI,CAAC,gBAAgB,CAAC,aAAa,EAAE,mBAAmB,CAAC,CAAA;QAC5E,WAAI,CACF,YAAY,QAAQ,KAAK,UAAU,CAAC,KAAK,CACvC,CAAC,EACD,UAAU,CAAC,OAAO,CAAC,GAAG,CAAC,GAAG,CAAC,CAC5B,YAAY,eAAe,IAAI,aAAa,EAAE,CAChD,CAAA;IACH,CAAC;IAED,IAAI;QACF,IAAI,IAAI,CAAC,eAAe,EAAE;YACxB,aAAa,CAAC,IAAI,CAAC,eAAe,CAAC,CAAA;SACpC;IACH,CAAC;IAED,uBAAuB;QACrB,IAAI,CAAC,cAAc,EAAE,CAAA;IACvB,CAAC;IAEO,gBAAgB,CAAC,SAAiB,EAAE,WAAmB;QAC7D,0HAA0H;QAC1H,OAAO,CAAC,CAAC,SAAS,GAAG,WAAW,CAAC,GAAG,GAAG,CAAC,CAAC,OAAO,CAAC,CAAC,CAAC,CAAC,QAAQ,EAAE,CAAA;IAChE,CAAC;CACF;AAnED,wCAmEC"} \ No newline at end of file diff --git a/node_modules/@actions/artifact/lib/internal/upload-gzip.d.ts b/node_modules/@actions/artifact/lib/internal/upload-gzip.d.ts new file mode 100644 index 00000000..d59a75be --- /dev/null +++ b/node_modules/@actions/artifact/lib/internal/upload-gzip.d.ts @@ -0,0 +1,14 @@ +/// +/** + * Creates a Gzip compressed file of an original file at the provided temporary filepath location + * @param {string} originalFilePath filepath of whatever will be compressed. The original file will be unmodified + * @param {string} tempFilePath the location of where the Gzip file will be created + * @returns the size of gzip file that gets created + */ +export declare function createGZipFileOnDisk(originalFilePath: string, tempFilePath: string): Promise; +/** + * Creates a GZip file in memory using a buffer. Should be used for smaller files to reduce disk I/O + * @param originalFilePath the path to the original file that is being GZipped + * @returns a buffer with the GZip file + */ +export declare function createGZipFileInBuffer(originalFilePath: string): Promise; diff --git a/node_modules/@actions/artifact/lib/internal/upload-gzip.js b/node_modules/@actions/artifact/lib/internal/upload-gzip.js new file mode 100644 index 00000000..b8e2e468 --- /dev/null +++ b/node_modules/@actions/artifact/lib/internal/upload-gzip.js @@ -0,0 +1,121 @@ +"use strict"; +var __createBinding = (this && this.__createBinding) || (Object.create ? (function(o, m, k, k2) { + if (k2 === undefined) k2 = k; + Object.defineProperty(o, k2, { enumerable: true, get: function() { return m[k]; } }); +}) : (function(o, m, k, k2) { + if (k2 === undefined) k2 = k; + o[k2] = m[k]; +})); +var __setModuleDefault = (this && this.__setModuleDefault) || (Object.create ? 
(function(o, v) { + Object.defineProperty(o, "default", { enumerable: true, value: v }); +}) : function(o, v) { + o["default"] = v; +}); +var __importStar = (this && this.__importStar) || function (mod) { + if (mod && mod.__esModule) return mod; + var result = {}; + if (mod != null) for (var k in mod) if (k !== "default" && Object.hasOwnProperty.call(mod, k)) __createBinding(result, mod, k); + __setModuleDefault(result, mod); + return result; +}; +var __awaiter = (this && this.__awaiter) || function (thisArg, _arguments, P, generator) { + function adopt(value) { return value instanceof P ? value : new P(function (resolve) { resolve(value); }); } + return new (P || (P = Promise))(function (resolve, reject) { + function fulfilled(value) { try { step(generator.next(value)); } catch (e) { reject(e); } } + function rejected(value) { try { step(generator["throw"](value)); } catch (e) { reject(e); } } + function step(result) { result.done ? resolve(result.value) : adopt(result.value).then(fulfilled, rejected); } + step((generator = generator.apply(thisArg, _arguments || [])).next()); + }); +}; +var __asyncValues = (this && this.__asyncValues) || function (o) { + if (!Symbol.asyncIterator) throw new TypeError("Symbol.asyncIterator is not defined."); + var m = o[Symbol.asyncIterator], i; + return m ? m.call(o) : (o = typeof __values === "function" ? __values(o) : o[Symbol.iterator](), i = {}, verb("next"), verb("throw"), verb("return"), i[Symbol.asyncIterator] = function () { return this; }, i); + function verb(n) { i[n] = o[n] && function (v) { return new Promise(function (resolve, reject) { v = o[n](v), settle(resolve, reject, v.done, v.value); }); }; } + function settle(resolve, reject, d, v) { Promise.resolve(v).then(function(v) { resolve({ value: v, done: d }); }, reject); } +}; +Object.defineProperty(exports, "__esModule", { value: true }); +exports.createGZipFileInBuffer = exports.createGZipFileOnDisk = void 0; +const fs = __importStar(require("fs")); +const zlib = __importStar(require("zlib")); +const util_1 = require("util"); +const stat = util_1.promisify(fs.stat); +/** + * GZipping certain files that are already compressed will likely not yield further size reductions. Creating large temporary gzip + * files then will just waste a lot of time before ultimately being discarded (especially for very large files). + * If any of these types of files are encountered then on-disk gzip creation will be skipped and the original file will be uploaded as-is + */ +const gzipExemptFileExtensions = [ + '.gzip', + '.zip', + '.tar.lz', + '.tar.gz', + '.tar.bz2', + '.7z' +]; +/** + * Creates a Gzip compressed file of an original file at the provided temporary filepath location + * @param {string} originalFilePath filepath of whatever will be compressed. 
The original file will be unmodified + * @param {string} tempFilePath the location of where the Gzip file will be created + * @returns the size of gzip file that gets created + */ +function createGZipFileOnDisk(originalFilePath, tempFilePath) { + return __awaiter(this, void 0, void 0, function* () { + for (const gzipExemptExtension of gzipExemptFileExtensions) { + if (originalFilePath.endsWith(gzipExemptExtension)) { + // return a really large number so that the original file gets uploaded + return Number.MAX_SAFE_INTEGER; + } + } + return new Promise((resolve, reject) => { + const inputStream = fs.createReadStream(originalFilePath); + const gzip = zlib.createGzip(); + const outputStream = fs.createWriteStream(tempFilePath); + inputStream.pipe(gzip).pipe(outputStream); + outputStream.on('finish', () => __awaiter(this, void 0, void 0, function* () { + // wait for stream to finish before calculating the size which is needed as part of the Content-Length header when starting an upload + const size = (yield stat(tempFilePath)).size; + resolve(size); + })); + outputStream.on('error', error => { + // eslint-disable-next-line no-console + console.log(error); + reject; + }); + }); + }); +} +exports.createGZipFileOnDisk = createGZipFileOnDisk; +/** + * Creates a GZip file in memory using a buffer. Should be used for smaller files to reduce disk I/O + * @param originalFilePath the path to the original file that is being GZipped + * @returns a buffer with the GZip file + */ +function createGZipFileInBuffer(originalFilePath) { + return __awaiter(this, void 0, void 0, function* () { + return new Promise((resolve) => __awaiter(this, void 0, void 0, function* () { + var e_1, _a; + const inputStream = fs.createReadStream(originalFilePath); + const gzip = zlib.createGzip(); + inputStream.pipe(gzip); + // read stream into buffer, using experimental async iterators see https://github.com/nodejs/readable-stream/issues/403#issuecomment-479069043 + const chunks = []; + try { + for (var gzip_1 = __asyncValues(gzip), gzip_1_1; gzip_1_1 = yield gzip_1.next(), !gzip_1_1.done;) { + const chunk = gzip_1_1.value; + chunks.push(chunk); + } + } + catch (e_1_1) { e_1 = { error: e_1_1 }; } + finally { + try { + if (gzip_1_1 && !gzip_1_1.done && (_a = gzip_1.return)) yield _a.call(gzip_1); + } + finally { if (e_1) throw e_1.error; } + } + resolve(Buffer.concat(chunks)); + })); + }); +} +exports.createGZipFileInBuffer = createGZipFileInBuffer; +//# sourceMappingURL=upload-gzip.js.map \ No newline at end of file diff --git a/node_modules/@actions/artifact/lib/internal/upload-gzip.js.map b/node_modules/@actions/artifact/lib/internal/upload-gzip.js.map new file mode 100644 index 00000000..b649695d --- /dev/null +++ b/node_modules/@actions/artifact/lib/internal/upload-gzip.js.map @@ -0,0 +1 @@ 
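Aside (not part of the vendored files): the two gzip helpers above split responsibility by file size, in-memory compression for small files and an on-disk temporary file for everything else. A sketch of that decision, mirroring the 64k threshold used by the upload client; the file paths are placeholders and `tmp-promise` is the same dependency the vendored code already uses.

```typescript
// Illustrative sketch only. Paths are placeholders; threshold mirrors the vendored upload client.
import * as fs from 'fs'
import * as tmp from 'tmp-promise'
import {createGZipFileInBuffer, createGZipFileOnDisk} from './upload-gzip'

async function gzipForUpload(originalFilePath: string): Promise<void> {
  const {size} = await fs.promises.stat(originalFilePath)

  if (size < 65536) {
    // Small files: compress in memory to avoid extra disk I/O
    const buffer = await createGZipFileInBuffer(originalFilePath)
    const helped = buffer.byteLength < size
    console.log(`in-memory gzip ${helped ? 'helped' : 'did not help'}: ${buffer.byteLength} vs ${size} bytes`)
  } else {
    // Large files: compress to a temporary file on disk
    const tempFile = await tmp.file()
    const gzipSize = await createGZipFileOnDisk(originalFilePath, tempFile.path)
    console.log(`on-disk gzip size: ${gzipSize} bytes (original ${size} bytes)`)
    await tempFile.cleanup() // remove the temporary gzip file
  }
}
```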
+{"version":3,"file":"upload-gzip.js","sourceRoot":"","sources":["../../src/internal/upload-gzip.ts"],"names":[],"mappings":";;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;AAAA,uCAAwB;AACxB,2CAA4B;AAC5B,+BAA8B;AAC9B,MAAM,IAAI,GAAG,gBAAS,CAAC,EAAE,CAAC,IAAI,CAAC,CAAA;AAE/B;;;;GAIG;AACH,MAAM,wBAAwB,GAAG;IAC/B,OAAO;IACP,MAAM;IACN,SAAS;IACT,SAAS;IACT,UAAU;IACV,KAAK;CACN,CAAA;AAED;;;;;GAKG;AACH,SAAsB,oBAAoB,CACxC,gBAAwB,EACxB,YAAoB;;QAEpB,KAAK,MAAM,mBAAmB,IAAI,wBAAwB,EAAE;YAC1D,IAAI,gBAAgB,CAAC,QAAQ,CAAC,mBAAmB,CAAC,EAAE;gBAClD,uEAAuE;gBACvE,OAAO,MAAM,CAAC,gBAAgB,CAAA;aAC/B;SACF;QAED,OAAO,IAAI,OAAO,CAAC,CAAC,OAAO,EAAE,MAAM,EAAE,EAAE;YACrC,MAAM,WAAW,GAAG,EAAE,CAAC,gBAAgB,CAAC,gBAAgB,CAAC,CAAA;YACzD,MAAM,IAAI,GAAG,IAAI,CAAC,UAAU,EAAE,CAAA;YAC9B,MAAM,YAAY,GAAG,EAAE,CAAC,iBAAiB,CAAC,YAAY,CAAC,CAAA;YACvD,WAAW,CAAC,IAAI,CAAC,IAAI,CAAC,CAAC,IAAI,CAAC,YAAY,CAAC,CAAA;YACzC,YAAY,CAAC,EAAE,CAAC,QAAQ,EAAE,GAAS,EAAE;gBACnC,qIAAqI;gBACrI,MAAM,IAAI,GAAG,CAAC,MAAM,IAAI,CAAC,YAAY,CAAC,CAAC,CAAC,IAAI,CAAA;gBAC5C,OAAO,CAAC,IAAI,CAAC,CAAA;YACf,CAAC,CAAA,CAAC,CAAA;YACF,YAAY,CAAC,EAAE,CAAC,OAAO,EAAE,KAAK,CAAC,EAAE;gBAC/B,sCAAsC;gBACtC,OAAO,CAAC,GAAG,CAAC,KAAK,CAAC,CAAA;gBAClB,MAAM,CAAA;YACR,CAAC,CAAC,CAAA;QACJ,CAAC,CAAC,CAAA;IACJ,CAAC;CAAA;AA3BD,oDA2BC;AAED;;;;GAIG;AACH,SAAsB,sBAAsB,CAC1C,gBAAwB;;QAExB,OAAO,IAAI,OAAO,CAAC,CAAM,OAAO,EAAC,EAAE;;YACjC,MAAM,WAAW,GAAG,EAAE,CAAC,gBAAgB,CAAC,gBAAgB,CAAC,CAAA;YACzD,MAAM,IAAI,GAAG,IAAI,CAAC,UAAU,EAAE,CAAA;YAC9B,WAAW,CAAC,IAAI,CAAC,IAAI,CAAC,CAAA;YACtB,8IAA8I;YAC9I,MAAM,MAAM,GAAG,EAAE,CAAA;;gBACjB,KAA0B,IAAA,SAAA,cAAA,IAAI,CAAA,UAAA;oBAAnB,MAAM,KAAK,iBAAA,CAAA;oBACpB,MAAM,CAAC,IAAI,CAAC,KAAK,CAAC,CAAA;iBACnB;;;;;;;;;YACD,OAAO,CAAC,MAAM,CAAC,MAAM,CAAC,MAAM,CAAC,CAAC,CAAA;QAChC,CAAC,CAAA,CAAC,CAAA;IACJ,CAAC;CAAA;AAdD,wDAcC"} \ No newline at end of file diff --git a/node_modules/@actions/artifact/lib/internal/upload-http-client.d.ts b/node_modules/@actions/artifact/lib/internal/upload-http-client.d.ts new file mode 100644 index 00000000..bef81e19 --- /dev/null +++ b/node_modules/@actions/artifact/lib/internal/upload-http-client.d.ts @@ -0,0 +1,48 @@ +import { ArtifactResponse, UploadResults } from './contracts'; +import { UploadSpecification } from './upload-specification'; +import { UploadOptions } from './upload-options'; +export declare class UploadHttpClient { + private uploadHttpManager; + private statusReporter; + constructor(); + /** + * Creates a file container for the new artifact in the remote blob storage/file service + * @param {string} artifactName Name of the artifact being created + * @returns The response from the Artifact Service if the file container was successfully created + */ + createArtifactInFileContainer(artifactName: string, options?: UploadOptions | undefined): Promise; + /** + * Concurrently upload all of the files in chunks + * @param {string} uploadUrl Base Url for the artifact that was created + * @param {SearchResult[]} filesToUpload A list of information about the files being uploaded + * @returns The size of all the files uploaded in bytes + */ + uploadArtifactToFileContainer(uploadUrl: string, filesToUpload: UploadSpecification[], options?: UploadOptions): Promise; + /** + * Asynchronously uploads a file. The file is compressed and uploaded using GZip if it is determined to save space. 
+ * If the upload file is bigger than the max chunk size it will be uploaded via multiple calls + * @param {number} httpClientIndex The index of the httpClient that is being used to make all of the calls + * @param {UploadFileParameters} parameters Information about the file that needs to be uploaded + * @returns The size of the file that was uploaded in bytes along with any failed uploads + */ + private uploadFileAsync; + /** + * Uploads a chunk of an individual file to the specified resourceUrl. If the upload fails and the status code + * indicates a retryable status, we try to upload the chunk as well + * @param {number} httpClientIndex The index of the httpClient being used to make all the necessary calls + * @param {string} resourceUrl Url of the resource that the chunk will be uploaded to + * @param {NodeJS.ReadableStream} openStream Stream of the file that will be uploaded + * @param {number} start Starting byte index of file that the chunk belongs to + * @param {number} end Ending byte index of file that the chunk belongs to + * @param {number} uploadFileSize Total size of the file in bytes that is being uploaded + * @param {boolean} isGzip Denotes if we are uploading a Gzip compressed stream + * @param {number} totalFileSize Original total size of the file that is being uploaded + * @returns if the chunk was successfully uploaded + */ + private uploadChunk; + /** + * Updates the size of the artifact from -1 which was initially set when the container was first created for the artifact. + * Updating the size indicates that we are done uploading all the contents of the artifact + */ + patchArtifactSize(size: number, artifactName: string): Promise; +} diff --git a/node_modules/@actions/artifact/lib/internal/upload-http-client.js b/node_modules/@actions/artifact/lib/internal/upload-http-client.js new file mode 100644 index 00000000..bec9a6a4 --- /dev/null +++ b/node_modules/@actions/artifact/lib/internal/upload-http-client.js @@ -0,0 +1,409 @@ +"use strict"; +var __createBinding = (this && this.__createBinding) || (Object.create ? (function(o, m, k, k2) { + if (k2 === undefined) k2 = k; + Object.defineProperty(o, k2, { enumerable: true, get: function() { return m[k]; } }); +}) : (function(o, m, k, k2) { + if (k2 === undefined) k2 = k; + o[k2] = m[k]; +})); +var __setModuleDefault = (this && this.__setModuleDefault) || (Object.create ? (function(o, v) { + Object.defineProperty(o, "default", { enumerable: true, value: v }); +}) : function(o, v) { + o["default"] = v; +}); +var __importStar = (this && this.__importStar) || function (mod) { + if (mod && mod.__esModule) return mod; + var result = {}; + if (mod != null) for (var k in mod) if (k !== "default" && Object.hasOwnProperty.call(mod, k)) __createBinding(result, mod, k); + __setModuleDefault(result, mod); + return result; +}; +var __awaiter = (this && this.__awaiter) || function (thisArg, _arguments, P, generator) { + function adopt(value) { return value instanceof P ? value : new P(function (resolve) { resolve(value); }); } + return new (P || (P = Promise))(function (resolve, reject) { + function fulfilled(value) { try { step(generator.next(value)); } catch (e) { reject(e); } } + function rejected(value) { try { step(generator["throw"](value)); } catch (e) { reject(e); } } + function step(result) { result.done ? 
resolve(result.value) : adopt(result.value).then(fulfilled, rejected); } + step((generator = generator.apply(thisArg, _arguments || [])).next()); + }); +}; +Object.defineProperty(exports, "__esModule", { value: true }); +exports.UploadHttpClient = void 0; +const fs = __importStar(require("fs")); +const core = __importStar(require("@actions/core")); +const tmp = __importStar(require("tmp-promise")); +const stream = __importStar(require("stream")); +const utils_1 = require("./utils"); +const config_variables_1 = require("./config-variables"); +const util_1 = require("util"); +const url_1 = require("url"); +const perf_hooks_1 = require("perf_hooks"); +const status_reporter_1 = require("./status-reporter"); +const http_client_1 = require("@actions/http-client"); +const http_manager_1 = require("./http-manager"); +const upload_gzip_1 = require("./upload-gzip"); +const requestUtils_1 = require("./requestUtils"); +const stat = util_1.promisify(fs.stat); +class UploadHttpClient { + constructor() { + this.uploadHttpManager = new http_manager_1.HttpManager(config_variables_1.getUploadFileConcurrency(), '@actions/artifact-upload'); + this.statusReporter = new status_reporter_1.StatusReporter(10000); + } + /** + * Creates a file container for the new artifact in the remote blob storage/file service + * @param {string} artifactName Name of the artifact being created + * @returns The response from the Artifact Service if the file container was successfully created + */ + createArtifactInFileContainer(artifactName, options) { + return __awaiter(this, void 0, void 0, function* () { + const parameters = { + Type: 'actions_storage', + Name: artifactName + }; + // calculate retention period + if (options && options.retentionDays) { + const maxRetentionStr = config_variables_1.getRetentionDays(); + parameters.RetentionDays = utils_1.getProperRetention(options.retentionDays, maxRetentionStr); + } + const data = JSON.stringify(parameters, null, 2); + const artifactUrl = utils_1.getArtifactUrl(); + // use the first client from the httpManager, `keep-alive` is not used so the connection will close immediately + const client = this.uploadHttpManager.getClient(0); + const headers = utils_1.getUploadHeaders('application/json', false); + // Extra information to display when a particular HTTP code is returned + // If a 403 is returned when trying to create a file container, the customer has exceeded + // their storage quota so no new artifact containers can be created + const customErrorMessages = new Map([ + [ + http_client_1.HttpCodes.Forbidden, + 'Artifact storage quota has been hit. Unable to upload any new artifacts' + ], + [ + http_client_1.HttpCodes.BadRequest, + `The artifact name ${artifactName} is not valid. 
Request URL ${artifactUrl}` + ] + ]); + const response = yield requestUtils_1.retryHttpClientRequest('Create Artifact Container', () => __awaiter(this, void 0, void 0, function* () { return client.post(artifactUrl, data, headers); }), customErrorMessages); + const body = yield response.readBody(); + return JSON.parse(body); + }); + } + /** + * Concurrently upload all of the files in chunks + * @param {string} uploadUrl Base Url for the artifact that was created + * @param {SearchResult[]} filesToUpload A list of information about the files being uploaded + * @returns The size of all the files uploaded in bytes + */ + uploadArtifactToFileContainer(uploadUrl, filesToUpload, options) { + return __awaiter(this, void 0, void 0, function* () { + const FILE_CONCURRENCY = config_variables_1.getUploadFileConcurrency(); + const MAX_CHUNK_SIZE = config_variables_1.getUploadChunkSize(); + core.debug(`File Concurrency: ${FILE_CONCURRENCY}, and Chunk Size: ${MAX_CHUNK_SIZE}`); + const parameters = []; + // by default, file uploads will continue if there is an error unless specified differently in the options + let continueOnError = true; + if (options) { + if (options.continueOnError === false) { + continueOnError = false; + } + } + // prepare the necessary parameters to upload all the files + for (const file of filesToUpload) { + const resourceUrl = new url_1.URL(uploadUrl); + resourceUrl.searchParams.append('itemPath', file.uploadFilePath); + parameters.push({ + file: file.absoluteFilePath, + resourceUrl: resourceUrl.toString(), + maxChunkSize: MAX_CHUNK_SIZE, + continueOnError + }); + } + const parallelUploads = [...new Array(FILE_CONCURRENCY).keys()]; + const failedItemsToReport = []; + let currentFile = 0; + let completedFiles = 0; + let uploadFileSize = 0; + let totalFileSize = 0; + let abortPendingFileUploads = false; + this.statusReporter.setTotalNumberOfFilesToProcess(filesToUpload.length); + this.statusReporter.start(); + // only allow a certain amount of files to be uploaded at once, this is done to reduce potential errors + yield Promise.all(parallelUploads.map((index) => __awaiter(this, void 0, void 0, function* () { + while (currentFile < filesToUpload.length) { + const currentFileParameters = parameters[currentFile]; + currentFile += 1; + if (abortPendingFileUploads) { + failedItemsToReport.push(currentFileParameters.file); + continue; + } + const startTime = perf_hooks_1.performance.now(); + const uploadFileResult = yield this.uploadFileAsync(index, currentFileParameters); + if (core.isDebug()) { + core.debug(`File: ${++completedFiles}/${filesToUpload.length}. ${currentFileParameters.file} took ${(perf_hooks_1.performance.now() - startTime).toFixed(3)} milliseconds to finish upload`); + } + uploadFileSize += uploadFileResult.successfulUploadSize; + totalFileSize += uploadFileResult.totalSize; + if (uploadFileResult.isSuccess === false) { + failedItemsToReport.push(currentFileParameters.file); + if (!continueOnError) { + // fail fast + core.error(`aborting artifact upload`); + abortPendingFileUploads = true; + } + } + this.statusReporter.incrementProcessedCount(); + } + }))); + this.statusReporter.stop(); + // done uploading, safety dispose all connections + this.uploadHttpManager.disposeAndReplaceAllClients(); + core.info(`Total size of all the files uploaded is ${uploadFileSize} bytes`); + return { + uploadSize: uploadFileSize, + totalSize: totalFileSize, + failedItems: failedItemsToReport + }; + }); + } + /** + * Asynchronously uploads a file. 
The file is compressed and uploaded using GZip if it is determined to save space. + * If the upload file is bigger than the max chunk size it will be uploaded via multiple calls + * @param {number} httpClientIndex The index of the httpClient that is being used to make all of the calls + * @param {UploadFileParameters} parameters Information about the file that needs to be uploaded + * @returns The size of the file that was uploaded in bytes along with any failed uploads + */ + uploadFileAsync(httpClientIndex, parameters) { + return __awaiter(this, void 0, void 0, function* () { + const fileStat = yield stat(parameters.file); + const totalFileSize = fileStat.size; + const isFIFO = fileStat.isFIFO(); + let offset = 0; + let isUploadSuccessful = true; + let failedChunkSizes = 0; + let uploadFileSize = 0; + let isGzip = true; + // the file that is being uploaded is less than 64k in size to increase throughput and to minimize disk I/O + // for creating a new GZip file, an in-memory buffer is used for compression + // with named pipes the file size is reported as zero in that case don't read the file in memory + if (!isFIFO && totalFileSize < 65536) { + core.debug(`${parameters.file} is less than 64k in size. Creating a gzip file in-memory to potentially reduce the upload size`); + const buffer = yield upload_gzip_1.createGZipFileInBuffer(parameters.file); + // An open stream is needed in the event of a failure and we need to retry. If a NodeJS.ReadableStream is directly passed in, + // it will not properly get reset to the start of the stream if a chunk upload needs to be retried + let openUploadStream; + if (totalFileSize < buffer.byteLength) { + // compression did not help with reducing the size, use a readable stream from the original file for upload + core.debug(`The gzip file created for ${parameters.file} did not help with reducing the size of the file. The original file will be uploaded as-is`); + openUploadStream = () => fs.createReadStream(parameters.file); + isGzip = false; + uploadFileSize = totalFileSize; + } + else { + // create a readable stream using a PassThrough stream that is both readable and writable + core.debug(`A gzip file created for ${parameters.file} helped with reducing the size of the original file. The file will be uploaded using gzip.`); + openUploadStream = () => { + const passThrough = new stream.PassThrough(); + passThrough.end(buffer); + return passThrough; + }; + uploadFileSize = buffer.byteLength; + } + const result = yield this.uploadChunk(httpClientIndex, parameters.resourceUrl, openUploadStream, 0, uploadFileSize - 1, uploadFileSize, isGzip, totalFileSize); + if (!result) { + // chunk failed to upload + isUploadSuccessful = false; + failedChunkSizes += uploadFileSize; + core.warning(`Aborting upload for ${parameters.file} due to failure`); + } + return { + isSuccess: isUploadSuccessful, + successfulUploadSize: uploadFileSize - failedChunkSizes, + totalSize: totalFileSize + }; + } + else { + // the file that is being uploaded is greater than 64k in size, a temporary file gets created on disk using the + // npm tmp-promise package and this file gets used to create a GZipped file + const tempFile = yield tmp.file(); + core.debug(`${parameters.file} is greater than 64k in size. 
Creating a gzip file on-disk ${tempFile.path} to potentially reduce the upload size`); + // create a GZip file of the original file being uploaded, the original file should not be modified in any way + uploadFileSize = yield upload_gzip_1.createGZipFileOnDisk(parameters.file, tempFile.path); + let uploadFilePath = tempFile.path; + // compression did not help with size reduction, use the original file for upload and delete the temp GZip file + // for named pipes totalFileSize is zero, this assumes compression did help + if (!isFIFO && totalFileSize < uploadFileSize) { + core.debug(`The gzip file created for ${parameters.file} did not help with reducing the size of the file. The original file will be uploaded as-is`); + uploadFileSize = totalFileSize; + uploadFilePath = parameters.file; + isGzip = false; + } + else { + core.debug(`The gzip file created for ${parameters.file} is smaller than the original file. The file will be uploaded using gzip.`); + } + let abortFileUpload = false; + // upload only a single chunk at a time + while (offset < uploadFileSize) { + const chunkSize = Math.min(uploadFileSize - offset, parameters.maxChunkSize); + const startChunkIndex = offset; + const endChunkIndex = offset + chunkSize - 1; + offset += parameters.maxChunkSize; + if (abortFileUpload) { + // if we don't want to continue in the event of an error, any pending upload chunks will be marked as failed + failedChunkSizes += chunkSize; + continue; + } + const result = yield this.uploadChunk(httpClientIndex, parameters.resourceUrl, () => fs.createReadStream(uploadFilePath, { + start: startChunkIndex, + end: endChunkIndex, + autoClose: false + }), startChunkIndex, endChunkIndex, uploadFileSize, isGzip, totalFileSize); + if (!result) { + // Chunk failed to upload, report as failed and do not continue uploading any more chunks for the file. It is possible that part of a chunk was + // successfully uploaded so the server may report a different size for what was uploaded + isUploadSuccessful = false; + failedChunkSizes += chunkSize; + core.warning(`Aborting upload for ${parameters.file} due to failure`); + abortFileUpload = true; + } + else { + // if an individual file is greater than 8MB (1024*1024*8) in size, display extra information about the upload status + if (uploadFileSize > 8388608) { + this.statusReporter.updateLargeFileStatus(parameters.file, startChunkIndex, endChunkIndex, uploadFileSize); + } + } + } + // Delete the temporary file that was created as part of the upload. If the temp file does not get manually deleted by + // calling cleanup, it gets removed when the node process exits. For more info see: https://www.npmjs.com/package/tmp-promise#about + core.debug(`deleting temporary gzip file ${tempFile.path}`); + yield tempFile.cleanup(); + return { + isSuccess: isUploadSuccessful, + successfulUploadSize: uploadFileSize - failedChunkSizes, + totalSize: totalFileSize + }; + } + }); + } + /** + * Uploads a chunk of an individual file to the specified resourceUrl. 
If the upload fails and the status code + * indicates a retryable status, we try to upload the chunk as well + * @param {number} httpClientIndex The index of the httpClient being used to make all the necessary calls + * @param {string} resourceUrl Url of the resource that the chunk will be uploaded to + * @param {NodeJS.ReadableStream} openStream Stream of the file that will be uploaded + * @param {number} start Starting byte index of file that the chunk belongs to + * @param {number} end Ending byte index of file that the chunk belongs to + * @param {number} uploadFileSize Total size of the file in bytes that is being uploaded + * @param {boolean} isGzip Denotes if we are uploading a Gzip compressed stream + * @param {number} totalFileSize Original total size of the file that is being uploaded + * @returns if the chunk was successfully uploaded + */ + uploadChunk(httpClientIndex, resourceUrl, openStream, start, end, uploadFileSize, isGzip, totalFileSize) { + return __awaiter(this, void 0, void 0, function* () { + // open a new stream and read it to compute the digest + const digest = yield utils_1.digestForStream(openStream()); + // prepare all the necessary headers before making any http call + const headers = utils_1.getUploadHeaders('application/octet-stream', true, isGzip, totalFileSize, end - start + 1, utils_1.getContentRange(start, end, uploadFileSize), digest); + const uploadChunkRequest = () => __awaiter(this, void 0, void 0, function* () { + const client = this.uploadHttpManager.getClient(httpClientIndex); + return yield client.sendStream('PUT', resourceUrl, openStream(), headers); + }); + let retryCount = 0; + const retryLimit = config_variables_1.getRetryLimit(); + // Increments the current retry count and then checks if the retry limit has been reached + // If there have been too many retries, fail so the download stops + const incrementAndCheckRetryLimit = (response) => { + retryCount++; + if (retryCount > retryLimit) { + if (response) { + utils_1.displayHttpDiagnostics(response); + } + core.info(`Retry limit has been reached for chunk at offset ${start} to ${resourceUrl}`); + return true; + } + return false; + }; + const backOff = (retryAfterValue) => __awaiter(this, void 0, void 0, function* () { + this.uploadHttpManager.disposeAndReplaceClient(httpClientIndex); + if (retryAfterValue) { + core.info(`Backoff due to too many requests, retry #${retryCount}. Waiting for ${retryAfterValue} milliseconds before continuing the upload`); + yield utils_1.sleep(retryAfterValue); + } + else { + const backoffTime = utils_1.getExponentialRetryTimeInMilliseconds(retryCount); + core.info(`Exponential backoff for retry #${retryCount}. Waiting for ${backoffTime} milliseconds before continuing the upload at offset ${start}`); + yield utils_1.sleep(backoffTime); + } + core.info(`Finished backoff for retry #${retryCount}, continuing with upload`); + return; + }); + // allow for failed chunks to be retried multiple times + while (retryCount <= retryLimit) { + let response; + try { + response = yield uploadChunkRequest(); + } + catch (error) { + // if an error is caught, it is usually indicative of a timeout so retry the upload + core.info(`An error has been caught http-client index ${httpClientIndex}, retrying the upload`); + // eslint-disable-next-line no-console + console.log(error); + if (incrementAndCheckRetryLimit()) { + return false; + } + yield backOff(); + continue; + } + // Always read the body of the response. 
There is potential for a resource leak if the body is not read which will + // result in the connection remaining open along with unintended consequences when trying to dispose of the client + yield response.readBody(); + if (utils_1.isSuccessStatusCode(response.message.statusCode)) { + return true; + } + else if (utils_1.isRetryableStatusCode(response.message.statusCode)) { + core.info(`A ${response.message.statusCode} status code has been received, will attempt to retry the upload`); + if (incrementAndCheckRetryLimit(response)) { + return false; + } + utils_1.isThrottledStatusCode(response.message.statusCode) + ? yield backOff(utils_1.tryGetRetryAfterValueTimeInMilliseconds(response.message.headers)) + : yield backOff(); + } + else { + core.error(`Unexpected response. Unable to upload chunk to ${resourceUrl}`); + utils_1.displayHttpDiagnostics(response); + return false; + } + } + return false; + }); + } + /** + * Updates the size of the artifact from -1 which was initially set when the container was first created for the artifact. + * Updating the size indicates that we are done uploading all the contents of the artifact + */ + patchArtifactSize(size, artifactName) { + return __awaiter(this, void 0, void 0, function* () { + const resourceUrl = new url_1.URL(utils_1.getArtifactUrl()); + resourceUrl.searchParams.append('artifactName', artifactName); + const parameters = { Size: size }; + const data = JSON.stringify(parameters, null, 2); + core.debug(`URL is ${resourceUrl.toString()}`); + // use the first client from the httpManager, `keep-alive` is not used so the connection will close immediately + const client = this.uploadHttpManager.getClient(0); + const headers = utils_1.getUploadHeaders('application/json', false); + // Extra information to display when a particular HTTP code is returned + const customErrorMessages = new Map([ + [ + http_client_1.HttpCodes.NotFound, + `An Artifact with the name ${artifactName} was not found` + ] + ]); + // TODO retry for all possible response codes, the artifact upload is pretty much complete so it at all costs we should try to finish this + const response = yield requestUtils_1.retryHttpClientRequest('Finalize artifact upload', () => __awaiter(this, void 0, void 0, function* () { return client.patch(resourceUrl.toString(), data, headers); }), customErrorMessages); + yield response.readBody(); + core.debug(`Artifact ${artifactName} has been successfully uploaded, total size in bytes: ${size}`); + }); + } +} +exports.UploadHttpClient = UploadHttpClient; +//# sourceMappingURL=upload-http-client.js.map \ No newline at end of file diff --git a/node_modules/@actions/artifact/lib/internal/upload-http-client.js.map b/node_modules/@actions/artifact/lib/internal/upload-http-client.js.map new file mode 100644 index 00000000..35ebcfba --- /dev/null +++ b/node_modules/@actions/artifact/lib/internal/upload-http-client.js.map @@ -0,0 +1 @@ 
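Aside (not part of the vendored files): the `UploadHttpClient` above is driven in three steps: create the file container, upload the files described by an upload specification, then patch the artifact size to finalize it. A sketch of that flow under assumed inputs; the artifact name, root directory, and file list are placeholders, and the `fileContainerResourceUrl` field is assumed from the package's contracts module, which is not shown in this part of the diff.

```typescript
// Illustrative sketch only. Names and paths are placeholders.
import {UploadHttpClient} from './upload-http-client'
import {getUploadSpecification} from './upload-specification'

async function uploadExample(): Promise<void> {
  const uploadClient = new UploadHttpClient()
  const artifactName = 'my-artifact'

  // Describe which files to upload and where they land inside the artifact
  const spec = getUploadSpecification(artifactName, '/home/runner/work', [
    '/home/runner/work/report.txt'
  ])

  // 1. Create the file container for the new artifact (optionally overriding retention)
  const container = await uploadClient.createArtifactInFileContainer(artifactName, {retentionDays: 7})

  // 2. Upload all files in chunks with limited concurrency; stop on the first failure
  const results = await uploadClient.uploadArtifactToFileContainer(
    container.fileContainerResourceUrl, // assumed field name from the package contracts
    spec,
    {continueOnError: false}
  )

  // 3. Finalize by patching the artifact size, which the server initially records as -1
  await uploadClient.patchArtifactSize(results.totalSize, artifactName)
}
```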
+{"version":3,"file":"upload-http-client.js","sourceRoot":"","sources":["../../src/internal/upload-http-client.ts"],"names":[],"mappings":";;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;AAAA,uCAAwB;AACxB,oDAAqC;AACrC,iDAAkC;AAClC,+CAAgC;AAOhC,mCAagB;AAChB,yDAK2B;AAC3B,+BAA8B;AAC9B,6BAAuB;AACvB,2CAAsC;AACtC,uDAAgD;AAChD,sDAAkE;AAClE,iDAA0C;AAG1C,+CAA0E;AAC1E,iDAAqD;AACrD,MAAM,IAAI,GAAG,gBAAS,CAAC,EAAE,CAAC,IAAI,CAAC,CAAA;AAE/B,MAAa,gBAAgB;IAI3B;QACE,IAAI,CAAC,iBAAiB,GAAG,IAAI,0BAAW,CACtC,2CAAwB,EAAE,EAC1B,0BAA0B,CAC3B,CAAA;QACD,IAAI,CAAC,cAAc,GAAG,IAAI,gCAAc,CAAC,KAAK,CAAC,CAAA;IACjD,CAAC;IAED;;;;OAIG;IACG,6BAA6B,CACjC,YAAoB,EACpB,OAAmC;;YAEnC,MAAM,UAAU,GAA6B;gBAC3C,IAAI,EAAE,iBAAiB;gBACvB,IAAI,EAAE,YAAY;aACnB,CAAA;YAED,6BAA6B;YAC7B,IAAI,OAAO,IAAI,OAAO,CAAC,aAAa,EAAE;gBACpC,MAAM,eAAe,GAAG,mCAAgB,EAAE,CAAA;gBAC1C,UAAU,CAAC,aAAa,GAAG,0BAAkB,CAC3C,OAAO,CAAC,aAAa,EACrB,eAAe,CAChB,CAAA;aACF;YAED,MAAM,IAAI,GAAW,IAAI,CAAC,SAAS,CAAC,UAAU,EAAE,IAAI,EAAE,CAAC,CAAC,CAAA;YACxD,MAAM,WAAW,GAAG,sBAAc,EAAE,CAAA;YAEpC,+GAA+G;YAC/G,MAAM,MAAM,GAAG,IAAI,CAAC,iBAAiB,CAAC,SAAS,CAAC,CAAC,CAAC,CAAA;YAClD,MAAM,OAAO,GAAG,wBAAgB,CAAC,kBAAkB,EAAE,KAAK,CAAC,CAAA;YAE3D,uEAAuE;YACvE,yFAAyF;YACzF,mEAAmE;YACnE,MAAM,mBAAmB,GAAwB,IAAI,GAAG,CAAC;gBACvD;oBACE,uBAAS,CAAC,SAAS;oBACnB,yEAAyE;iBAC1E;gBACD;oBACE,uBAAS,CAAC,UAAU;oBACpB,qBAAqB,YAAY,8BAA8B,WAAW,EAAE;iBAC7E;aACF,CAAC,CAAA;YAEF,MAAM,QAAQ,GAAG,MAAM,qCAAsB,CAC3C,2BAA2B,EAC3B,GAAS,EAAE,gDAAC,OAAA,MAAM,CAAC,IAAI,CAAC,WAAW,EAAE,IAAI,EAAE,OAAO,CAAC,CAAA,GAAA,EACnD,mBAAmB,CACpB,CAAA;YACD,MAAM,IAAI,GAAW,MAAM,QAAQ,CAAC,QAAQ,EAAE,CAAA;YAC9C,OAAO,IAAI,CAAC,KAAK,CAAC,IAAI,CAAC,CAAA;QACzB,CAAC;KAAA;IAED;;;;;OAKG;IACG,6BAA6B,CACjC,SAAiB,EACjB,aAAoC,EACpC,OAAuB;;YAEvB,MAAM,gBAAgB,GAAG,2CAAwB,EAAE,CAAA;YACnD,MAAM,cAAc,GAAG,qCAAkB,EAAE,CAAA;YAC3C,IAAI,CAAC,KAAK,CACR,qBAAqB,gBAAgB,qBAAqB,cAAc,EAAE,CAC3E,CAAA;YAED,MAAM,UAAU,GAA2B,EAAE,CAAA;YAC7C,0GAA0G;YAC1G,IAAI,eAAe,GAAG,IAAI,CAAA;YAC1B,IAAI,OAAO,EAAE;gBACX,IAAI,OAAO,CAAC,eAAe,KAAK,KAAK,EAAE;oBACrC,eAAe,GAAG,KAAK,CAAA;iBACxB;aACF;YAED,2DAA2D;YAC3D,KAAK,MAAM,IAAI,IAAI,aAAa,EAAE;gBAChC,MAAM,WAAW,GAAG,IAAI,SAAG,CAAC,SAAS,CAAC,CAAA;gBACtC,WAAW,CAAC,YAAY,CAAC,MAAM,CAAC,UAAU,EAAE,IAAI,CAAC,cAAc,CAAC,CAAA;gBAChE,UAAU,CAAC,IAAI,CAAC;oBACd,IAAI,EAAE,IAAI,CAAC,gBAAgB;oBAC3B,WAAW,EAAE,WAAW,CAAC,QAAQ,EAAE;oBACnC,YAAY,EAAE,cAAc;oBAC5B,eAAe;iBAChB,CAAC,CAAA;aACH;YAED,MAAM,eAAe,GAAG,CAAC,GAAG,IAAI,KAAK,CAAC,gBAAgB,CAAC,CAAC,IAAI,EAAE,CAAC,CAAA;YAC/D,MAAM,mBAAmB,GAAa,EAAE,CAAA;YACxC,IAAI,WAAW,GAAG,CAAC,CAAA;YACnB,IAAI,cAAc,GAAG,CAAC,CAAA;YACtB,IAAI,cAAc,GAAG,CAAC,CAAA;YACtB,IAAI,aAAa,GAAG,CAAC,CAAA;YACrB,IAAI,uBAAuB,GAAG,KAAK,CAAA;YAEnC,IAAI,CAAC,cAAc,CAAC,8BAA8B,CAAC,aAAa,CAAC,MAAM,CAAC,CAAA;YACxE,IAAI,CAAC,cAAc,CAAC,KAAK,EAAE,CAAA;YAE3B,uGAAuG;YACvG,MAAM,OAAO,CAAC,GAAG,CACf,eAAe,CAAC,GAAG,CAAC,CAAM,KAAK,EAAC,EAAE;gBAChC,OAAO,WAAW,GAAG,aAAa,CAAC,MAAM,EAAE;oBACzC,MAAM,qBAAqB,GAAG,UAAU,CAAC,WAAW,CAAC,CAAA;oBACrD,WAAW,IAAI,CAAC,CAAA;oBAChB,IAAI,uBAAuB,EAAE;wBAC3B,mBAAmB,CAAC,IAAI,CAAC,qBAAqB,CAAC,IAAI,CAAC,CAAA;wBACpD,SAAQ;qBACT;oBAED,MAAM,SAAS,GAAG,wBAAW,CAAC,GAAG,EAAE,CAAA;oBACnC,MAAM,gBAAgB,GAAG,MAAM,IAAI,CAAC,eAAe,CACjD,KAAK,EACL,qBAAqB,CACtB,CAAA;oBAED,IAAI,IAAI,CAAC,OAAO,EAAE,EAAE;wBAClB,IAAI,CAAC,KAAK,CACR,SAAS,EAAE,cAAc,IAAI,aAAa,CAAC,MAAM,KAC/C,qBAAqB,CAAC,IACxB,SAAS,CAAC,wBAAW,CAAC,GAAG,EAAE,GAAG,SAAS,CAAC,CAAC,OAAO,CAC9C,CAAC,CACF,gCAAgC,CAClC,CAAA;qBACF;oBAED,cAAc,IAAI,gBAAgB,CAAC,oBAAoB,CAAA;oBACvD,aAAa,IAAI,gBAAgB,CAAC,SAAS,CAAA;oBAC3C,IAAI,gBAAgB,CAAC,SAAS,KAAK,KAAK,EAAE;wBACxC,mBAAmB,CAAC,IAAI,CAAC,qBAAqB,CAAC,IAAI,CAAC,CAAA;wBACpD,IAAI,CAAC,eAAe,EAAE;4BA
CpB,YAAY;4BACZ,IAAI,CAAC,KAAK,CAAC,0BAA0B,CAAC,CAAA;4BACtC,uBAAuB,GAAG,IAAI,CAAA;yBAC/B;qBACF;oBACD,IAAI,CAAC,cAAc,CAAC,uBAAuB,EAAE,CAAA;iBAC9C;YACH,CAAC,CAAA,CAAC,CACH,CAAA;YAED,IAAI,CAAC,cAAc,CAAC,IAAI,EAAE,CAAA;YAC1B,iDAAiD;YACjD,IAAI,CAAC,iBAAiB,CAAC,2BAA2B,EAAE,CAAA;YAEpD,IAAI,CAAC,IAAI,CAAC,2CAA2C,cAAc,QAAQ,CAAC,CAAA;YAC5E,OAAO;gBACL,UAAU,EAAE,cAAc;gBAC1B,SAAS,EAAE,aAAa;gBACxB,WAAW,EAAE,mBAAmB;aACjC,CAAA;QACH,CAAC;KAAA;IAED;;;;;;OAMG;IACW,eAAe,CAC3B,eAAuB,EACvB,UAAgC;;YAEhC,MAAM,QAAQ,GAAa,MAAM,IAAI,CAAC,UAAU,CAAC,IAAI,CAAC,CAAA;YACtD,MAAM,aAAa,GAAG,QAAQ,CAAC,IAAI,CAAA;YACnC,MAAM,MAAM,GAAG,QAAQ,CAAC,MAAM,EAAE,CAAA;YAChC,IAAI,MAAM,GAAG,CAAC,CAAA;YACd,IAAI,kBAAkB,GAAG,IAAI,CAAA;YAC7B,IAAI,gBAAgB,GAAG,CAAC,CAAA;YACxB,IAAI,cAAc,GAAG,CAAC,CAAA;YACtB,IAAI,MAAM,GAAG,IAAI,CAAA;YAEjB,2GAA2G;YAC3G,4EAA4E;YAC5E,gGAAgG;YAChG,IAAI,CAAC,MAAM,IAAI,aAAa,GAAG,KAAK,EAAE;gBACpC,IAAI,CAAC,KAAK,CACR,GAAG,UAAU,CAAC,IAAI,iGAAiG,CACpH,CAAA;gBACD,MAAM,MAAM,GAAG,MAAM,oCAAsB,CAAC,UAAU,CAAC,IAAI,CAAC,CAAA;gBAE5D,6HAA6H;gBAC7H,kGAAkG;gBAClG,IAAI,gBAA6C,CAAA;gBAEjD,IAAI,aAAa,GAAG,MAAM,CAAC,UAAU,EAAE;oBACrC,2GAA2G;oBAC3G,IAAI,CAAC,KAAK,CACR,6BAA6B,UAAU,CAAC,IAAI,4FAA4F,CACzI,CAAA;oBACD,gBAAgB,GAAG,GAAG,EAAE,CAAC,EAAE,CAAC,gBAAgB,CAAC,UAAU,CAAC,IAAI,CAAC,CAAA;oBAC7D,MAAM,GAAG,KAAK,CAAA;oBACd,cAAc,GAAG,aAAa,CAAA;iBAC/B;qBAAM;oBACL,yFAAyF;oBACzF,IAAI,CAAC,KAAK,CACR,2BAA2B,UAAU,CAAC,IAAI,4FAA4F,CACvI,CAAA;oBACD,gBAAgB,GAAG,GAAG,EAAE;wBACtB,MAAM,WAAW,GAAG,IAAI,MAAM,CAAC,WAAW,EAAE,CAAA;wBAC5C,WAAW,CAAC,GAAG,CAAC,MAAM,CAAC,CAAA;wBACvB,OAAO,WAAW,CAAA;oBACpB,CAAC,CAAA;oBACD,cAAc,GAAG,MAAM,CAAC,UAAU,CAAA;iBACnC;gBAED,MAAM,MAAM,GAAG,MAAM,IAAI,CAAC,WAAW,CACnC,eAAe,EACf,UAAU,CAAC,WAAW,EACtB,gBAAgB,EAChB,CAAC,EACD,cAAc,GAAG,CAAC,EAClB,cAAc,EACd,MAAM,EACN,aAAa,CACd,CAAA;gBAED,IAAI,CAAC,MAAM,EAAE;oBACX,yBAAyB;oBACzB,kBAAkB,GAAG,KAAK,CAAA;oBAC1B,gBAAgB,IAAI,cAAc,CAAA;oBAClC,IAAI,CAAC,OAAO,CAAC,uBAAuB,UAAU,CAAC,IAAI,iBAAiB,CAAC,CAAA;iBACtE;gBAED,OAAO;oBACL,SAAS,EAAE,kBAAkB;oBAC7B,oBAAoB,EAAE,cAAc,GAAG,gBAAgB;oBACvD,SAAS,EAAE,aAAa;iBACzB,CAAA;aACF;iBAAM;gBACL,+GAA+G;gBAC/G,2EAA2E;gBAC3E,MAAM,QAAQ,GAAG,MAAM,GAAG,CAAC,IAAI,EAAE,CAAA;gBACjC,IAAI,CAAC,KAAK,CACR,GAAG,UAAU,CAAC,IAAI,8DAA8D,QAAQ,CAAC,IAAI,wCAAwC,CACtI,CAAA;gBAED,8GAA8G;gBAC9G,cAAc,GAAG,MAAM,kCAAoB,CACzC,UAAU,CAAC,IAAI,EACf,QAAQ,CAAC,IAAI,CACd,CAAA;gBAED,IAAI,cAAc,GAAG,QAAQ,CAAC,IAAI,CAAA;gBAElC,+GAA+G;gBAC/G,2EAA2E;gBAC3E,IAAI,CAAC,MAAM,IAAI,aAAa,GAAG,cAAc,EAAE;oBAC7C,IAAI,CAAC,KAAK,CACR,6BAA6B,UAAU,CAAC,IAAI,4FAA4F,CACzI,CAAA;oBACD,cAAc,GAAG,aAAa,CAAA;oBAC9B,cAAc,GAAG,UAAU,CAAC,IAAI,CAAA;oBAChC,MAAM,GAAG,KAAK,CAAA;iBACf;qBAAM;oBACL,IAAI,CAAC,KAAK,CACR,6BAA6B,UAAU,CAAC,IAAI,2EAA2E,CACxH,CAAA;iBACF;gBAED,IAAI,eAAe,GAAG,KAAK,CAAA;gBAC3B,uCAAuC;gBACvC,OAAO,MAAM,GAAG,cAAc,EAAE;oBAC9B,MAAM,SAAS,GAAG,IAAI,CAAC,GAAG,CACxB,cAAc,GAAG,MAAM,EACvB,UAAU,CAAC,YAAY,CACxB,CAAA;oBAED,MAAM,eAAe,GAAG,MAAM,CAAA;oBAC9B,MAAM,aAAa,GAAG,MAAM,GAAG,SAAS,GAAG,CAAC,CAAA;oBAC5C,MAAM,IAAI,UAAU,CAAC,YAAY,CAAA;oBAEjC,IAAI,eAAe,EAAE;wBACnB,4GAA4G;wBAC5G,gBAAgB,IAAI,SAAS,CAAA;wBAC7B,SAAQ;qBACT;oBAED,MAAM,MAAM,GAAG,MAAM,IAAI,CAAC,WAAW,CACnC,eAAe,EACf,UAAU,CAAC,WAAW,EACtB,GAAG,EAAE,CACH,EAAE,CAAC,gBAAgB,CAAC,cAAc,EAAE;wBAClC,KAAK,EAAE,eAAe;wBACtB,GAAG,EAAE,aAAa;wBAClB,SAAS,EAAE,KAAK;qBACjB,CAAC,EACJ,eAAe,EACf,aAAa,EACb,cAAc,EACd,MAAM,EACN,aAAa,CACd,CAAA;oBAED,IAAI,CAAC,MAAM,EAAE;wBACX,+IAA+I;wBAC/I,wFAAwF;wBACxF,kBAAkB,GAAG,KAAK,CAAA;wBAC1B,gBAAgB,IAAI,SAAS,CAAA;wBAC7B,IAAI,CAAC,OAAO,CAAC,uBAAuB,UAAU,CAAC,IAAI,iBAAiB,CAAC,CAAA;wBACrE,eAAe,GAAG,IAAI,CAAA;qBACvB;yBAAM;wBACL,qHAAqH;wBACrH,IAAI,cAAc,GAAG,OAAO,EAAE
;4BAC5B,IAAI,CAAC,cAAc,CAAC,qBAAqB,CACvC,UAAU,CAAC,IAAI,EACf,eAAe,EACf,aAAa,EACb,cAAc,CACf,CAAA;yBACF;qBACF;iBACF;gBAED,sHAAsH;gBACtH,mIAAmI;gBACnI,IAAI,CAAC,KAAK,CAAC,gCAAgC,QAAQ,CAAC,IAAI,EAAE,CAAC,CAAA;gBAC3D,MAAM,QAAQ,CAAC,OAAO,EAAE,CAAA;gBAExB,OAAO;oBACL,SAAS,EAAE,kBAAkB;oBAC7B,oBAAoB,EAAE,cAAc,GAAG,gBAAgB;oBACvD,SAAS,EAAE,aAAa;iBACzB,CAAA;aACF;QACH,CAAC;KAAA;IAED;;;;;;;;;;;;OAYG;IACW,WAAW,CACvB,eAAuB,EACvB,WAAmB,EACnB,UAAuC,EACvC,KAAa,EACb,GAAW,EACX,cAAsB,EACtB,MAAe,EACf,aAAqB;;YAErB,sDAAsD;YACtD,MAAM,MAAM,GAAG,MAAM,uBAAe,CAAC,UAAU,EAAE,CAAC,CAAA;YAElD,gEAAgE;YAChE,MAAM,OAAO,GAAG,wBAAgB,CAC9B,0BAA0B,EAC1B,IAAI,EACJ,MAAM,EACN,aAAa,EACb,GAAG,GAAG,KAAK,GAAG,CAAC,EACf,uBAAe,CAAC,KAAK,EAAE,GAAG,EAAE,cAAc,CAAC,EAC3C,MAAM,CACP,CAAA;YAED,MAAM,kBAAkB,GAAG,GAAsC,EAAE;gBACjE,MAAM,MAAM,GAAG,IAAI,CAAC,iBAAiB,CAAC,SAAS,CAAC,eAAe,CAAC,CAAA;gBAChE,OAAO,MAAM,MAAM,CAAC,UAAU,CAAC,KAAK,EAAE,WAAW,EAAE,UAAU,EAAE,EAAE,OAAO,CAAC,CAAA;YAC3E,CAAC,CAAA,CAAA;YAED,IAAI,UAAU,GAAG,CAAC,CAAA;YAClB,MAAM,UAAU,GAAG,gCAAa,EAAE,CAAA;YAElC,yFAAyF;YACzF,kEAAkE;YAClE,MAAM,2BAA2B,GAAG,CAClC,QAA6B,EACpB,EAAE;gBACX,UAAU,EAAE,CAAA;gBACZ,IAAI,UAAU,GAAG,UAAU,EAAE;oBAC3B,IAAI,QAAQ,EAAE;wBACZ,8BAAsB,CAAC,QAAQ,CAAC,CAAA;qBACjC;oBACD,IAAI,CAAC,IAAI,CACP,oDAAoD,KAAK,OAAO,WAAW,EAAE,CAC9E,CAAA;oBACD,OAAO,IAAI,CAAA;iBACZ;gBACD,OAAO,KAAK,CAAA;YACd,CAAC,CAAA;YAED,MAAM,OAAO,GAAG,CAAO,eAAwB,EAAiB,EAAE;gBAChE,IAAI,CAAC,iBAAiB,CAAC,uBAAuB,CAAC,eAAe,CAAC,CAAA;gBAC/D,IAAI,eAAe,EAAE;oBACnB,IAAI,CAAC,IAAI,CACP,4CAA4C,UAAU,iBAAiB,eAAe,4CAA4C,CACnI,CAAA;oBACD,MAAM,aAAK,CAAC,eAAe,CAAC,CAAA;iBAC7B;qBAAM;oBACL,MAAM,WAAW,GAAG,6CAAqC,CAAC,UAAU,CAAC,CAAA;oBACrE,IAAI,CAAC,IAAI,CACP,kCAAkC,UAAU,iBAAiB,WAAW,wDAAwD,KAAK,EAAE,CACxI,CAAA;oBACD,MAAM,aAAK,CAAC,WAAW,CAAC,CAAA;iBACzB;gBACD,IAAI,CAAC,IAAI,CACP,+BAA+B,UAAU,0BAA0B,CACpE,CAAA;gBACD,OAAM;YACR,CAAC,CAAA,CAAA;YAED,uDAAuD;YACvD,OAAO,UAAU,IAAI,UAAU,EAAE;gBAC/B,IAAI,QAA4B,CAAA;gBAEhC,IAAI;oBACF,QAAQ,GAAG,MAAM,kBAAkB,EAAE,CAAA;iBACtC;gBAAC,OAAO,KAAK,EAAE;oBACd,mFAAmF;oBACnF,IAAI,CAAC,IAAI,CACP,8CAA8C,eAAe,uBAAuB,CACrF,CAAA;oBACD,sCAAsC;oBACtC,OAAO,CAAC,GAAG,CAAC,KAAK,CAAC,CAAA;oBAElB,IAAI,2BAA2B,EAAE,EAAE;wBACjC,OAAO,KAAK,CAAA;qBACb;oBACD,MAAM,OAAO,EAAE,CAAA;oBACf,SAAQ;iBACT;gBAED,kHAAkH;gBAClH,kHAAkH;gBAClH,MAAM,QAAQ,CAAC,QAAQ,EAAE,CAAA;gBAEzB,IAAI,2BAAmB,CAAC,QAAQ,CAAC,OAAO,CAAC,UAAU,CAAC,EAAE;oBACpD,OAAO,IAAI,CAAA;iBACZ;qBAAM,IAAI,6BAAqB,CAAC,QAAQ,CAAC,OAAO,CAAC,UAAU,CAAC,EAAE;oBAC7D,IAAI,CAAC,IAAI,CACP,KAAK,QAAQ,CAAC,OAAO,CAAC,UAAU,kEAAkE,CACnG,CAAA;oBACD,IAAI,2BAA2B,CAAC,QAAQ,CAAC,EAAE;wBACzC,OAAO,KAAK,CAAA;qBACb;oBACD,6BAAqB,CAAC,QAAQ,CAAC,OAAO,CAAC,UAAU,CAAC;wBAChD,CAAC,CAAC,MAAM,OAAO,CACX,+CAAuC,CAAC,QAAQ,CAAC,OAAO,CAAC,OAAO,CAAC,CAClE;wBACH,CAAC,CAAC,MAAM,OAAO,EAAE,CAAA;iBACpB;qBAAM;oBACL,IAAI,CAAC,KAAK,CACR,kDAAkD,WAAW,EAAE,CAChE,CAAA;oBACD,8BAAsB,CAAC,QAAQ,CAAC,CAAA;oBAChC,OAAO,KAAK,CAAA;iBACb;aACF;YACD,OAAO,KAAK,CAAA;QACd,CAAC;KAAA;IAED;;;OAGG;IACG,iBAAiB,CAAC,IAAY,EAAE,YAAoB;;YACxD,MAAM,WAAW,GAAG,IAAI,SAAG,CAAC,sBAAc,EAAE,CAAC,CAAA;YAC7C,WAAW,CAAC,YAAY,CAAC,MAAM,CAAC,cAAc,EAAE,YAAY,CAAC,CAAA;YAE7D,MAAM,UAAU,GAAsB,EAAC,IAAI,EAAE,IAAI,EAAC,CAAA;YAClD,MAAM,IAAI,GAAW,IAAI,CAAC,SAAS,CAAC,UAAU,EAAE,IAAI,EAAE,CAAC,CAAC,CAAA;YACxD,IAAI,CAAC,KAAK,CAAC,UAAU,WAAW,CAAC,QAAQ,EAAE,EAAE,CAAC,CAAA;YAE9C,+GAA+G;YAC/G,MAAM,MAAM,GAAG,IAAI,CAAC,iBAAiB,CAAC,SAAS,CAAC,CAAC,CAAC,CAAA;YAClD,MAAM,OAAO,GAAG,wBAAgB,CAAC,kBAAkB,EAAE,KAAK,CAAC,CAAA;YAE3D,uEAAuE;YACvE,MAAM,mBAAmB,GAAwB,IAAI,GAAG,CAAC;gBACvD;oBACE,uBAAS,CAAC,QAAQ;oBAClB,6BAA6B,YAAY,gBAAgB;iBAC1D;aACF,CAAC,CAAA;YAEF,0IAA0I;YAC1I,MAAM,QAAQ,GAAG,MAAM,qCAAsB,CAC3C
,0BAA0B,EAC1B,GAAS,EAAE,gDAAC,OAAA,MAAM,CAAC,KAAK,CAAC,WAAW,CAAC,QAAQ,EAAE,EAAE,IAAI,EAAE,OAAO,CAAC,CAAA,GAAA,EAC/D,mBAAmB,CACpB,CAAA;YACD,MAAM,QAAQ,CAAC,QAAQ,EAAE,CAAA;YACzB,IAAI,CAAC,KAAK,CACR,YAAY,YAAY,yDAAyD,IAAI,EAAE,CACxF,CAAA;QACH,CAAC;KAAA;CACF;AA/fD,4CA+fC"} \ No newline at end of file diff --git a/node_modules/@actions/artifact/lib/internal/upload-options.d.ts b/node_modules/@actions/artifact/lib/internal/upload-options.d.ts new file mode 100644 index 00000000..34dd8557 --- /dev/null +++ b/node_modules/@actions/artifact/lib/internal/upload-options.d.ts @@ -0,0 +1,34 @@ +export interface UploadOptions { + /** + * Indicates if the artifact upload should continue if file or chunk fails to upload from any error. + * If there is a error during upload, a partial artifact will always be associated and available for + * download at the end. The size reported will be the amount of storage that the user or org will be + * charged for the partial artifact. Defaults to true if not specified + * + * If set to false, and an error is encountered, all other uploads will stop and any files or chunks + * that were queued will not be attempted to be uploaded. The partial artifact available will only + * include files and chunks up until the failure + * + * If set to true and an error is encountered, the failed file will be skipped and ignored and all + * other queued files will be attempted to be uploaded. The partial artifact at the end will have all + * files with the exception of the problematic files(s)/chunks(s) that failed to upload + * + */ + continueOnError?: boolean; + /** + * Duration after which artifact will expire in days. + * + * By default artifact expires after 90 days: + * https://docs.github.com/en/actions/configuring-and-managing-workflows/persisting-workflow-data-using-artifacts#downloading-and-deleting-artifacts-after-a-workflow-run-is-complete + * + * Use this option to override the default expiry. + * + * Min value: 1 + * Max value: 90 unless changed by repository setting + * + * If this is set to a greater value than the retention settings allowed, the retention on artifacts + * will be reduced to match the max value allowed on server, and the upload process will continue. An + * input of 0 assumes default retention setting. 
+ */ + retentionDays?: number; +} diff --git a/node_modules/@actions/artifact/lib/internal/upload-options.js b/node_modules/@actions/artifact/lib/internal/upload-options.js new file mode 100644 index 00000000..374bd8fc --- /dev/null +++ b/node_modules/@actions/artifact/lib/internal/upload-options.js @@ -0,0 +1,3 @@ +"use strict"; +Object.defineProperty(exports, "__esModule", { value: true }); +//# sourceMappingURL=upload-options.js.map \ No newline at end of file diff --git a/node_modules/@actions/artifact/lib/internal/upload-options.js.map b/node_modules/@actions/artifact/lib/internal/upload-options.js.map new file mode 100644 index 00000000..37318d6e --- /dev/null +++ b/node_modules/@actions/artifact/lib/internal/upload-options.js.map @@ -0,0 +1 @@ +{"version":3,"file":"upload-options.js","sourceRoot":"","sources":["../../src/internal/upload-options.ts"],"names":[],"mappings":""} \ No newline at end of file diff --git a/node_modules/@actions/artifact/lib/internal/upload-response.d.ts b/node_modules/@actions/artifact/lib/internal/upload-response.d.ts new file mode 100644 index 00000000..ebbb21b8 --- /dev/null +++ b/node_modules/@actions/artifact/lib/internal/upload-response.d.ts @@ -0,0 +1,19 @@ +export interface UploadResponse { + /** + * The name of the artifact that was uploaded + */ + artifactName: string; + /** + * A list of all items that are meant to be uploaded as part of the artifact + */ + artifactItems: string[]; + /** + * Total size of the artifact in bytes that was uploaded + */ + size: number; + /** + * A list of items that were not uploaded as part of the artifact (includes queued items that were not uploaded if + * continueOnError is set to false). This is a subset of artifactItems. + */ + failedItems: string[]; +} diff --git a/node_modules/@actions/artifact/lib/internal/upload-response.js b/node_modules/@actions/artifact/lib/internal/upload-response.js new file mode 100644 index 00000000..3b424ab3 --- /dev/null +++ b/node_modules/@actions/artifact/lib/internal/upload-response.js @@ -0,0 +1,3 @@ +"use strict"; +Object.defineProperty(exports, "__esModule", { value: true }); +//# sourceMappingURL=upload-response.js.map \ No newline at end of file diff --git a/node_modules/@actions/artifact/lib/internal/upload-response.js.map b/node_modules/@actions/artifact/lib/internal/upload-response.js.map new file mode 100644 index 00000000..9a8accd2 --- /dev/null +++ b/node_modules/@actions/artifact/lib/internal/upload-response.js.map @@ -0,0 +1 @@ +{"version":3,"file":"upload-response.js","sourceRoot":"","sources":["../../src/internal/upload-response.ts"],"names":[],"mappings":""} \ No newline at end of file diff --git a/node_modules/@actions/artifact/lib/internal/upload-specification.d.ts b/node_modules/@actions/artifact/lib/internal/upload-specification.d.ts new file mode 100644 index 00000000..4bfddad8 --- /dev/null +++ b/node_modules/@actions/artifact/lib/internal/upload-specification.d.ts @@ -0,0 +1,11 @@ +export interface UploadSpecification { + absoluteFilePath: string; + uploadFilePath: string; +} +/** + * Creates a specification that describes how each file that is part of the artifact will be uploaded + * @param artifactName the name of the artifact being uploaded. 
Used during upload to denote where the artifact is stored on the server + * @param rootDirectory an absolute file path that denotes the path that should be removed from the beginning of each artifact file + * @param artifactFiles a list of absolute file paths that denote what should be uploaded as part of the artifact + */ +export declare function getUploadSpecification(artifactName: string, rootDirectory: string, artifactFiles: string[]): UploadSpecification[]; diff --git a/node_modules/@actions/artifact/lib/internal/upload-specification.js b/node_modules/@actions/artifact/lib/internal/upload-specification.js new file mode 100644 index 00000000..214cfc8b --- /dev/null +++ b/node_modules/@actions/artifact/lib/internal/upload-specification.js @@ -0,0 +1,101 @@ +"use strict"; +var __createBinding = (this && this.__createBinding) || (Object.create ? (function(o, m, k, k2) { + if (k2 === undefined) k2 = k; + Object.defineProperty(o, k2, { enumerable: true, get: function() { return m[k]; } }); +}) : (function(o, m, k, k2) { + if (k2 === undefined) k2 = k; + o[k2] = m[k]; +})); +var __setModuleDefault = (this && this.__setModuleDefault) || (Object.create ? (function(o, v) { + Object.defineProperty(o, "default", { enumerable: true, value: v }); +}) : function(o, v) { + o["default"] = v; +}); +var __importStar = (this && this.__importStar) || function (mod) { + if (mod && mod.__esModule) return mod; + var result = {}; + if (mod != null) for (var k in mod) if (k !== "default" && Object.hasOwnProperty.call(mod, k)) __createBinding(result, mod, k); + __setModuleDefault(result, mod); + return result; +}; +Object.defineProperty(exports, "__esModule", { value: true }); +exports.getUploadSpecification = void 0; +const fs = __importStar(require("fs")); +const core_1 = require("@actions/core"); +const path_1 = require("path"); +const path_and_artifact_name_validation_1 = require("./path-and-artifact-name-validation"); +/** + * Creates a specification that describes how each file that is part of the artifact will be uploaded + * @param artifactName the name of the artifact being uploaded. 
Used during upload to denote where the artifact is stored on the server + * @param rootDirectory an absolute file path that denotes the path that should be removed from the beginning of each artifact file + * @param artifactFiles a list of absolute file paths that denote what should be uploaded as part of the artifact + */ +function getUploadSpecification(artifactName, rootDirectory, artifactFiles) { + // artifact name was checked earlier on, no need to check again + const specifications = []; + if (!fs.existsSync(rootDirectory)) { + throw new Error(`Provided rootDirectory ${rootDirectory} does not exist`); + } + if (!fs.lstatSync(rootDirectory).isDirectory()) { + throw new Error(`Provided rootDirectory ${rootDirectory} is not a valid directory`); + } + // Normalize and resolve, this allows for either absolute or relative paths to be used + rootDirectory = path_1.normalize(rootDirectory); + rootDirectory = path_1.resolve(rootDirectory); + /* + Example to demonstrate behavior + + Input: + artifactName: my-artifact + rootDirectory: '/home/user/files/plz-upload' + artifactFiles: [ + '/home/user/files/plz-upload/file1.txt', + '/home/user/files/plz-upload/file2.txt', + '/home/user/files/plz-upload/dir/file3.txt' + ] + + Output: + specifications: [ + ['/home/user/files/plz-upload/file1.txt', 'my-artifact/file1.txt'], + ['/home/user/files/plz-upload/file1.txt', 'my-artifact/file2.txt'], + ['/home/user/files/plz-upload/file1.txt', 'my-artifact/dir/file3.txt'] + ] + */ + for (let file of artifactFiles) { + if (!fs.existsSync(file)) { + throw new Error(`File ${file} does not exist`); + } + if (!fs.lstatSync(file).isDirectory()) { + // Normalize and resolve, this allows for either absolute or relative paths to be used + file = path_1.normalize(file); + file = path_1.resolve(file); + if (!file.startsWith(rootDirectory)) { + throw new Error(`The rootDirectory: ${rootDirectory} is not a parent directory of the file: ${file}`); + } + // Check for forbidden characters in file paths that will be rejected during upload + const uploadPath = file.replace(rootDirectory, ''); + path_and_artifact_name_validation_1.checkArtifactFilePath(uploadPath); + /* + uploadFilePath denotes where the file will be uploaded in the file container on the server. During a run, if multiple artifacts are uploaded, they will all + be saved in the same container. 
The artifact name is used as the root directory in the container to separate and distinguish uploaded artifacts + + path.join handles all the following cases and would return 'artifact-name/file-to-upload.txt + join('artifact-name/', 'file-to-upload.txt') + join('artifact-name/', '/file-to-upload.txt') + join('artifact-name', 'file-to-upload.txt') + join('artifact-name', '/file-to-upload.txt') + */ + specifications.push({ + absoluteFilePath: file, + uploadFilePath: path_1.join(artifactName, uploadPath) + }); + } + else { + // Directories are rejected by the server during upload + core_1.debug(`Removing ${file} from rawSearchResults because it is a directory`); + } + } + return specifications; +} +exports.getUploadSpecification = getUploadSpecification; +//# sourceMappingURL=upload-specification.js.map \ No newline at end of file diff --git a/node_modules/@actions/artifact/lib/internal/upload-specification.js.map b/node_modules/@actions/artifact/lib/internal/upload-specification.js.map new file mode 100644 index 00000000..dfc17d35 --- /dev/null +++ b/node_modules/@actions/artifact/lib/internal/upload-specification.js.map @@ -0,0 +1 @@ +{"version":3,"file":"upload-specification.js","sourceRoot":"","sources":["../../src/internal/upload-specification.ts"],"names":[],"mappings":";;;;;;;;;;;;;;;;;;;;;;AAAA,uCAAwB;AACxB,wCAAmC;AACnC,+BAA6C;AAC7C,2FAAyE;AAOzE;;;;;GAKG;AACH,SAAgB,sBAAsB,CACpC,YAAoB,EACpB,aAAqB,EACrB,aAAuB;IAEvB,+DAA+D;IAC/D,MAAM,cAAc,GAA0B,EAAE,CAAA;IAEhD,IAAI,CAAC,EAAE,CAAC,UAAU,CAAC,aAAa,CAAC,EAAE;QACjC,MAAM,IAAI,KAAK,CAAC,0BAA0B,aAAa,iBAAiB,CAAC,CAAA;KAC1E;IACD,IAAI,CAAC,EAAE,CAAC,SAAS,CAAC,aAAa,CAAC,CAAC,WAAW,EAAE,EAAE;QAC9C,MAAM,IAAI,KAAK,CACb,0BAA0B,aAAa,2BAA2B,CACnE,CAAA;KACF;IACD,sFAAsF;IACtF,aAAa,GAAG,gBAAS,CAAC,aAAa,CAAC,CAAA;IACxC,aAAa,GAAG,cAAO,CAAC,aAAa,CAAC,CAAA;IAEtC;;;;;;;;;;;;;;;;;;MAkBE;IACF,KAAK,IAAI,IAAI,IAAI,aAAa,EAAE;QAC9B,IAAI,CAAC,EAAE,CAAC,UAAU,CAAC,IAAI,CAAC,EAAE;YACxB,MAAM,IAAI,KAAK,CAAC,QAAQ,IAAI,iBAAiB,CAAC,CAAA;SAC/C;QACD,IAAI,CAAC,EAAE,CAAC,SAAS,CAAC,IAAI,CAAC,CAAC,WAAW,EAAE,EAAE;YACrC,sFAAsF;YACtF,IAAI,GAAG,gBAAS,CAAC,IAAI,CAAC,CAAA;YACtB,IAAI,GAAG,cAAO,CAAC,IAAI,CAAC,CAAA;YACpB,IAAI,CAAC,IAAI,CAAC,UAAU,CAAC,aAAa,CAAC,EAAE;gBACnC,MAAM,IAAI,KAAK,CACb,sBAAsB,aAAa,2CAA2C,IAAI,EAAE,CACrF,CAAA;aACF;YAED,mFAAmF;YACnF,MAAM,UAAU,GAAG,IAAI,CAAC,OAAO,CAAC,aAAa,EAAE,EAAE,CAAC,CAAA;YAClD,yDAAqB,CAAC,UAAU,CAAC,CAAA;YAEjC;;;;;;;;;cASE;YACF,cAAc,CAAC,IAAI,CAAC;gBAClB,gBAAgB,EAAE,IAAI;gBACtB,cAAc,EAAE,WAAI,CAAC,YAAY,EAAE,UAAU,CAAC;aAC/C,CAAC,CAAA;SACH;aAAM;YACL,uDAAuD;YACvD,YAAK,CAAC,YAAY,IAAI,kDAAkD,CAAC,CAAA;SAC1E;KACF;IACD,OAAO,cAAc,CAAA;AACvB,CAAC;AA7ED,wDA6EC"} \ No newline at end of file diff --git a/node_modules/@actions/artifact/lib/internal/utils.d.ts b/node_modules/@actions/artifact/lib/internal/utils.d.ts new file mode 100644 index 00000000..5a5637ac --- /dev/null +++ b/node_modules/@actions/artifact/lib/internal/utils.d.ts @@ -0,0 +1,70 @@ +/// +import { IncomingHttpHeaders, OutgoingHttpHeaders } from 'http'; +import { HttpClient, HttpClientResponse } from '@actions/http-client'; +/** + * Returns a retry time in milliseconds that exponentially gets larger + * depending on the amount of retries that have been attempted + */ +export declare function getExponentialRetryTimeInMilliseconds(retryCount: number): number; +/** + * Parses a env variable that is a number + */ +export declare function parseEnvNumber(key: string): number | undefined; +/** + * Various utility functions to help with the necessary API calls + */ +export declare function getApiVersion(): 
string; +export declare function isSuccessStatusCode(statusCode?: number): boolean; +export declare function isForbiddenStatusCode(statusCode?: number): boolean; +export declare function isRetryableStatusCode(statusCode: number | undefined): boolean; +export declare function isThrottledStatusCode(statusCode?: number): boolean; +/** + * Attempts to get the retry-after value from a set of http headers. The retry time + * is originally denoted in seconds, so if present, it is converted to milliseconds + * @param headers all the headers received when making an http call + */ +export declare function tryGetRetryAfterValueTimeInMilliseconds(headers: IncomingHttpHeaders): number | undefined; +export declare function getContentRange(start: number, end: number, total: number): string; +/** + * Sets all the necessary headers when downloading an artifact + * @param {string} contentType the type of content being uploaded + * @param {boolean} isKeepAlive is the same connection being used to make multiple calls + * @param {boolean} acceptGzip can we accept a gzip encoded response + * @param {string} acceptType the type of content that we can accept + * @returns appropriate headers to make a specific http call during artifact download + */ +export declare function getDownloadHeaders(contentType: string, isKeepAlive?: boolean, acceptGzip?: boolean): OutgoingHttpHeaders; +/** + * Sets all the necessary headers when uploading an artifact + * @param {string} contentType the type of content being uploaded + * @param {boolean} isKeepAlive is the same connection being used to make multiple calls + * @param {boolean} isGzip is the connection being used to upload GZip compressed content + * @param {number} uncompressedLength the original size of the content if something is being uploaded that has been compressed + * @param {number} contentLength the length of the content that is being uploaded + * @param {string} contentRange the range of the content that is being uploaded + * @returns appropriate headers to make a specific http call during artifact upload + */ +export declare function getUploadHeaders(contentType: string, isKeepAlive?: boolean, isGzip?: boolean, uncompressedLength?: number, contentLength?: number, contentRange?: string, digest?: StreamDigest): OutgoingHttpHeaders; +export declare function createHttpClient(userAgent: string): HttpClient; +export declare function getArtifactUrl(): string; +/** + * Uh oh! Something might have gone wrong during either upload or download. The IHtttpClientResponse object contains information + * about the http call that was made by the actions http client. This information might be useful to display for diagnostic purposes, but + * this entire object is really big and most of the information is not really useful. This function takes the response object and displays only + * the information that we want. + * + * Certain information such as the TLSSocket and the Readable state are not really useful for diagnostic purposes so they can be avoided. + * Other information such as the headers, the response code and message might be useful, so this is displayed. 
+ */ +export declare function displayHttpDiagnostics(response: HttpClientResponse): void; +export declare function createDirectoriesForArtifact(directories: string[]): Promise; +export declare function createEmptyFilesForArtifact(emptyFilesToCreate: string[]): Promise; +export declare function getFileSize(filePath: string): Promise; +export declare function rmFile(filePath: string): Promise; +export declare function getProperRetention(retentionInput: number, retentionSetting: string | undefined): number; +export declare function sleep(milliseconds: number): Promise; +export interface StreamDigest { + crc64: string; + md5: string; +} +export declare function digestForStream(stream: NodeJS.ReadableStream): Promise; diff --git a/node_modules/@actions/artifact/lib/internal/utils.js b/node_modules/@actions/artifact/lib/internal/utils.js new file mode 100644 index 00000000..0d866983 --- /dev/null +++ b/node_modules/@actions/artifact/lib/internal/utils.js @@ -0,0 +1,292 @@ +"use strict"; +var __awaiter = (this && this.__awaiter) || function (thisArg, _arguments, P, generator) { + function adopt(value) { return value instanceof P ? value : new P(function (resolve) { resolve(value); }); } + return new (P || (P = Promise))(function (resolve, reject) { + function fulfilled(value) { try { step(generator.next(value)); } catch (e) { reject(e); } } + function rejected(value) { try { step(generator["throw"](value)); } catch (e) { reject(e); } } + function step(result) { result.done ? resolve(result.value) : adopt(result.value).then(fulfilled, rejected); } + step((generator = generator.apply(thisArg, _arguments || [])).next()); + }); +}; +var __importDefault = (this && this.__importDefault) || function (mod) { + return (mod && mod.__esModule) ? mod : { "default": mod }; +}; +Object.defineProperty(exports, "__esModule", { value: true }); +exports.digestForStream = exports.sleep = exports.getProperRetention = exports.rmFile = exports.getFileSize = exports.createEmptyFilesForArtifact = exports.createDirectoriesForArtifact = exports.displayHttpDiagnostics = exports.getArtifactUrl = exports.createHttpClient = exports.getUploadHeaders = exports.getDownloadHeaders = exports.getContentRange = exports.tryGetRetryAfterValueTimeInMilliseconds = exports.isThrottledStatusCode = exports.isRetryableStatusCode = exports.isForbiddenStatusCode = exports.isSuccessStatusCode = exports.getApiVersion = exports.parseEnvNumber = exports.getExponentialRetryTimeInMilliseconds = void 0; +const crypto_1 = __importDefault(require("crypto")); +const fs_1 = require("fs"); +const core_1 = require("@actions/core"); +const http_client_1 = require("@actions/http-client"); +const auth_1 = require("@actions/http-client/lib/auth"); +const config_variables_1 = require("./config-variables"); +const crc64_1 = __importDefault(require("./crc64")); +/** + * Returns a retry time in milliseconds that exponentially gets larger + * depending on the amount of retries that have been attempted + */ +function getExponentialRetryTimeInMilliseconds(retryCount) { + if (retryCount < 0) { + throw new Error('RetryCount should not be negative'); + } + else if (retryCount === 0) { + return config_variables_1.getInitialRetryIntervalInMilliseconds(); + } + const minTime = config_variables_1.getInitialRetryIntervalInMilliseconds() * config_variables_1.getRetryMultiplier() * retryCount; + const maxTime = minTime * config_variables_1.getRetryMultiplier(); + // returns a random number between the minTime (inclusive) and the maxTime (exclusive) + return 
Math.trunc(Math.random() * (maxTime - minTime) + minTime); +} +exports.getExponentialRetryTimeInMilliseconds = getExponentialRetryTimeInMilliseconds; +/** + * Parses a env variable that is a number + */ +function parseEnvNumber(key) { + const value = Number(process.env[key]); + if (Number.isNaN(value) || value < 0) { + return undefined; + } + return value; +} +exports.parseEnvNumber = parseEnvNumber; +/** + * Various utility functions to help with the necessary API calls + */ +function getApiVersion() { + return '6.0-preview'; +} +exports.getApiVersion = getApiVersion; +function isSuccessStatusCode(statusCode) { + if (!statusCode) { + return false; + } + return statusCode >= 200 && statusCode < 300; +} +exports.isSuccessStatusCode = isSuccessStatusCode; +function isForbiddenStatusCode(statusCode) { + if (!statusCode) { + return false; + } + return statusCode === http_client_1.HttpCodes.Forbidden; +} +exports.isForbiddenStatusCode = isForbiddenStatusCode; +function isRetryableStatusCode(statusCode) { + if (!statusCode) { + return false; + } + const retryableStatusCodes = [ + http_client_1.HttpCodes.BadGateway, + http_client_1.HttpCodes.GatewayTimeout, + http_client_1.HttpCodes.InternalServerError, + http_client_1.HttpCodes.ServiceUnavailable, + http_client_1.HttpCodes.TooManyRequests, + 413 // Payload Too Large + ]; + return retryableStatusCodes.includes(statusCode); +} +exports.isRetryableStatusCode = isRetryableStatusCode; +function isThrottledStatusCode(statusCode) { + if (!statusCode) { + return false; + } + return statusCode === http_client_1.HttpCodes.TooManyRequests; +} +exports.isThrottledStatusCode = isThrottledStatusCode; +/** + * Attempts to get the retry-after value from a set of http headers. The retry time + * is originally denoted in seconds, so if present, it is converted to milliseconds + * @param headers all the headers received when making an http call + */ +function tryGetRetryAfterValueTimeInMilliseconds(headers) { + if (headers['retry-after']) { + const retryTime = Number(headers['retry-after']); + if (!isNaN(retryTime)) { + core_1.info(`Retry-After header is present with a value of ${retryTime}`); + return retryTime * 1000; + } + core_1.info(`Returned retry-after header value: ${retryTime} is non-numeric and cannot be used`); + return undefined; + } + core_1.info(`No retry-after header was found. 
Dumping all headers for diagnostic purposes`); + // eslint-disable-next-line no-console + console.log(headers); + return undefined; +} +exports.tryGetRetryAfterValueTimeInMilliseconds = tryGetRetryAfterValueTimeInMilliseconds; +function getContentRange(start, end, total) { + // Format: `bytes start-end/fileSize + // start and end are inclusive + // For a 200 byte chunk starting at byte 0: + // Content-Range: bytes 0-199/200 + return `bytes ${start}-${end}/${total}`; +} +exports.getContentRange = getContentRange; +/** + * Sets all the necessary headers when downloading an artifact + * @param {string} contentType the type of content being uploaded + * @param {boolean} isKeepAlive is the same connection being used to make multiple calls + * @param {boolean} acceptGzip can we accept a gzip encoded response + * @param {string} acceptType the type of content that we can accept + * @returns appropriate headers to make a specific http call during artifact download + */ +function getDownloadHeaders(contentType, isKeepAlive, acceptGzip) { + const requestOptions = {}; + if (contentType) { + requestOptions['Content-Type'] = contentType; + } + if (isKeepAlive) { + requestOptions['Connection'] = 'Keep-Alive'; + // keep alive for at least 10 seconds before closing the connection + requestOptions['Keep-Alive'] = '10'; + } + if (acceptGzip) { + // if we are expecting a response with gzip encoding, it should be using an octet-stream in the accept header + requestOptions['Accept-Encoding'] = 'gzip'; + requestOptions['Accept'] = `application/octet-stream;api-version=${getApiVersion()}`; + } + else { + // default to application/json if we are not working with gzip content + requestOptions['Accept'] = `application/json;api-version=${getApiVersion()}`; + } + return requestOptions; +} +exports.getDownloadHeaders = getDownloadHeaders; +/** + * Sets all the necessary headers when uploading an artifact + * @param {string} contentType the type of content being uploaded + * @param {boolean} isKeepAlive is the same connection being used to make multiple calls + * @param {boolean} isGzip is the connection being used to upload GZip compressed content + * @param {number} uncompressedLength the original size of the content if something is being uploaded that has been compressed + * @param {number} contentLength the length of the content that is being uploaded + * @param {string} contentRange the range of the content that is being uploaded + * @returns appropriate headers to make a specific http call during artifact upload + */ +function getUploadHeaders(contentType, isKeepAlive, isGzip, uncompressedLength, contentLength, contentRange, digest) { + const requestOptions = {}; + requestOptions['Accept'] = `application/json;api-version=${getApiVersion()}`; + if (contentType) { + requestOptions['Content-Type'] = contentType; + } + if (isKeepAlive) { + requestOptions['Connection'] = 'Keep-Alive'; + // keep alive for at least 10 seconds before closing the connection + requestOptions['Keep-Alive'] = '10'; + } + if (isGzip) { + requestOptions['Content-Encoding'] = 'gzip'; + requestOptions['x-tfs-filelength'] = uncompressedLength; + } + if (contentLength) { + requestOptions['Content-Length'] = contentLength; + } + if (contentRange) { + requestOptions['Content-Range'] = contentRange; + } + if (digest) { + requestOptions['x-actions-results-crc64'] = digest.crc64; + requestOptions['x-actions-results-md5'] = digest.md5; + } + return requestOptions; +} +exports.getUploadHeaders = getUploadHeaders; +function createHttpClient(userAgent) { + 
return new http_client_1.HttpClient(userAgent, [ + new auth_1.BearerCredentialHandler(config_variables_1.getRuntimeToken()) + ]); +} +exports.createHttpClient = createHttpClient; +function getArtifactUrl() { + const artifactUrl = `${config_variables_1.getRuntimeUrl()}_apis/pipelines/workflows/${config_variables_1.getWorkFlowRunId()}/artifacts?api-version=${getApiVersion()}`; + core_1.debug(`Artifact Url: ${artifactUrl}`); + return artifactUrl; +} +exports.getArtifactUrl = getArtifactUrl; +/** + * Uh oh! Something might have gone wrong during either upload or download. The IHtttpClientResponse object contains information + * about the http call that was made by the actions http client. This information might be useful to display for diagnostic purposes, but + * this entire object is really big and most of the information is not really useful. This function takes the response object and displays only + * the information that we want. + * + * Certain information such as the TLSSocket and the Readable state are not really useful for diagnostic purposes so they can be avoided. + * Other information such as the headers, the response code and message might be useful, so this is displayed. + */ +function displayHttpDiagnostics(response) { + core_1.info(`##### Begin Diagnostic HTTP information ##### +Status Code: ${response.message.statusCode} +Status Message: ${response.message.statusMessage} +Header Information: ${JSON.stringify(response.message.headers, undefined, 2)} +###### End Diagnostic HTTP information ######`); +} +exports.displayHttpDiagnostics = displayHttpDiagnostics; +function createDirectoriesForArtifact(directories) { + return __awaiter(this, void 0, void 0, function* () { + for (const directory of directories) { + yield fs_1.promises.mkdir(directory, { + recursive: true + }); + } + }); +} +exports.createDirectoriesForArtifact = createDirectoriesForArtifact; +function createEmptyFilesForArtifact(emptyFilesToCreate) { + return __awaiter(this, void 0, void 0, function* () { + for (const filePath of emptyFilesToCreate) { + yield (yield fs_1.promises.open(filePath, 'w')).close(); + } + }); +} +exports.createEmptyFilesForArtifact = createEmptyFilesForArtifact; +function getFileSize(filePath) { + return __awaiter(this, void 0, void 0, function* () { + const stats = yield fs_1.promises.stat(filePath); + core_1.debug(`${filePath} size:(${stats.size}) blksize:(${stats.blksize}) blocks:(${stats.blocks})`); + return stats.size; + }); +} +exports.getFileSize = getFileSize; +function rmFile(filePath) { + return __awaiter(this, void 0, void 0, function* () { + yield fs_1.promises.unlink(filePath); + }); +} +exports.rmFile = rmFile; +function getProperRetention(retentionInput, retentionSetting) { + if (retentionInput < 0) { + throw new Error('Invalid retention, minimum value is 1.'); + } + let retention = retentionInput; + if (retentionSetting) { + const maxRetention = parseInt(retentionSetting); + if (!isNaN(maxRetention) && maxRetention < retention) { + core_1.warning(`Retention days is greater than the max value allowed by the repository setting, reduce retention to ${maxRetention} days`); + retention = maxRetention; + } + } + return retention; +} +exports.getProperRetention = getProperRetention; +function sleep(milliseconds) { + return __awaiter(this, void 0, void 0, function* () { + return new Promise(resolve => setTimeout(resolve, milliseconds)); + }); +} +exports.sleep = sleep; +function digestForStream(stream) { + return __awaiter(this, void 0, void 0, function* () { + return new 
Promise((resolve, reject) => { + const crc64 = new crc64_1.default(); + const md5 = crypto_1.default.createHash('md5'); + stream + .on('data', data => { + crc64.update(data); + md5.update(data); + }) + .on('end', () => resolve({ + crc64: crc64.digest('base64'), + md5: md5.digest('base64') + })) + .on('error', reject); + }); + }); +} +exports.digestForStream = digestForStream; +//# sourceMappingURL=utils.js.map \ No newline at end of file diff --git a/node_modules/@actions/artifact/lib/internal/utils.js.map b/node_modules/@actions/artifact/lib/internal/utils.js.map new file mode 100644 index 00000000..0684349b --- /dev/null +++ b/node_modules/@actions/artifact/lib/internal/utils.js.map @@ -0,0 +1 @@ +{"version":3,"file":"utils.js","sourceRoot":"","sources":["../../src/internal/utils.ts"],"names":[],"mappings":";;;;;;;;;;;;;;;AAAA,oDAA2B;AAC3B,2BAAiC;AAEjC,wCAAkD;AAClD,sDAA8E;AAC9E,wDAAqE;AACrE,yDAM2B;AAC3B,oDAA2B;AAE3B;;;GAGG;AACH,SAAgB,qCAAqC,CACnD,UAAkB;IAElB,IAAI,UAAU,GAAG,CAAC,EAAE;QAClB,MAAM,IAAI,KAAK,CAAC,mCAAmC,CAAC,CAAA;KACrD;SAAM,IAAI,UAAU,KAAK,CAAC,EAAE;QAC3B,OAAO,wDAAqC,EAAE,CAAA;KAC/C;IAED,MAAM,OAAO,GACX,wDAAqC,EAAE,GAAG,qCAAkB,EAAE,GAAG,UAAU,CAAA;IAC7E,MAAM,OAAO,GAAG,OAAO,GAAG,qCAAkB,EAAE,CAAA;IAE9C,sFAAsF;IACtF,OAAO,IAAI,CAAC,KAAK,CAAC,IAAI,CAAC,MAAM,EAAE,GAAG,CAAC,OAAO,GAAG,OAAO,CAAC,GAAG,OAAO,CAAC,CAAA;AAClE,CAAC;AAfD,sFAeC;AAED;;GAEG;AACH,SAAgB,cAAc,CAAC,GAAW;IACxC,MAAM,KAAK,GAAG,MAAM,CAAC,OAAO,CAAC,GAAG,CAAC,GAAG,CAAC,CAAC,CAAA;IACtC,IAAI,MAAM,CAAC,KAAK,CAAC,KAAK,CAAC,IAAI,KAAK,GAAG,CAAC,EAAE;QACpC,OAAO,SAAS,CAAA;KACjB;IACD,OAAO,KAAK,CAAA;AACd,CAAC;AAND,wCAMC;AAED;;GAEG;AACH,SAAgB,aAAa;IAC3B,OAAO,aAAa,CAAA;AACtB,CAAC;AAFD,sCAEC;AAED,SAAgB,mBAAmB,CAAC,UAAmB;IACrD,IAAI,CAAC,UAAU,EAAE;QACf,OAAO,KAAK,CAAA;KACb;IACD,OAAO,UAAU,IAAI,GAAG,IAAI,UAAU,GAAG,GAAG,CAAA;AAC9C,CAAC;AALD,kDAKC;AAED,SAAgB,qBAAqB,CAAC,UAAmB;IACvD,IAAI,CAAC,UAAU,EAAE;QACf,OAAO,KAAK,CAAA;KACb;IACD,OAAO,UAAU,KAAK,uBAAS,CAAC,SAAS,CAAA;AAC3C,CAAC;AALD,sDAKC;AAED,SAAgB,qBAAqB,CAAC,UAA8B;IAClE,IAAI,CAAC,UAAU,EAAE;QACf,OAAO,KAAK,CAAA;KACb;IAED,MAAM,oBAAoB,GAAG;QAC3B,uBAAS,CAAC,UAAU;QACpB,uBAAS,CAAC,cAAc;QACxB,uBAAS,CAAC,mBAAmB;QAC7B,uBAAS,CAAC,kBAAkB;QAC5B,uBAAS,CAAC,eAAe;QACzB,GAAG,CAAC,oBAAoB;KACzB,CAAA;IACD,OAAO,oBAAoB,CAAC,QAAQ,CAAC,UAAU,CAAC,CAAA;AAClD,CAAC;AAdD,sDAcC;AAED,SAAgB,qBAAqB,CAAC,UAAmB;IACvD,IAAI,CAAC,UAAU,EAAE;QACf,OAAO,KAAK,CAAA;KACb;IACD,OAAO,UAAU,KAAK,uBAAS,CAAC,eAAe,CAAA;AACjD,CAAC;AALD,sDAKC;AAED;;;;GAIG;AACH,SAAgB,uCAAuC,CACrD,OAA4B;IAE5B,IAAI,OAAO,CAAC,aAAa,CAAC,EAAE;QAC1B,MAAM,SAAS,GAAG,MAAM,CAAC,OAAO,CAAC,aAAa,CAAC,CAAC,CAAA;QAChD,IAAI,CAAC,KAAK,CAAC,SAAS,CAAC,EAAE;YACrB,WAAI,CAAC,iDAAiD,SAAS,EAAE,CAAC,CAAA;YAClE,OAAO,SAAS,GAAG,IAAI,CAAA;SACxB;QACD,WAAI,CACF,sCAAsC,SAAS,oCAAoC,CACpF,CAAA;QACD,OAAO,SAAS,CAAA;KACjB;IACD,WAAI,CACF,8EAA8E,CAC/E,CAAA;IACD,sCAAsC;IACtC,OAAO,CAAC,GAAG,CAAC,OAAO,CAAC,CAAA;IACpB,OAAO,SAAS,CAAA;AAClB,CAAC;AApBD,0FAoBC;AAED,SAAgB,eAAe,CAC7B,KAAa,EACb,GAAW,EACX,KAAa;IAEb,oCAAoC;IACpC,8BAA8B;IAC9B,2CAA2C;IAC3C,iCAAiC;IACjC,OAAO,SAAS,KAAK,IAAI,GAAG,IAAI,KAAK,EAAE,CAAA;AACzC,CAAC;AAVD,0CAUC;AAED;;;;;;;GAOG;AACH,SAAgB,kBAAkB,CAChC,WAAmB,EACnB,WAAqB,EACrB,UAAoB;IAEpB,MAAM,cAAc,GAAwB,EAAE,CAAA;IAE9C,IAAI,WAAW,EAAE;QACf,cAAc,CAAC,cAAc,CAAC,GAAG,WAAW,CAAA;KAC7C;IACD,IAAI,WAAW,EAAE;QACf,cAAc,CAAC,YAAY,CAAC,GAAG,YAAY,CAAA;QAC3C,mEAAmE;QACnE,cAAc,CAAC,YAAY,CAAC,GAAG,IAAI,CAAA;KACpC;IACD,IAAI,UAAU,EAAE;QACd,6GAA6G;QAC7G,cAAc,CAAC,iBAAiB,CAAC,GAAG,MAAM,CAAA;QAC1C,cAAc,CACZ,QAAQ,CACT,GAAG,wCAAwC,aAAa,EAAE,EAAE,CAAA;KAC9D;SAAM;QACL,sEAAsE;QACtE,cAAc,CAAC,QAAQ,CAAC,GAAG,gCAAgC,aAAa,EAA
E,EAAE,CAAA;KAC7E;IAED,OAAO,cAAc,CAAA;AACvB,CAAC;AA3BD,gDA2BC;AAED;;;;;;;;;GASG;AACH,SAAgB,gBAAgB,CAC9B,WAAmB,EACnB,WAAqB,EACrB,MAAgB,EAChB,kBAA2B,EAC3B,aAAsB,EACtB,YAAqB,EACrB,MAAqB;IAErB,MAAM,cAAc,GAAwB,EAAE,CAAA;IAC9C,cAAc,CAAC,QAAQ,CAAC,GAAG,gCAAgC,aAAa,EAAE,EAAE,CAAA;IAC5E,IAAI,WAAW,EAAE;QACf,cAAc,CAAC,cAAc,CAAC,GAAG,WAAW,CAAA;KAC7C;IACD,IAAI,WAAW,EAAE;QACf,cAAc,CAAC,YAAY,CAAC,GAAG,YAAY,CAAA;QAC3C,mEAAmE;QACnE,cAAc,CAAC,YAAY,CAAC,GAAG,IAAI,CAAA;KACpC;IACD,IAAI,MAAM,EAAE;QACV,cAAc,CAAC,kBAAkB,CAAC,GAAG,MAAM,CAAA;QAC3C,cAAc,CAAC,kBAAkB,CAAC,GAAG,kBAAkB,CAAA;KACxD;IACD,IAAI,aAAa,EAAE;QACjB,cAAc,CAAC,gBAAgB,CAAC,GAAG,aAAa,CAAA;KACjD;IACD,IAAI,YAAY,EAAE;QAChB,cAAc,CAAC,eAAe,CAAC,GAAG,YAAY,CAAA;KAC/C;IACD,IAAI,MAAM,EAAE;QACV,cAAc,CAAC,yBAAyB,CAAC,GAAG,MAAM,CAAC,KAAK,CAAA;QACxD,cAAc,CAAC,uBAAuB,CAAC,GAAG,MAAM,CAAC,GAAG,CAAA;KACrD;IAED,OAAO,cAAc,CAAA;AACvB,CAAC;AAnCD,4CAmCC;AAED,SAAgB,gBAAgB,CAAC,SAAiB;IAChD,OAAO,IAAI,wBAAU,CAAC,SAAS,EAAE;QAC/B,IAAI,8BAAuB,CAAC,kCAAe,EAAE,CAAC;KAC/C,CAAC,CAAA;AACJ,CAAC;AAJD,4CAIC;AAED,SAAgB,cAAc;IAC5B,MAAM,WAAW,GAAG,GAAG,gCAAa,EAAE,6BAA6B,mCAAgB,EAAE,0BAA0B,aAAa,EAAE,EAAE,CAAA;IAChI,YAAK,CAAC,iBAAiB,WAAW,EAAE,CAAC,CAAA;IACrC,OAAO,WAAW,CAAA;AACpB,CAAC;AAJD,wCAIC;AAED;;;;;;;;GAQG;AACH,SAAgB,sBAAsB,CAAC,QAA4B;IACjE,WAAI,CACF;eACW,QAAQ,CAAC,OAAO,CAAC,UAAU;kBACxB,QAAQ,CAAC,OAAO,CAAC,aAAa;sBAC1B,IAAI,CAAC,SAAS,CAAC,QAAQ,CAAC,OAAO,CAAC,OAAO,EAAE,SAAS,EAAE,CAAC,CAAC;8CAC9B,CAC3C,CAAA;AACH,CAAC;AARD,wDAQC;AAED,SAAsB,4BAA4B,CAChD,WAAqB;;QAErB,KAAK,MAAM,SAAS,IAAI,WAAW,EAAE;YACnC,MAAM,aAAE,CAAC,KAAK,CAAC,SAAS,EAAE;gBACxB,SAAS,EAAE,IAAI;aAChB,CAAC,CAAA;SACH;IACH,CAAC;CAAA;AARD,oEAQC;AAED,SAAsB,2BAA2B,CAC/C,kBAA4B;;QAE5B,KAAK,MAAM,QAAQ,IAAI,kBAAkB,EAAE;YACzC,MAAM,CAAC,MAAM,aAAE,CAAC,IAAI,CAAC,QAAQ,EAAE,GAAG,CAAC,CAAC,CAAC,KAAK,EAAE,CAAA;SAC7C;IACH,CAAC;CAAA;AAND,kEAMC;AAED,SAAsB,WAAW,CAAC,QAAgB;;QAChD,MAAM,KAAK,GAAG,MAAM,aAAE,CAAC,IAAI,CAAC,QAAQ,CAAC,CAAA;QACrC,YAAK,CACH,GAAG,QAAQ,UAAU,KAAK,CAAC,IAAI,cAAc,KAAK,CAAC,OAAO,aAAa,KAAK,CAAC,MAAM,GAAG,CACvF,CAAA;QACD,OAAO,KAAK,CAAC,IAAI,CAAA;IACnB,CAAC;CAAA;AAND,kCAMC;AAED,SAAsB,MAAM,CAAC,QAAgB;;QAC3C,MAAM,aAAE,CAAC,MAAM,CAAC,QAAQ,CAAC,CAAA;IAC3B,CAAC;CAAA;AAFD,wBAEC;AAED,SAAgB,kBAAkB,CAChC,cAAsB,EACtB,gBAAoC;IAEpC,IAAI,cAAc,GAAG,CAAC,EAAE;QACtB,MAAM,IAAI,KAAK,CAAC,wCAAwC,CAAC,CAAA;KAC1D;IAED,IAAI,SAAS,GAAG,cAAc,CAAA;IAC9B,IAAI,gBAAgB,EAAE;QACpB,MAAM,YAAY,GAAG,QAAQ,CAAC,gBAAgB,CAAC,CAAA;QAC/C,IAAI,CAAC,KAAK,CAAC,YAAY,CAAC,IAAI,YAAY,GAAG,SAAS,EAAE;YACpD,cAAO,CACL,uGAAuG,YAAY,OAAO,CAC3H,CAAA;YACD,SAAS,GAAG,YAAY,CAAA;SACzB;KACF;IACD,OAAO,SAAS,CAAA;AAClB,CAAC;AAnBD,gDAmBC;AAED,SAAsB,KAAK,CAAC,YAAoB;;QAC9C,OAAO,IAAI,OAAO,CAAC,OAAO,CAAC,EAAE,CAAC,UAAU,CAAC,OAAO,EAAE,YAAY,CAAC,CAAC,CAAA;IAClE,CAAC;CAAA;AAFD,sBAEC;AAOD,SAAsB,eAAe,CACnC,MAA6B;;QAE7B,OAAO,IAAI,OAAO,CAAC,CAAC,OAAO,EAAE,MAAM,EAAE,EAAE;YACrC,MAAM,KAAK,GAAG,IAAI,eAAK,EAAE,CAAA;YACzB,MAAM,GAAG,GAAG,gBAAM,CAAC,UAAU,CAAC,KAAK,CAAC,CAAA;YACpC,MAAM;iBACH,EAAE,CAAC,MAAM,EAAE,IAAI,CAAC,EAAE;gBACjB,KAAK,CAAC,MAAM,CAAC,IAAI,CAAC,CAAA;gBAClB,GAAG,CAAC,MAAM,CAAC,IAAI,CAAC,CAAA;YAClB,CAAC,CAAC;iBACD,EAAE,CAAC,KAAK,EAAE,GAAG,EAAE,CACd,OAAO,CAAC;gBACN,KAAK,EAAE,KAAK,CAAC,MAAM,CAAC,QAAQ,CAAW;gBACvC,GAAG,EAAE,GAAG,CAAC,MAAM,CAAC,QAAQ,CAAC;aAC1B,CAAC,CACH;iBACA,EAAE,CAAC,OAAO,EAAE,MAAM,CAAC,CAAA;QACxB,CAAC,CAAC,CAAA;IACJ,CAAC;CAAA;AAnBD,0CAmBC"} \ No newline at end of file diff --git a/node_modules/@actions/artifact/package.json b/node_modules/@actions/artifact/package.json new file mode 100644 index 00000000..eb2e8fc8 --- /dev/null +++ b/node_modules/@actions/artifact/package.json @@ -0,0 
+1,49 @@ +{ + "name": "@actions/artifact", + "version": "1.1.1", + "preview": true, + "description": "Actions artifact lib", + "keywords": [ + "github", + "actions", + "artifact" + ], + "homepage": "https://github.com/actions/toolkit/tree/main/packages/artifact", + "license": "MIT", + "main": "lib/artifact-client.js", + "types": "lib/artifact-client.d.ts", + "directories": { + "lib": "lib", + "test": "__tests__" + }, + "files": [ + "lib", + "!.DS_Store" + ], + "publishConfig": { + "access": "public" + }, + "repository": { + "type": "git", + "url": "git+https://github.com/actions/toolkit.git", + "directory": "packages/artifact" + }, + "scripts": { + "audit-moderate": "npm install && npm audit --json --audit-level=moderate > audit.json", + "test": "echo \"Error: run tests from root\" && exit 1", + "tsc": "tsc" + }, + "bugs": { + "url": "https://github.com/actions/toolkit/issues" + }, + "dependencies": { + "@actions/core": "^1.9.1", + "@actions/http-client": "^2.0.1", + "tmp": "^0.2.1", + "tmp-promise": "^3.0.2" + }, + "devDependencies": { + "@types/tmp": "^0.2.1", + "typescript": "^3.8.3" + } +} diff --git a/node_modules/@actions/core/LICENSE.md b/node_modules/@actions/core/LICENSE.md new file mode 100644 index 00000000..dbae2edb --- /dev/null +++ b/node_modules/@actions/core/LICENSE.md @@ -0,0 +1,9 @@ +The MIT License (MIT) + +Copyright 2019 GitHub + +Permission is hereby granted, free of charge, to any person obtaining a copy of this software and associated documentation files (the "Software"), to deal in the Software without restriction, including without limitation the rights to use, copy, modify, merge, publish, distribute, sublicense, and/or sell copies of the Software, and to permit persons to whom the Software is furnished to do so, subject to the following conditions: + +The above copyright notice and this permission notice shall be included in all copies or substantial portions of the Software. + +THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE SOFTWARE. \ No newline at end of file diff --git a/node_modules/@actions/core/README.md b/node_modules/@actions/core/README.md new file mode 100644 index 00000000..3c20c8ea --- /dev/null +++ b/node_modules/@actions/core/README.md @@ -0,0 +1,335 @@ +# `@actions/core` + +> Core functions for setting results, logging, registering secrets and exporting variables across actions + +## Usage + +### Import the package + +```js +// javascript +const core = require('@actions/core'); + +// typescript +import * as core from '@actions/core'; +``` + +#### Inputs/Outputs + +Action inputs can be read with `getInput` which returns a `string` or `getBooleanInput` which parses a boolean based on the [yaml 1.2 specification](https://yaml.org/spec/1.2/spec.html#id2804923). If `required` set to be false, the input should have a default value in `action.yml`. + +Outputs can be set with `setOutput` which makes them available to be mapped into inputs of other actions to ensure they are decoupled. 
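For the optional-input path specifically, here is a minimal sketch (the `retries` input name and the `'3'` fallback are hypothetical, not defined by this package; `getInput` simply returns an empty string when the input is unset):

```js
// Minimal sketch: reading an optional input with a fallback value.
// The `retries` input name and its '3' default are illustrative only.
const core = require('@actions/core');

const retries = core.getInput('retries') || '3'; // '' when unset, so fall back
core.setOutput('effective-retries', retries);    // expose the resolved value to later steps
```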
+ +```js +const myInput = core.getInput('inputName', { required: true }); +const myBooleanInput = core.getBooleanInput('booleanInputName', { required: true }); +const myMultilineInput = core.getMultilineInput('multilineInputName', { required: true }); +core.setOutput('outputKey', 'outputVal'); +``` + +#### Exporting variables + +Since each step runs in a separate process, you can use `exportVariable` to add it to this step and future steps environment blocks. + +```js +core.exportVariable('envVar', 'Val'); +``` + +#### Setting a secret + +Setting a secret registers the secret with the runner to ensure it is masked in logs. + +```js +core.setSecret('myPassword'); +``` + +#### PATH Manipulation + +To make a tool's path available in the path for the remainder of the job (without altering the machine or containers state), use `addPath`. The runner will prepend the path given to the jobs PATH. + +```js +core.addPath('/path/to/mytool'); +``` + +#### Exit codes + +You should use this library to set the failing exit code for your action. If status is not set and the script runs to completion, that will lead to a success. + +```js +const core = require('@actions/core'); + +try { + // Do stuff +} +catch (err) { + // setFailed logs the message and sets a failing exit code + core.setFailed(`Action failed with error ${err}`); +} +``` + +Note that `setNeutral` is not yet implemented in actions V2 but equivalent functionality is being planned. + +#### Logging + +Finally, this library provides some utilities for logging. Note that debug logging is hidden from the logs by default. This behavior can be toggled by enabling the [Step Debug Logs](../../docs/action-debugging.md#step-debug-logs). + +```js +const core = require('@actions/core'); + +const myInput = core.getInput('input'); +try { + core.debug('Inside try block'); + + if (!myInput) { + core.warning('myInput was not set'); + } + + if (core.isDebug()) { + // curl -v https://github.com + } else { + // curl https://github.com + } + + // Do stuff + core.info('Output to the actions build log') + + core.notice('This is a message that will also emit an annotation') +} +catch (err) { + core.error(`Error ${err}, action may still succeed though`); +} +``` + +This library can also wrap chunks of output in foldable groups. + +```js +const core = require('@actions/core') + +// Manually wrap output +core.startGroup('Do some function') +doSomeFunction() +core.endGroup() + +// Wrap an asynchronous function call +const result = await core.group('Do something async', async () => { + const response = await doSomeHTTPRequest() + return response +}) +``` + +#### Annotations + +This library has 3 methods that will produce [annotations](https://docs.github.com/en/rest/reference/checks#create-a-check-run). +```js +core.error('This is a bad error. This will also fail the build.') + +core.warning('Something went wrong, but it\'s not bad enough to fail the build.') + +core.notice('Something happened that you might want to know about.') +``` + +These will surface to the UI in the Actions page and on Pull Requests. They look something like this: + +![Annotations Image](../../docs/assets/annotations.png) + +These annotations can also be attached to particular lines and columns of your source files to show exactly where a problem is occuring. + +These options are: +```typescript +export interface AnnotationProperties { + /** + * A title for the annotation. + */ + title?: string + + /** + * The name of the file for which the annotation should be created. 
+ */ + file?: string + + /** + * The start line for the annotation. + */ + startLine?: number + + /** + * The end line for the annotation. Defaults to `startLine` when `startLine` is provided. + */ + endLine?: number + + /** + * The start column for the annotation. Cannot be sent when `startLine` and `endLine` are different values. + */ + startColumn?: number + + /** + * The start column for the annotation. Cannot be sent when `startLine` and `endLine` are different values. + * Defaults to `startColumn` when `startColumn` is provided. + */ + endColumn?: number +} +``` + +#### Styling output + +Colored output is supported in the Action logs via standard [ANSI escape codes](https://en.wikipedia.org/wiki/ANSI_escape_code). 3/4 bit, 8 bit and 24 bit colors are all supported. + +Foreground colors: + +```js +// 3/4 bit +core.info('\u001b[35mThis foreground will be magenta') + +// 8 bit +core.info('\u001b[38;5;6mThis foreground will be cyan') + +// 24 bit +core.info('\u001b[38;2;255;0;0mThis foreground will be bright red') +``` + +Background colors: + +```js +// 3/4 bit +core.info('\u001b[43mThis background will be yellow'); + +// 8 bit +core.info('\u001b[48;5;6mThis background will be cyan') + +// 24 bit +core.info('\u001b[48;2;255;0;0mThis background will be bright red') +``` + +Special styles: + +```js +core.info('\u001b[1mBold text') +core.info('\u001b[3mItalic text') +core.info('\u001b[4mUnderlined text') +``` + +ANSI escape codes can be combined with one another: + +```js +core.info('\u001b[31;46mRed foreground with a cyan background and \u001b[1mbold text at the end'); +``` + +> Note: Escape codes reset at the start of each line + +```js +core.info('\u001b[35mThis foreground will be magenta') +core.info('This foreground will reset to the default') +``` + +Manually typing escape codes can be a little difficult, but you can use third party modules such as [ansi-styles](https://github.com/chalk/ansi-styles). + +```js +const style = require('ansi-styles'); +core.info(style.color.ansi16m.hex('#abcdef') + 'Hello world!') +``` + +#### Action state + +You can use this library to save state and get state for sharing information between a given wrapper action: + +**action.yml**: + +```yaml +name: 'Wrapper action sample' +inputs: + name: + default: 'GitHub' +runs: + using: 'node12' + main: 'main.js' + post: 'cleanup.js' +``` + +In action's `main.js`: + +```js +const core = require('@actions/core'); + +core.saveState("pidToKill", 12345); +``` + +In action's `cleanup.js`: + +```js +const core = require('@actions/core'); + +var pid = core.getState("pidToKill"); + +process.kill(pid); +``` + +#### OIDC Token + +You can use these methods to interact with the GitHub OIDC provider and get a JWT ID token which would help to get access token from third party cloud providers. 
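As a rough sketch of the call itself (the audience string below is a made-up placeholder, and the error handling is just one reasonable pattern, not something the package prescribes):

```js
// Minimal sketch: request an OIDC ID token and keep it out of the logs.
// 'sts.example.com' is a placeholder audience, not defined by this package.
const core = require('@actions/core');

async function fetchIdToken() {
  try {
    const idToken = await core.getIDToken('sts.example.com'); // custom audience
    core.setSecret(idToken); // mask the token in workflow logs before using it
    return idToken;
  } catch (err) {
    core.setFailed(`Unable to obtain OIDC token: ${err}`);
  }
}
```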
+ +**Method Name**: getIDToken() + +**Inputs** + +audience : optional + +**Outputs** + +A [JWT](https://jwt.io/) ID Token + +In action's `main.ts`: +```js +const core = require('@actions/core'); +async function getIDTokenAction(): Promise { + + const audience = core.getInput('audience', {required: false}) + + const id_token1 = await core.getIDToken() // ID Token with default audience + const id_token2 = await core.getIDToken(audience) // ID token with custom audience + + // this id_token can be used to get access token from third party cloud providers +} +getIDTokenAction() +``` + +In action's `actions.yml`: + +```yaml +name: 'GetIDToken' +description: 'Get ID token from Github OIDC provider' +inputs: + audience: + description: 'Audience for which the ID token is intended for' + required: false +outputs: + id_token1: + description: 'ID token obtained from OIDC provider' + id_token2: + description: 'ID token obtained from OIDC provider' +runs: + using: 'node12' + main: 'dist/index.js' +``` + +#### Filesystem path helpers + +You can use these methods to manipulate file paths across operating systems. + +The `toPosixPath` function converts input paths to Posix-style (Linux) paths. +The `toWin32Path` function converts input paths to Windows-style paths. These +functions work independently of the underlying runner operating system. + +```js +toPosixPath('\\foo\\bar') // => /foo/bar +toWin32Path('/foo/bar') // => \foo\bar +``` + +The `toPlatformPath` function converts input paths to the expected value on the runner's operating system. + +```js +// On a Windows runner. +toPlatformPath('/foo/bar') // => \foo\bar + +// On a Linux runner. +toPlatformPath('\\foo\\bar') // => /foo/bar +``` diff --git a/node_modules/@actions/core/lib/command.d.ts b/node_modules/@actions/core/lib/command.d.ts new file mode 100644 index 00000000..53f8f4b8 --- /dev/null +++ b/node_modules/@actions/core/lib/command.d.ts @@ -0,0 +1,15 @@ +export interface CommandProperties { + [key: string]: any; +} +/** + * Commands + * + * Command Format: + * ::name key=value,key=value::message + * + * Examples: + * ::warning::This is the message + * ::set-env name=MY_VAR::some value + */ +export declare function issueCommand(command: string, properties: CommandProperties, message: any): void; +export declare function issue(name: string, message?: string): void; diff --git a/node_modules/@actions/core/lib/command.js b/node_modules/@actions/core/lib/command.js new file mode 100644 index 00000000..0b28c66b --- /dev/null +++ b/node_modules/@actions/core/lib/command.js @@ -0,0 +1,92 @@ +"use strict"; +var __createBinding = (this && this.__createBinding) || (Object.create ? (function(o, m, k, k2) { + if (k2 === undefined) k2 = k; + Object.defineProperty(o, k2, { enumerable: true, get: function() { return m[k]; } }); +}) : (function(o, m, k, k2) { + if (k2 === undefined) k2 = k; + o[k2] = m[k]; +})); +var __setModuleDefault = (this && this.__setModuleDefault) || (Object.create ? 
(function(o, v) { + Object.defineProperty(o, "default", { enumerable: true, value: v }); +}) : function(o, v) { + o["default"] = v; +}); +var __importStar = (this && this.__importStar) || function (mod) { + if (mod && mod.__esModule) return mod; + var result = {}; + if (mod != null) for (var k in mod) if (k !== "default" && Object.hasOwnProperty.call(mod, k)) __createBinding(result, mod, k); + __setModuleDefault(result, mod); + return result; +}; +Object.defineProperty(exports, "__esModule", { value: true }); +exports.issue = exports.issueCommand = void 0; +const os = __importStar(require("os")); +const utils_1 = require("./utils"); +/** + * Commands + * + * Command Format: + * ::name key=value,key=value::message + * + * Examples: + * ::warning::This is the message + * ::set-env name=MY_VAR::some value + */ +function issueCommand(command, properties, message) { + const cmd = new Command(command, properties, message); + process.stdout.write(cmd.toString() + os.EOL); +} +exports.issueCommand = issueCommand; +function issue(name, message = '') { + issueCommand(name, {}, message); +} +exports.issue = issue; +const CMD_STRING = '::'; +class Command { + constructor(command, properties, message) { + if (!command) { + command = 'missing.command'; + } + this.command = command; + this.properties = properties; + this.message = message; + } + toString() { + let cmdStr = CMD_STRING + this.command; + if (this.properties && Object.keys(this.properties).length > 0) { + cmdStr += ' '; + let first = true; + for (const key in this.properties) { + if (this.properties.hasOwnProperty(key)) { + const val = this.properties[key]; + if (val) { + if (first) { + first = false; + } + else { + cmdStr += ','; + } + cmdStr += `${key}=${escapeProperty(val)}`; + } + } + } + } + cmdStr += `${CMD_STRING}${escapeData(this.message)}`; + return cmdStr; + } +} +function escapeData(s) { + return utils_1.toCommandValue(s) + .replace(/%/g, '%25') + .replace(/\r/g, '%0D') + .replace(/\n/g, '%0A'); +} +function escapeProperty(s) { + return utils_1.toCommandValue(s) + .replace(/%/g, '%25') + .replace(/\r/g, '%0D') + .replace(/\n/g, '%0A') + .replace(/:/g, '%3A') + .replace(/,/g, '%2C'); +} +//# sourceMappingURL=command.js.map \ No newline at end of file diff --git a/node_modules/@actions/core/lib/command.js.map b/node_modules/@actions/core/lib/command.js.map new file mode 100644 index 00000000..51c7c637 --- /dev/null +++ b/node_modules/@actions/core/lib/command.js.map @@ -0,0 +1 @@ 
+{"version":3,"file":"command.js","sourceRoot":"","sources":["../src/command.ts"],"names":[],"mappings":";;;;;;;;;;;;;;;;;;;;;;AAAA,uCAAwB;AACxB,mCAAsC;AAWtC;;;;;;;;;GASG;AACH,SAAgB,YAAY,CAC1B,OAAe,EACf,UAA6B,EAC7B,OAAY;IAEZ,MAAM,GAAG,GAAG,IAAI,OAAO,CAAC,OAAO,EAAE,UAAU,EAAE,OAAO,CAAC,CAAA;IACrD,OAAO,CAAC,MAAM,CAAC,KAAK,CAAC,GAAG,CAAC,QAAQ,EAAE,GAAG,EAAE,CAAC,GAAG,CAAC,CAAA;AAC/C,CAAC;AAPD,oCAOC;AAED,SAAgB,KAAK,CAAC,IAAY,EAAE,OAAO,GAAG,EAAE;IAC9C,YAAY,CAAC,IAAI,EAAE,EAAE,EAAE,OAAO,CAAC,CAAA;AACjC,CAAC;AAFD,sBAEC;AAED,MAAM,UAAU,GAAG,IAAI,CAAA;AAEvB,MAAM,OAAO;IAKX,YAAY,OAAe,EAAE,UAA6B,EAAE,OAAe;QACzE,IAAI,CAAC,OAAO,EAAE;YACZ,OAAO,GAAG,iBAAiB,CAAA;SAC5B;QAED,IAAI,CAAC,OAAO,GAAG,OAAO,CAAA;QACtB,IAAI,CAAC,UAAU,GAAG,UAAU,CAAA;QAC5B,IAAI,CAAC,OAAO,GAAG,OAAO,CAAA;IACxB,CAAC;IAED,QAAQ;QACN,IAAI,MAAM,GAAG,UAAU,GAAG,IAAI,CAAC,OAAO,CAAA;QAEtC,IAAI,IAAI,CAAC,UAAU,IAAI,MAAM,CAAC,IAAI,CAAC,IAAI,CAAC,UAAU,CAAC,CAAC,MAAM,GAAG,CAAC,EAAE;YAC9D,MAAM,IAAI,GAAG,CAAA;YACb,IAAI,KAAK,GAAG,IAAI,CAAA;YAChB,KAAK,MAAM,GAAG,IAAI,IAAI,CAAC,UAAU,EAAE;gBACjC,IAAI,IAAI,CAAC,UAAU,CAAC,cAAc,CAAC,GAAG,CAAC,EAAE;oBACvC,MAAM,GAAG,GAAG,IAAI,CAAC,UAAU,CAAC,GAAG,CAAC,CAAA;oBAChC,IAAI,GAAG,EAAE;wBACP,IAAI,KAAK,EAAE;4BACT,KAAK,GAAG,KAAK,CAAA;yBACd;6BAAM;4BACL,MAAM,IAAI,GAAG,CAAA;yBACd;wBAED,MAAM,IAAI,GAAG,GAAG,IAAI,cAAc,CAAC,GAAG,CAAC,EAAE,CAAA;qBAC1C;iBACF;aACF;SACF;QAED,MAAM,IAAI,GAAG,UAAU,GAAG,UAAU,CAAC,IAAI,CAAC,OAAO,CAAC,EAAE,CAAA;QACpD,OAAO,MAAM,CAAA;IACf,CAAC;CACF;AAED,SAAS,UAAU,CAAC,CAAM;IACxB,OAAO,sBAAc,CAAC,CAAC,CAAC;SACrB,OAAO,CAAC,IAAI,EAAE,KAAK,CAAC;SACpB,OAAO,CAAC,KAAK,EAAE,KAAK,CAAC;SACrB,OAAO,CAAC,KAAK,EAAE,KAAK,CAAC,CAAA;AAC1B,CAAC;AAED,SAAS,cAAc,CAAC,CAAM;IAC5B,OAAO,sBAAc,CAAC,CAAC,CAAC;SACrB,OAAO,CAAC,IAAI,EAAE,KAAK,CAAC;SACpB,OAAO,CAAC,KAAK,EAAE,KAAK,CAAC;SACrB,OAAO,CAAC,KAAK,EAAE,KAAK,CAAC;SACrB,OAAO,CAAC,IAAI,EAAE,KAAK,CAAC;SACpB,OAAO,CAAC,IAAI,EAAE,KAAK,CAAC,CAAA;AACzB,CAAC"} \ No newline at end of file diff --git a/node_modules/@actions/core/lib/core.d.ts b/node_modules/@actions/core/lib/core.d.ts new file mode 100644 index 00000000..1defb572 --- /dev/null +++ b/node_modules/@actions/core/lib/core.d.ts @@ -0,0 +1,198 @@ +/** + * Interface for getInput options + */ +export interface InputOptions { + /** Optional. Whether the input is required. If required and not present, will throw. Defaults to false */ + required?: boolean; + /** Optional. Whether leading/trailing whitespace will be trimmed for the input. Defaults to true */ + trimWhitespace?: boolean; +} +/** + * The code to exit an action + */ +export declare enum ExitCode { + /** + * A code indicating that the action was successful + */ + Success = 0, + /** + * A code indicating that the action was a failure + */ + Failure = 1 +} +/** + * Optional properties that can be sent with annotatation commands (notice, error, and warning) + * See: https://docs.github.com/en/rest/reference/checks#create-a-check-run for more information about annotations. + */ +export interface AnnotationProperties { + /** + * A title for the annotation. + */ + title?: string; + /** + * The path of the file for which the annotation should be created. + */ + file?: string; + /** + * The start line for the annotation. + */ + startLine?: number; + /** + * The end line for the annotation. Defaults to `startLine` when `startLine` is provided. + */ + endLine?: number; + /** + * The start column for the annotation. Cannot be sent when `startLine` and `endLine` are different values. + */ + startColumn?: number; + /** + * The start column for the annotation. 
Cannot be sent when `startLine` and `endLine` are different values. + * Defaults to `startColumn` when `startColumn` is provided. + */ + endColumn?: number; +} +/** + * Sets env variable for this action and future actions in the job + * @param name the name of the variable to set + * @param val the value of the variable. Non-string values will be converted to a string via JSON.stringify + */ +export declare function exportVariable(name: string, val: any): void; +/** + * Registers a secret which will get masked from logs + * @param secret value of the secret + */ +export declare function setSecret(secret: string): void; +/** + * Prepends inputPath to the PATH (for this action and future actions) + * @param inputPath + */ +export declare function addPath(inputPath: string): void; +/** + * Gets the value of an input. + * Unless trimWhitespace is set to false in InputOptions, the value is also trimmed. + * Returns an empty string if the value is not defined. + * + * @param name name of the input to get + * @param options optional. See InputOptions. + * @returns string + */ +export declare function getInput(name: string, options?: InputOptions): string; +/** + * Gets the values of an multiline input. Each value is also trimmed. + * + * @param name name of the input to get + * @param options optional. See InputOptions. + * @returns string[] + * + */ +export declare function getMultilineInput(name: string, options?: InputOptions): string[]; +/** + * Gets the input value of the boolean type in the YAML 1.2 "core schema" specification. + * Support boolean input list: `true | True | TRUE | false | False | FALSE` . + * The return value is also in boolean type. + * ref: https://yaml.org/spec/1.2/spec.html#id2804923 + * + * @param name name of the input to get + * @param options optional. See InputOptions. + * @returns boolean + */ +export declare function getBooleanInput(name: string, options?: InputOptions): boolean; +/** + * Sets the value of an output. + * + * @param name name of the output to set + * @param value value to store. Non-string values will be converted to a string via JSON.stringify + */ +export declare function setOutput(name: string, value: any): void; +/** + * Enables or disables the echoing of commands into stdout for the rest of the step. + * Echoing is disabled by default if ACTIONS_STEP_DEBUG is not set. + * + */ +export declare function setCommandEcho(enabled: boolean): void; +/** + * Sets the action status to failed. + * When the action exits it will be with an exit code of 1 + * @param message add error issue message + */ +export declare function setFailed(message: string | Error): void; +/** + * Gets whether Actions Step Debug is on or not + */ +export declare function isDebug(): boolean; +/** + * Writes debug message to user log + * @param message debug message + */ +export declare function debug(message: string): void; +/** + * Adds an error issue + * @param message error issue message. Errors will be converted to string via toString() + * @param properties optional properties to add to the annotation. + */ +export declare function error(message: string | Error, properties?: AnnotationProperties): void; +/** + * Adds a warning issue + * @param message warning issue message. Errors will be converted to string via toString() + * @param properties optional properties to add to the annotation. + */ +export declare function warning(message: string | Error, properties?: AnnotationProperties): void; +/** + * Adds a notice issue + * @param message notice issue message. 
Errors will be converted to string via toString() + * @param properties optional properties to add to the annotation. + */ +export declare function notice(message: string | Error, properties?: AnnotationProperties): void; +/** + * Writes info to log with console.log. + * @param message info message + */ +export declare function info(message: string): void; +/** + * Begin an output group. + * + * Output until the next `groupEnd` will be foldable in this group + * + * @param name The name of the output group + */ +export declare function startGroup(name: string): void; +/** + * End an output group. + */ +export declare function endGroup(): void; +/** + * Wrap an asynchronous function call in a group. + * + * Returns the same type as the function itself. + * + * @param name The name of the group + * @param fn The function to wrap in the group + */ +export declare function group(name: string, fn: () => Promise): Promise; +/** + * Saves state for current action, the state can only be retrieved by this action's post job execution. + * + * @param name name of the state to store + * @param value value to store. Non-string values will be converted to a string via JSON.stringify + */ +export declare function saveState(name: string, value: any): void; +/** + * Gets the value of an state set by this action's main execution. + * + * @param name name of the state to get + * @returns string + */ +export declare function getState(name: string): string; +export declare function getIDToken(aud?: string): Promise; +/** + * Summary exports + */ +export { summary } from './summary'; +/** + * @deprecated use core.summary + */ +export { markdownSummary } from './summary'; +/** + * Path exports + */ +export { toPosixPath, toWin32Path, toPlatformPath } from './path-utils'; diff --git a/node_modules/@actions/core/lib/core.js b/node_modules/@actions/core/lib/core.js new file mode 100644 index 00000000..48df6ad0 --- /dev/null +++ b/node_modules/@actions/core/lib/core.js @@ -0,0 +1,336 @@ +"use strict"; +var __createBinding = (this && this.__createBinding) || (Object.create ? (function(o, m, k, k2) { + if (k2 === undefined) k2 = k; + Object.defineProperty(o, k2, { enumerable: true, get: function() { return m[k]; } }); +}) : (function(o, m, k, k2) { + if (k2 === undefined) k2 = k; + o[k2] = m[k]; +})); +var __setModuleDefault = (this && this.__setModuleDefault) || (Object.create ? (function(o, v) { + Object.defineProperty(o, "default", { enumerable: true, value: v }); +}) : function(o, v) { + o["default"] = v; +}); +var __importStar = (this && this.__importStar) || function (mod) { + if (mod && mod.__esModule) return mod; + var result = {}; + if (mod != null) for (var k in mod) if (k !== "default" && Object.hasOwnProperty.call(mod, k)) __createBinding(result, mod, k); + __setModuleDefault(result, mod); + return result; +}; +var __awaiter = (this && this.__awaiter) || function (thisArg, _arguments, P, generator) { + function adopt(value) { return value instanceof P ? value : new P(function (resolve) { resolve(value); }); } + return new (P || (P = Promise))(function (resolve, reject) { + function fulfilled(value) { try { step(generator.next(value)); } catch (e) { reject(e); } } + function rejected(value) { try { step(generator["throw"](value)); } catch (e) { reject(e); } } + function step(result) { result.done ? 
resolve(result.value) : adopt(result.value).then(fulfilled, rejected); } + step((generator = generator.apply(thisArg, _arguments || [])).next()); + }); +}; +Object.defineProperty(exports, "__esModule", { value: true }); +exports.getIDToken = exports.getState = exports.saveState = exports.group = exports.endGroup = exports.startGroup = exports.info = exports.notice = exports.warning = exports.error = exports.debug = exports.isDebug = exports.setFailed = exports.setCommandEcho = exports.setOutput = exports.getBooleanInput = exports.getMultilineInput = exports.getInput = exports.addPath = exports.setSecret = exports.exportVariable = exports.ExitCode = void 0; +const command_1 = require("./command"); +const file_command_1 = require("./file-command"); +const utils_1 = require("./utils"); +const os = __importStar(require("os")); +const path = __importStar(require("path")); +const oidc_utils_1 = require("./oidc-utils"); +/** + * The code to exit an action + */ +var ExitCode; +(function (ExitCode) { + /** + * A code indicating that the action was successful + */ + ExitCode[ExitCode["Success"] = 0] = "Success"; + /** + * A code indicating that the action was a failure + */ + ExitCode[ExitCode["Failure"] = 1] = "Failure"; +})(ExitCode = exports.ExitCode || (exports.ExitCode = {})); +//----------------------------------------------------------------------- +// Variables +//----------------------------------------------------------------------- +/** + * Sets env variable for this action and future actions in the job + * @param name the name of the variable to set + * @param val the value of the variable. Non-string values will be converted to a string via JSON.stringify + */ +// eslint-disable-next-line @typescript-eslint/no-explicit-any +function exportVariable(name, val) { + const convertedVal = utils_1.toCommandValue(val); + process.env[name] = convertedVal; + const filePath = process.env['GITHUB_ENV'] || ''; + if (filePath) { + return file_command_1.issueFileCommand('ENV', file_command_1.prepareKeyValueMessage(name, val)); + } + command_1.issueCommand('set-env', { name }, convertedVal); +} +exports.exportVariable = exportVariable; +/** + * Registers a secret which will get masked from logs + * @param secret value of the secret + */ +function setSecret(secret) { + command_1.issueCommand('add-mask', {}, secret); +} +exports.setSecret = setSecret; +/** + * Prepends inputPath to the PATH (for this action and future actions) + * @param inputPath + */ +function addPath(inputPath) { + const filePath = process.env['GITHUB_PATH'] || ''; + if (filePath) { + file_command_1.issueFileCommand('PATH', inputPath); + } + else { + command_1.issueCommand('add-path', {}, inputPath); + } + process.env['PATH'] = `${inputPath}${path.delimiter}${process.env['PATH']}`; +} +exports.addPath = addPath; +/** + * Gets the value of an input. + * Unless trimWhitespace is set to false in InputOptions, the value is also trimmed. + * Returns an empty string if the value is not defined. + * + * @param name name of the input to get + * @param options optional. See InputOptions. + * @returns string + */ +function getInput(name, options) { + const val = process.env[`INPUT_${name.replace(/ /g, '_').toUpperCase()}`] || ''; + if (options && options.required && !val) { + throw new Error(`Input required and not supplied: ${name}`); + } + if (options && options.trimWhitespace === false) { + return val; + } + return val.trim(); +} +exports.getInput = getInput; +/** + * Gets the values of an multiline input. Each value is also trimmed. 
+ * + * @param name name of the input to get + * @param options optional. See InputOptions. + * @returns string[] + * + */ +function getMultilineInput(name, options) { + const inputs = getInput(name, options) + .split('\n') + .filter(x => x !== ''); + if (options && options.trimWhitespace === false) { + return inputs; + } + return inputs.map(input => input.trim()); +} +exports.getMultilineInput = getMultilineInput; +/** + * Gets the input value of the boolean type in the YAML 1.2 "core schema" specification. + * Support boolean input list: `true | True | TRUE | false | False | FALSE` . + * The return value is also in boolean type. + * ref: https://yaml.org/spec/1.2/spec.html#id2804923 + * + * @param name name of the input to get + * @param options optional. See InputOptions. + * @returns boolean + */ +function getBooleanInput(name, options) { + const trueValue = ['true', 'True', 'TRUE']; + const falseValue = ['false', 'False', 'FALSE']; + const val = getInput(name, options); + if (trueValue.includes(val)) + return true; + if (falseValue.includes(val)) + return false; + throw new TypeError(`Input does not meet YAML 1.2 "Core Schema" specification: ${name}\n` + + `Support boolean input list: \`true | True | TRUE | false | False | FALSE\``); +} +exports.getBooleanInput = getBooleanInput; +/** + * Sets the value of an output. + * + * @param name name of the output to set + * @param value value to store. Non-string values will be converted to a string via JSON.stringify + */ +// eslint-disable-next-line @typescript-eslint/no-explicit-any +function setOutput(name, value) { + const filePath = process.env['GITHUB_OUTPUT'] || ''; + if (filePath) { + return file_command_1.issueFileCommand('OUTPUT', file_command_1.prepareKeyValueMessage(name, value)); + } + process.stdout.write(os.EOL); + command_1.issueCommand('set-output', { name }, utils_1.toCommandValue(value)); +} +exports.setOutput = setOutput; +/** + * Enables or disables the echoing of commands into stdout for the rest of the step. + * Echoing is disabled by default if ACTIONS_STEP_DEBUG is not set. + * + */ +function setCommandEcho(enabled) { + command_1.issue('echo', enabled ? 'on' : 'off'); +} +exports.setCommandEcho = setCommandEcho; +//----------------------------------------------------------------------- +// Results +//----------------------------------------------------------------------- +/** + * Sets the action status to failed. + * When the action exits it will be with an exit code of 1 + * @param message add error issue message + */ +function setFailed(message) { + process.exitCode = ExitCode.Failure; + error(message); +} +exports.setFailed = setFailed; +//----------------------------------------------------------------------- +// Logging Commands +//----------------------------------------------------------------------- +/** + * Gets whether Actions Step Debug is on or not + */ +function isDebug() { + return process.env['RUNNER_DEBUG'] === '1'; +} +exports.isDebug = isDebug; +/** + * Writes debug message to user log + * @param message debug message + */ +function debug(message) { + command_1.issueCommand('debug', {}, message); +} +exports.debug = debug; +/** + * Adds an error issue + * @param message error issue message. Errors will be converted to string via toString() + * @param properties optional properties to add to the annotation. + */ +function error(message, properties = {}) { + command_1.issueCommand('error', utils_1.toCommandProperties(properties), message instanceof Error ? 
message.toString() : message); +} +exports.error = error; +/** + * Adds a warning issue + * @param message warning issue message. Errors will be converted to string via toString() + * @param properties optional properties to add to the annotation. + */ +function warning(message, properties = {}) { + command_1.issueCommand('warning', utils_1.toCommandProperties(properties), message instanceof Error ? message.toString() : message); +} +exports.warning = warning; +/** + * Adds a notice issue + * @param message notice issue message. Errors will be converted to string via toString() + * @param properties optional properties to add to the annotation. + */ +function notice(message, properties = {}) { + command_1.issueCommand('notice', utils_1.toCommandProperties(properties), message instanceof Error ? message.toString() : message); +} +exports.notice = notice; +/** + * Writes info to log with console.log. + * @param message info message + */ +function info(message) { + process.stdout.write(message + os.EOL); +} +exports.info = info; +/** + * Begin an output group. + * + * Output until the next `groupEnd` will be foldable in this group + * + * @param name The name of the output group + */ +function startGroup(name) { + command_1.issue('group', name); +} +exports.startGroup = startGroup; +/** + * End an output group. + */ +function endGroup() { + command_1.issue('endgroup'); +} +exports.endGroup = endGroup; +/** + * Wrap an asynchronous function call in a group. + * + * Returns the same type as the function itself. + * + * @param name The name of the group + * @param fn The function to wrap in the group + */ +function group(name, fn) { + return __awaiter(this, void 0, void 0, function* () { + startGroup(name); + let result; + try { + result = yield fn(); + } + finally { + endGroup(); + } + return result; + }); +} +exports.group = group; +//----------------------------------------------------------------------- +// Wrapper action state +//----------------------------------------------------------------------- +/** + * Saves state for current action, the state can only be retrieved by this action's post job execution. + * + * @param name name of the state to store + * @param value value to store. Non-string values will be converted to a string via JSON.stringify + */ +// eslint-disable-next-line @typescript-eslint/no-explicit-any +function saveState(name, value) { + const filePath = process.env['GITHUB_STATE'] || ''; + if (filePath) { + return file_command_1.issueFileCommand('STATE', file_command_1.prepareKeyValueMessage(name, value)); + } + command_1.issueCommand('save-state', { name }, utils_1.toCommandValue(value)); +} +exports.saveState = saveState; +/** + * Gets the value of an state set by this action's main execution. 
+ * + * @param name name of the state to get + * @returns string + */ +function getState(name) { + return process.env[`STATE_${name}`] || ''; +} +exports.getState = getState; +function getIDToken(aud) { + return __awaiter(this, void 0, void 0, function* () { + return yield oidc_utils_1.OidcClient.getIDToken(aud); + }); +} +exports.getIDToken = getIDToken; +/** + * Summary exports + */ +var summary_1 = require("./summary"); +Object.defineProperty(exports, "summary", { enumerable: true, get: function () { return summary_1.summary; } }); +/** + * @deprecated use core.summary + */ +var summary_2 = require("./summary"); +Object.defineProperty(exports, "markdownSummary", { enumerable: true, get: function () { return summary_2.markdownSummary; } }); +/** + * Path exports + */ +var path_utils_1 = require("./path-utils"); +Object.defineProperty(exports, "toPosixPath", { enumerable: true, get: function () { return path_utils_1.toPosixPath; } }); +Object.defineProperty(exports, "toWin32Path", { enumerable: true, get: function () { return path_utils_1.toWin32Path; } }); +Object.defineProperty(exports, "toPlatformPath", { enumerable: true, get: function () { return path_utils_1.toPlatformPath; } }); +//# sourceMappingURL=core.js.map \ No newline at end of file diff --git a/node_modules/@actions/core/lib/core.js.map b/node_modules/@actions/core/lib/core.js.map new file mode 100644 index 00000000..99f7fd85 --- /dev/null +++ b/node_modules/@actions/core/lib/core.js.map @@ -0,0 +1 @@ +{"version":3,"file":"core.js","sourceRoot":"","sources":["../src/core.ts"],"names":[],"mappings":";;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;AAAA,uCAA6C;AAC7C,iDAAuE;AACvE,mCAA2D;AAE3D,uCAAwB;AACxB,2CAA4B;AAE5B,6CAAuC;AAavC;;GAEG;AACH,IAAY,QAUX;AAVD,WAAY,QAAQ;IAClB;;OAEG;IACH,6CAAW,CAAA;IAEX;;OAEG;IACH,6CAAW,CAAA;AACb,CAAC,EAVW,QAAQ,GAAR,gBAAQ,KAAR,gBAAQ,QAUnB;AAuCD,yEAAyE;AACzE,YAAY;AACZ,yEAAyE;AAEzE;;;;GAIG;AACH,8DAA8D;AAC9D,SAAgB,cAAc,CAAC,IAAY,EAAE,GAAQ;IACnD,MAAM,YAAY,GAAG,sBAAc,CAAC,GAAG,CAAC,CAAA;IACxC,OAAO,CAAC,GAAG,CAAC,IAAI,CAAC,GAAG,YAAY,CAAA;IAEhC,MAAM,QAAQ,GAAG,OAAO,CAAC,GAAG,CAAC,YAAY,CAAC,IAAI,EAAE,CAAA;IAChD,IAAI,QAAQ,EAAE;QACZ,OAAO,+BAAgB,CAAC,KAAK,EAAE,qCAAsB,CAAC,IAAI,EAAE,GAAG,CAAC,CAAC,CAAA;KAClE;IAED,sBAAY,CAAC,SAAS,EAAE,EAAC,IAAI,EAAC,EAAE,YAAY,CAAC,CAAA;AAC/C,CAAC;AAVD,wCAUC;AAED;;;GAGG;AACH,SAAgB,SAAS,CAAC,MAAc;IACtC,sBAAY,CAAC,UAAU,EAAE,EAAE,EAAE,MAAM,CAAC,CAAA;AACtC,CAAC;AAFD,8BAEC;AAED;;;GAGG;AACH,SAAgB,OAAO,CAAC,SAAiB;IACvC,MAAM,QAAQ,GAAG,OAAO,CAAC,GAAG,CAAC,aAAa,CAAC,IAAI,EAAE,CAAA;IACjD,IAAI,QAAQ,EAAE;QACZ,+BAAgB,CAAC,MAAM,EAAE,SAAS,CAAC,CAAA;KACpC;SAAM;QACL,sBAAY,CAAC,UAAU,EAAE,EAAE,EAAE,SAAS,CAAC,CAAA;KACxC;IACD,OAAO,CAAC,GAAG,CAAC,MAAM,CAAC,GAAG,GAAG,SAAS,GAAG,IAAI,CAAC,SAAS,GAAG,OAAO,CAAC,GAAG,CAAC,MAAM,CAAC,EAAE,CAAA;AAC7E,CAAC;AARD,0BAQC;AAED;;;;;;;;GAQG;AACH,SAAgB,QAAQ,CAAC,IAAY,EAAE,OAAsB;IAC3D,MAAM,GAAG,GACP,OAAO,CAAC,GAAG,CAAC,SAAS,IAAI,CAAC,OAAO,CAAC,IAAI,EAAE,GAAG,CAAC,CAAC,WAAW,EAAE,EAAE,CAAC,IAAI,EAAE,CAAA;IACrE,IAAI,OAAO,IAAI,OAAO,CAAC,QAAQ,IAAI,CAAC,GAAG,EAAE;QACvC,MAAM,IAAI,KAAK,CAAC,oCAAoC,IAAI,EAAE,CAAC,CAAA;KAC5D;IAED,IAAI,OAAO,IAAI,OAAO,CAAC,cAAc,KAAK,KAAK,EAAE;QAC/C,OAAO,GAAG,CAAA;KACX;IAED,OAAO,GAAG,CAAC,IAAI,EAAE,CAAA;AACnB,CAAC;AAZD,4BAYC;AAED;;;;;;;GAOG;AACH,SAAgB,iBAAiB,CAC/B,IAAY,EACZ,OAAsB;IAEtB,MAAM,MAAM,GAAa,QAAQ,CAAC,IAAI,EAAE,OAAO,CAAC;SAC7C,KAAK,CAAC,IAAI,CAAC;SACX,MAAM,CAAC,CAAC,CAAC,EAAE,CAAC,CAAC,KAAK,EAAE,CAAC,CAAA;IAExB,IAAI,OAAO,IAAI,OAAO,CAAC,cAAc,KAAK,KAAK,EAAE;QAC/C,OAAO,MAAM,CAAA;KACd;IAED,OAAO,MAAM,CAAC,GAAG,CAAC,KAAK,CAAC,EAAE,CAAC,KAAK,CAAC,IAAI,EAAE,CAAC,CAAA;A
AC1C,CAAC;AAbD,8CAaC;AAED;;;;;;;;;GASG;AACH,SAAgB,eAAe,CAAC,IAAY,EAAE,OAAsB;IAClE,MAAM,SAAS,GAAG,CAAC,MAAM,EAAE,MAAM,EAAE,MAAM,CAAC,CAAA;IAC1C,MAAM,UAAU,GAAG,CAAC,OAAO,EAAE,OAAO,EAAE,OAAO,CAAC,CAAA;IAC9C,MAAM,GAAG,GAAG,QAAQ,CAAC,IAAI,EAAE,OAAO,CAAC,CAAA;IACnC,IAAI,SAAS,CAAC,QAAQ,CAAC,GAAG,CAAC;QAAE,OAAO,IAAI,CAAA;IACxC,IAAI,UAAU,CAAC,QAAQ,CAAC,GAAG,CAAC;QAAE,OAAO,KAAK,CAAA;IAC1C,MAAM,IAAI,SAAS,CACjB,6DAA6D,IAAI,IAAI;QACnE,4EAA4E,CAC/E,CAAA;AACH,CAAC;AAVD,0CAUC;AAED;;;;;GAKG;AACH,8DAA8D;AAC9D,SAAgB,SAAS,CAAC,IAAY,EAAE,KAAU;IAChD,MAAM,QAAQ,GAAG,OAAO,CAAC,GAAG,CAAC,eAAe,CAAC,IAAI,EAAE,CAAA;IACnD,IAAI,QAAQ,EAAE;QACZ,OAAO,+BAAgB,CAAC,QAAQ,EAAE,qCAAsB,CAAC,IAAI,EAAE,KAAK,CAAC,CAAC,CAAA;KACvE;IAED,OAAO,CAAC,MAAM,CAAC,KAAK,CAAC,EAAE,CAAC,GAAG,CAAC,CAAA;IAC5B,sBAAY,CAAC,YAAY,EAAE,EAAC,IAAI,EAAC,EAAE,sBAAc,CAAC,KAAK,CAAC,CAAC,CAAA;AAC3D,CAAC;AARD,8BAQC;AAED;;;;GAIG;AACH,SAAgB,cAAc,CAAC,OAAgB;IAC7C,eAAK,CAAC,MAAM,EAAE,OAAO,CAAC,CAAC,CAAC,IAAI,CAAC,CAAC,CAAC,KAAK,CAAC,CAAA;AACvC,CAAC;AAFD,wCAEC;AAED,yEAAyE;AACzE,UAAU;AACV,yEAAyE;AAEzE;;;;GAIG;AACH,SAAgB,SAAS,CAAC,OAAuB;IAC/C,OAAO,CAAC,QAAQ,GAAG,QAAQ,CAAC,OAAO,CAAA;IAEnC,KAAK,CAAC,OAAO,CAAC,CAAA;AAChB,CAAC;AAJD,8BAIC;AAED,yEAAyE;AACzE,mBAAmB;AACnB,yEAAyE;AAEzE;;GAEG;AACH,SAAgB,OAAO;IACrB,OAAO,OAAO,CAAC,GAAG,CAAC,cAAc,CAAC,KAAK,GAAG,CAAA;AAC5C,CAAC;AAFD,0BAEC;AAED;;;GAGG;AACH,SAAgB,KAAK,CAAC,OAAe;IACnC,sBAAY,CAAC,OAAO,EAAE,EAAE,EAAE,OAAO,CAAC,CAAA;AACpC,CAAC;AAFD,sBAEC;AAED;;;;GAIG;AACH,SAAgB,KAAK,CACnB,OAAuB,EACvB,aAAmC,EAAE;IAErC,sBAAY,CACV,OAAO,EACP,2BAAmB,CAAC,UAAU,CAAC,EAC/B,OAAO,YAAY,KAAK,CAAC,CAAC,CAAC,OAAO,CAAC,QAAQ,EAAE,CAAC,CAAC,CAAC,OAAO,CACxD,CAAA;AACH,CAAC;AATD,sBASC;AAED;;;;GAIG;AACH,SAAgB,OAAO,CACrB,OAAuB,EACvB,aAAmC,EAAE;IAErC,sBAAY,CACV,SAAS,EACT,2BAAmB,CAAC,UAAU,CAAC,EAC/B,OAAO,YAAY,KAAK,CAAC,CAAC,CAAC,OAAO,CAAC,QAAQ,EAAE,CAAC,CAAC,CAAC,OAAO,CACxD,CAAA;AACH,CAAC;AATD,0BASC;AAED;;;;GAIG;AACH,SAAgB,MAAM,CACpB,OAAuB,EACvB,aAAmC,EAAE;IAErC,sBAAY,CACV,QAAQ,EACR,2BAAmB,CAAC,UAAU,CAAC,EAC/B,OAAO,YAAY,KAAK,CAAC,CAAC,CAAC,OAAO,CAAC,QAAQ,EAAE,CAAC,CAAC,CAAC,OAAO,CACxD,CAAA;AACH,CAAC;AATD,wBASC;AAED;;;GAGG;AACH,SAAgB,IAAI,CAAC,OAAe;IAClC,OAAO,CAAC,MAAM,CAAC,KAAK,CAAC,OAAO,GAAG,EAAE,CAAC,GAAG,CAAC,CAAA;AACxC,CAAC;AAFD,oBAEC;AAED;;;;;;GAMG;AACH,SAAgB,UAAU,CAAC,IAAY;IACrC,eAAK,CAAC,OAAO,EAAE,IAAI,CAAC,CAAA;AACtB,CAAC;AAFD,gCAEC;AAED;;GAEG;AACH,SAAgB,QAAQ;IACtB,eAAK,CAAC,UAAU,CAAC,CAAA;AACnB,CAAC;AAFD,4BAEC;AAED;;;;;;;GAOG;AACH,SAAsB,KAAK,CAAI,IAAY,EAAE,EAAoB;;QAC/D,UAAU,CAAC,IAAI,CAAC,CAAA;QAEhB,IAAI,MAAS,CAAA;QAEb,IAAI;YACF,MAAM,GAAG,MAAM,EAAE,EAAE,CAAA;SACpB;gBAAS;YACR,QAAQ,EAAE,CAAA;SACX;QAED,OAAO,MAAM,CAAA;IACf,CAAC;CAAA;AAZD,sBAYC;AAED,yEAAyE;AACzE,uBAAuB;AACvB,yEAAyE;AAEzE;;;;;GAKG;AACH,8DAA8D;AAC9D,SAAgB,SAAS,CAAC,IAAY,EAAE,KAAU;IAChD,MAAM,QAAQ,GAAG,OAAO,CAAC,GAAG,CAAC,cAAc,CAAC,IAAI,EAAE,CAAA;IAClD,IAAI,QAAQ,EAAE;QACZ,OAAO,+BAAgB,CAAC,OAAO,EAAE,qCAAsB,CAAC,IAAI,EAAE,KAAK,CAAC,CAAC,CAAA;KACtE;IAED,sBAAY,CAAC,YAAY,EAAE,EAAC,IAAI,EAAC,EAAE,sBAAc,CAAC,KAAK,CAAC,CAAC,CAAA;AAC3D,CAAC;AAPD,8BAOC;AAED;;;;;GAKG;AACH,SAAgB,QAAQ,CAAC,IAAY;IACnC,OAAO,OAAO,CAAC,GAAG,CAAC,SAAS,IAAI,EAAE,CAAC,IAAI,EAAE,CAAA;AAC3C,CAAC;AAFD,4BAEC;AAED,SAAsB,UAAU,CAAC,GAAY;;QAC3C,OAAO,MAAM,uBAAU,CAAC,UAAU,CAAC,GAAG,CAAC,CAAA;IACzC,CAAC;CAAA;AAFD,gCAEC;AAED;;GAEG;AACH,qCAAiC;AAAzB,kGAAA,OAAO,OAAA;AAEf;;GAEG;AACH,qCAAyC;AAAjC,0GAAA,eAAe,OAAA;AAEvB;;GAEG;AACH,2CAAqE;AAA7D,yGAAA,WAAW,OAAA;AAAE,yGAAA,WAAW,OAAA;AAAE,4GAAA,cAAc,OAAA"} \ No newline at end of file diff --git a/node_modules/@actions/core/lib/file-command.d.ts b/node_modules/@actions/core/lib/file-command.d.ts new file 
mode 100644 index 00000000..2d1f2f42 --- /dev/null +++ b/node_modules/@actions/core/lib/file-command.d.ts @@ -0,0 +1,2 @@ +export declare function issueFileCommand(command: string, message: any): void; +export declare function prepareKeyValueMessage(key: string, value: any): string; diff --git a/node_modules/@actions/core/lib/file-command.js b/node_modules/@actions/core/lib/file-command.js new file mode 100644 index 00000000..2d0d738f --- /dev/null +++ b/node_modules/@actions/core/lib/file-command.js @@ -0,0 +1,58 @@ +"use strict"; +// For internal use, subject to change. +var __createBinding = (this && this.__createBinding) || (Object.create ? (function(o, m, k, k2) { + if (k2 === undefined) k2 = k; + Object.defineProperty(o, k2, { enumerable: true, get: function() { return m[k]; } }); +}) : (function(o, m, k, k2) { + if (k2 === undefined) k2 = k; + o[k2] = m[k]; +})); +var __setModuleDefault = (this && this.__setModuleDefault) || (Object.create ? (function(o, v) { + Object.defineProperty(o, "default", { enumerable: true, value: v }); +}) : function(o, v) { + o["default"] = v; +}); +var __importStar = (this && this.__importStar) || function (mod) { + if (mod && mod.__esModule) return mod; + var result = {}; + if (mod != null) for (var k in mod) if (k !== "default" && Object.hasOwnProperty.call(mod, k)) __createBinding(result, mod, k); + __setModuleDefault(result, mod); + return result; +}; +Object.defineProperty(exports, "__esModule", { value: true }); +exports.prepareKeyValueMessage = exports.issueFileCommand = void 0; +// We use any as a valid input type +/* eslint-disable @typescript-eslint/no-explicit-any */ +const fs = __importStar(require("fs")); +const os = __importStar(require("os")); +const uuid_1 = require("uuid"); +const utils_1 = require("./utils"); +function issueFileCommand(command, message) { + const filePath = process.env[`GITHUB_${command}`]; + if (!filePath) { + throw new Error(`Unable to find environment variable for file command ${command}`); + } + if (!fs.existsSync(filePath)) { + throw new Error(`Missing file at path: ${filePath}`); + } + fs.appendFileSync(filePath, `${utils_1.toCommandValue(message)}${os.EOL}`, { + encoding: 'utf8' + }); +} +exports.issueFileCommand = issueFileCommand; +function prepareKeyValueMessage(key, value) { + const delimiter = `ghadelimiter_${uuid_1.v4()}`; + const convertedValue = utils_1.toCommandValue(value); + // These should realistically never happen, but just in case someone finds a + // way to exploit uuid generation let's not allow keys or values that contain + // the delimiter. 
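+ // Example of the message format produced below, as appended to files like
+ // $GITHUB_OUTPUT / $GITHUB_ENV (the delimiter is a freshly generated UUID):
+ //   result<<ghadelimiter_<uuid>
+ //   first line of value
+ //   second line of value
+ //   ghadelimiter_<uuid>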
+ if (key.includes(delimiter)) { + throw new Error(`Unexpected input: name should not contain the delimiter "${delimiter}"`); + } + if (convertedValue.includes(delimiter)) { + throw new Error(`Unexpected input: value should not contain the delimiter "${delimiter}"`); + } + return `${key}<<${delimiter}${os.EOL}${convertedValue}${os.EOL}${delimiter}`; +} +exports.prepareKeyValueMessage = prepareKeyValueMessage; +//# sourceMappingURL=file-command.js.map \ No newline at end of file diff --git a/node_modules/@actions/core/lib/file-command.js.map b/node_modules/@actions/core/lib/file-command.js.map new file mode 100644 index 00000000..b1a9d54d --- /dev/null +++ b/node_modules/@actions/core/lib/file-command.js.map @@ -0,0 +1 @@ +{"version":3,"file":"file-command.js","sourceRoot":"","sources":["../src/file-command.ts"],"names":[],"mappings":";AAAA,uCAAuC;;;;;;;;;;;;;;;;;;;;;;AAEvC,mCAAmC;AACnC,uDAAuD;AAEvD,uCAAwB;AACxB,uCAAwB;AACxB,+BAAiC;AACjC,mCAAsC;AAEtC,SAAgB,gBAAgB,CAAC,OAAe,EAAE,OAAY;IAC5D,MAAM,QAAQ,GAAG,OAAO,CAAC,GAAG,CAAC,UAAU,OAAO,EAAE,CAAC,CAAA;IACjD,IAAI,CAAC,QAAQ,EAAE;QACb,MAAM,IAAI,KAAK,CACb,wDAAwD,OAAO,EAAE,CAClE,CAAA;KACF;IACD,IAAI,CAAC,EAAE,CAAC,UAAU,CAAC,QAAQ,CAAC,EAAE;QAC5B,MAAM,IAAI,KAAK,CAAC,yBAAyB,QAAQ,EAAE,CAAC,CAAA;KACrD;IAED,EAAE,CAAC,cAAc,CAAC,QAAQ,EAAE,GAAG,sBAAc,CAAC,OAAO,CAAC,GAAG,EAAE,CAAC,GAAG,EAAE,EAAE;QACjE,QAAQ,EAAE,MAAM;KACjB,CAAC,CAAA;AACJ,CAAC;AAdD,4CAcC;AAED,SAAgB,sBAAsB,CAAC,GAAW,EAAE,KAAU;IAC5D,MAAM,SAAS,GAAG,gBAAgB,SAAM,EAAE,EAAE,CAAA;IAC5C,MAAM,cAAc,GAAG,sBAAc,CAAC,KAAK,CAAC,CAAA;IAE5C,4EAA4E;IAC5E,6EAA6E;IAC7E,iBAAiB;IACjB,IAAI,GAAG,CAAC,QAAQ,CAAC,SAAS,CAAC,EAAE;QAC3B,MAAM,IAAI,KAAK,CACb,4DAA4D,SAAS,GAAG,CACzE,CAAA;KACF;IAED,IAAI,cAAc,CAAC,QAAQ,CAAC,SAAS,CAAC,EAAE;QACtC,MAAM,IAAI,KAAK,CACb,6DAA6D,SAAS,GAAG,CAC1E,CAAA;KACF;IAED,OAAO,GAAG,GAAG,KAAK,SAAS,GAAG,EAAE,CAAC,GAAG,GAAG,cAAc,GAAG,EAAE,CAAC,GAAG,GAAG,SAAS,EAAE,CAAA;AAC9E,CAAC;AApBD,wDAoBC"} \ No newline at end of file diff --git a/node_modules/@actions/core/lib/oidc-utils.d.ts b/node_modules/@actions/core/lib/oidc-utils.d.ts new file mode 100644 index 00000000..657c7f4a --- /dev/null +++ b/node_modules/@actions/core/lib/oidc-utils.d.ts @@ -0,0 +1,7 @@ +export declare class OidcClient { + private static createHttpClient; + private static getRequestToken; + private static getIDTokenUrl; + private static getCall; + static getIDToken(audience?: string): Promise; +} diff --git a/node_modules/@actions/core/lib/oidc-utils.js b/node_modules/@actions/core/lib/oidc-utils.js new file mode 100644 index 00000000..f7012770 --- /dev/null +++ b/node_modules/@actions/core/lib/oidc-utils.js @@ -0,0 +1,77 @@ +"use strict"; +var __awaiter = (this && this.__awaiter) || function (thisArg, _arguments, P, generator) { + function adopt(value) { return value instanceof P ? value : new P(function (resolve) { resolve(value); }); } + return new (P || (P = Promise))(function (resolve, reject) { + function fulfilled(value) { try { step(generator.next(value)); } catch (e) { reject(e); } } + function rejected(value) { try { step(generator["throw"](value)); } catch (e) { reject(e); } } + function step(result) { result.done ? 
resolve(result.value) : adopt(result.value).then(fulfilled, rejected); } + step((generator = generator.apply(thisArg, _arguments || [])).next()); + }); +}; +Object.defineProperty(exports, "__esModule", { value: true }); +exports.OidcClient = void 0; +const http_client_1 = require("@actions/http-client"); +const auth_1 = require("@actions/http-client/lib/auth"); +const core_1 = require("./core"); +class OidcClient { + static createHttpClient(allowRetry = true, maxRetry = 10) { + const requestOptions = { + allowRetries: allowRetry, + maxRetries: maxRetry + }; + return new http_client_1.HttpClient('actions/oidc-client', [new auth_1.BearerCredentialHandler(OidcClient.getRequestToken())], requestOptions); + } + static getRequestToken() { + const token = process.env['ACTIONS_ID_TOKEN_REQUEST_TOKEN']; + if (!token) { + throw new Error('Unable to get ACTIONS_ID_TOKEN_REQUEST_TOKEN env variable'); + } + return token; + } + static getIDTokenUrl() { + const runtimeUrl = process.env['ACTIONS_ID_TOKEN_REQUEST_URL']; + if (!runtimeUrl) { + throw new Error('Unable to get ACTIONS_ID_TOKEN_REQUEST_URL env variable'); + } + return runtimeUrl; + } + static getCall(id_token_url) { + var _a; + return __awaiter(this, void 0, void 0, function* () { + const httpclient = OidcClient.createHttpClient(); + const res = yield httpclient + .getJson(id_token_url) + .catch(error => { + throw new Error(`Failed to get ID Token. \n + Error Code : ${error.statusCode}\n + Error Message: ${error.result.message}`); + }); + const id_token = (_a = res.result) === null || _a === void 0 ? void 0 : _a.value; + if (!id_token) { + throw new Error('Response json body do not have ID Token field'); + } + return id_token; + }); + } + static getIDToken(audience) { + return __awaiter(this, void 0, void 0, function* () { + try { + // New ID Token is requested from action service + let id_token_url = OidcClient.getIDTokenUrl(); + if (audience) { + const encodedAudience = encodeURIComponent(audience); + id_token_url = `${id_token_url}&audience=${encodedAudience}`; + } + core_1.debug(`ID token url is ${id_token_url}`); + const id_token = yield OidcClient.getCall(id_token_url); + core_1.setSecret(id_token); + return id_token; + } + catch (error) { + throw new Error(`Error message: ${error.message}`); + } + }); + } +} +exports.OidcClient = OidcClient; +//# sourceMappingURL=oidc-utils.js.map \ No newline at end of file diff --git a/node_modules/@actions/core/lib/oidc-utils.js.map b/node_modules/@actions/core/lib/oidc-utils.js.map new file mode 100644 index 00000000..284fa1d3 --- /dev/null +++ b/node_modules/@actions/core/lib/oidc-utils.js.map @@ -0,0 +1 @@ 
+{"version":3,"file":"oidc-utils.js","sourceRoot":"","sources":["../src/oidc-utils.ts"],"names":[],"mappings":";;;;;;;;;;;;AAGA,sDAA+C;AAC/C,wDAAqE;AACrE,iCAAuC;AAKvC,MAAa,UAAU;IACb,MAAM,CAAC,gBAAgB,CAC7B,UAAU,GAAG,IAAI,EACjB,QAAQ,GAAG,EAAE;QAEb,MAAM,cAAc,GAAmB;YACrC,YAAY,EAAE,UAAU;YACxB,UAAU,EAAE,QAAQ;SACrB,CAAA;QAED,OAAO,IAAI,wBAAU,CACnB,qBAAqB,EACrB,CAAC,IAAI,8BAAuB,CAAC,UAAU,CAAC,eAAe,EAAE,CAAC,CAAC,EAC3D,cAAc,CACf,CAAA;IACH,CAAC;IAEO,MAAM,CAAC,eAAe;QAC5B,MAAM,KAAK,GAAG,OAAO,CAAC,GAAG,CAAC,gCAAgC,CAAC,CAAA;QAC3D,IAAI,CAAC,KAAK,EAAE;YACV,MAAM,IAAI,KAAK,CACb,2DAA2D,CAC5D,CAAA;SACF;QACD,OAAO,KAAK,CAAA;IACd,CAAC;IAEO,MAAM,CAAC,aAAa;QAC1B,MAAM,UAAU,GAAG,OAAO,CAAC,GAAG,CAAC,8BAA8B,CAAC,CAAA;QAC9D,IAAI,CAAC,UAAU,EAAE;YACf,MAAM,IAAI,KAAK,CAAC,yDAAyD,CAAC,CAAA;SAC3E;QACD,OAAO,UAAU,CAAA;IACnB,CAAC;IAEO,MAAM,CAAO,OAAO,CAAC,YAAoB;;;YAC/C,MAAM,UAAU,GAAG,UAAU,CAAC,gBAAgB,EAAE,CAAA;YAEhD,MAAM,GAAG,GAAG,MAAM,UAAU;iBACzB,OAAO,CAAgB,YAAY,CAAC;iBACpC,KAAK,CAAC,KAAK,CAAC,EAAE;gBACb,MAAM,IAAI,KAAK,CACb;uBACa,KAAK,CAAC,UAAU;yBACd,KAAK,CAAC,MAAM,CAAC,OAAO,EAAE,CACtC,CAAA;YACH,CAAC,CAAC,CAAA;YAEJ,MAAM,QAAQ,SAAG,GAAG,CAAC,MAAM,0CAAE,KAAK,CAAA;YAClC,IAAI,CAAC,QAAQ,EAAE;gBACb,MAAM,IAAI,KAAK,CAAC,+CAA+C,CAAC,CAAA;aACjE;YACD,OAAO,QAAQ,CAAA;;KAChB;IAED,MAAM,CAAO,UAAU,CAAC,QAAiB;;YACvC,IAAI;gBACF,gDAAgD;gBAChD,IAAI,YAAY,GAAW,UAAU,CAAC,aAAa,EAAE,CAAA;gBACrD,IAAI,QAAQ,EAAE;oBACZ,MAAM,eAAe,GAAG,kBAAkB,CAAC,QAAQ,CAAC,CAAA;oBACpD,YAAY,GAAG,GAAG,YAAY,aAAa,eAAe,EAAE,CAAA;iBAC7D;gBAED,YAAK,CAAC,mBAAmB,YAAY,EAAE,CAAC,CAAA;gBAExC,MAAM,QAAQ,GAAG,MAAM,UAAU,CAAC,OAAO,CAAC,YAAY,CAAC,CAAA;gBACvD,gBAAS,CAAC,QAAQ,CAAC,CAAA;gBACnB,OAAO,QAAQ,CAAA;aAChB;YAAC,OAAO,KAAK,EAAE;gBACd,MAAM,IAAI,KAAK,CAAC,kBAAkB,KAAK,CAAC,OAAO,EAAE,CAAC,CAAA;aACnD;QACH,CAAC;KAAA;CACF;AAzED,gCAyEC"} \ No newline at end of file diff --git a/node_modules/@actions/core/lib/path-utils.d.ts b/node_modules/@actions/core/lib/path-utils.d.ts new file mode 100644 index 00000000..1fee9f39 --- /dev/null +++ b/node_modules/@actions/core/lib/path-utils.d.ts @@ -0,0 +1,25 @@ +/** + * toPosixPath converts the given path to the posix form. On Windows, \\ will be + * replaced with /. + * + * @param pth. Path to transform. + * @return string Posix path. + */ +export declare function toPosixPath(pth: string): string; +/** + * toWin32Path converts the given path to the win32 form. On Linux, / will be + * replaced with \\. + * + * @param pth. Path to transform. + * @return string Win32 path. + */ +export declare function toWin32Path(pth: string): string; +/** + * toPlatformPath converts the given path to a platform-specific path. It does + * this by replacing instances of / and \ with the platform-specific path + * separator. + * + * @param pth The path to platformize. + * @return string The platform-specific path. + */ +export declare function toPlatformPath(pth: string): string; diff --git a/node_modules/@actions/core/lib/path-utils.js b/node_modules/@actions/core/lib/path-utils.js new file mode 100644 index 00000000..7251c829 --- /dev/null +++ b/node_modules/@actions/core/lib/path-utils.js @@ -0,0 +1,58 @@ +"use strict"; +var __createBinding = (this && this.__createBinding) || (Object.create ? (function(o, m, k, k2) { + if (k2 === undefined) k2 = k; + Object.defineProperty(o, k2, { enumerable: true, get: function() { return m[k]; } }); +}) : (function(o, m, k, k2) { + if (k2 === undefined) k2 = k; + o[k2] = m[k]; +})); +var __setModuleDefault = (this && this.__setModuleDefault) || (Object.create ? 
(function(o, v) { + Object.defineProperty(o, "default", { enumerable: true, value: v }); +}) : function(o, v) { + o["default"] = v; +}); +var __importStar = (this && this.__importStar) || function (mod) { + if (mod && mod.__esModule) return mod; + var result = {}; + if (mod != null) for (var k in mod) if (k !== "default" && Object.hasOwnProperty.call(mod, k)) __createBinding(result, mod, k); + __setModuleDefault(result, mod); + return result; +}; +Object.defineProperty(exports, "__esModule", { value: true }); +exports.toPlatformPath = exports.toWin32Path = exports.toPosixPath = void 0; +const path = __importStar(require("path")); +/** + * toPosixPath converts the given path to the posix form. On Windows, \\ will be + * replaced with /. + * + * @param pth. Path to transform. + * @return string Posix path. + */ +function toPosixPath(pth) { + return pth.replace(/[\\]/g, '/'); +} +exports.toPosixPath = toPosixPath; +/** + * toWin32Path converts the given path to the win32 form. On Linux, / will be + * replaced with \\. + * + * @param pth. Path to transform. + * @return string Win32 path. + */ +function toWin32Path(pth) { + return pth.replace(/[/]/g, '\\'); +} +exports.toWin32Path = toWin32Path; +/** + * toPlatformPath converts the given path to a platform-specific path. It does + * this by replacing instances of / and \ with the platform-specific path + * separator. + * + * @param pth The path to platformize. + * @return string The platform-specific path. + */ +function toPlatformPath(pth) { + return pth.replace(/[/\\]/g, path.sep); +} +exports.toPlatformPath = toPlatformPath; +//# sourceMappingURL=path-utils.js.map \ No newline at end of file diff --git a/node_modules/@actions/core/lib/path-utils.js.map b/node_modules/@actions/core/lib/path-utils.js.map new file mode 100644 index 00000000..7ab1cace --- /dev/null +++ b/node_modules/@actions/core/lib/path-utils.js.map @@ -0,0 +1 @@ +{"version":3,"file":"path-utils.js","sourceRoot":"","sources":["../src/path-utils.ts"],"names":[],"mappings":";;;;;;;;;;;;;;;;;;;;;;AAAA,2CAA4B;AAE5B;;;;;;GAMG;AACH,SAAgB,WAAW,CAAC,GAAW;IACrC,OAAO,GAAG,CAAC,OAAO,CAAC,OAAO,EAAE,GAAG,CAAC,CAAA;AAClC,CAAC;AAFD,kCAEC;AAED;;;;;;GAMG;AACH,SAAgB,WAAW,CAAC,GAAW;IACrC,OAAO,GAAG,CAAC,OAAO,CAAC,MAAM,EAAE,IAAI,CAAC,CAAA;AAClC,CAAC;AAFD,kCAEC;AAED;;;;;;;GAOG;AACH,SAAgB,cAAc,CAAC,GAAW;IACxC,OAAO,GAAG,CAAC,OAAO,CAAC,QAAQ,EAAE,IAAI,CAAC,GAAG,CAAC,CAAA;AACxC,CAAC;AAFD,wCAEC"} \ No newline at end of file diff --git a/node_modules/@actions/core/lib/summary.d.ts b/node_modules/@actions/core/lib/summary.d.ts new file mode 100644 index 00000000..bb792555 --- /dev/null +++ b/node_modules/@actions/core/lib/summary.d.ts @@ -0,0 +1,202 @@ +export declare const SUMMARY_ENV_VAR = "GITHUB_STEP_SUMMARY"; +export declare const SUMMARY_DOCS_URL = "https://docs.github.com/actions/using-workflows/workflow-commands-for-github-actions#adding-a-job-summary"; +export declare type SummaryTableRow = (SummaryTableCell | string)[]; +export interface SummaryTableCell { + /** + * Cell content + */ + data: string; + /** + * Render cell as header + * (optional) default: false + */ + header?: boolean; + /** + * Number of columns the cell extends + * (optional) default: '1' + */ + colspan?: string; + /** + * Number of rows the cell extends + * (optional) default: '1' + */ + rowspan?: string; +} +export interface SummaryImageOptions { + /** + * The width of the image in pixels. Must be an integer without a unit. + * (optional) + */ + width?: string; + /** + * The height of the image in pixels. 
Must be an integer without a unit. + * (optional) + */ + height?: string; +} +export interface SummaryWriteOptions { + /** + * Replace all existing content in summary file with buffer contents + * (optional) default: false + */ + overwrite?: boolean; +} +declare class Summary { + private _buffer; + private _filePath?; + constructor(); + /** + * Finds the summary file path from the environment, rejects if env var is not found or file does not exist + * Also checks r/w permissions. + * + * @returns step summary file path + */ + private filePath; + /** + * Wraps content in an HTML tag, adding any HTML attributes + * + * @param {string} tag HTML tag to wrap + * @param {string | null} content content within the tag + * @param {[attribute: string]: string} attrs key-value list of HTML attributes to add + * + * @returns {string} content wrapped in HTML element + */ + private wrap; + /** + * Writes text in the buffer to the summary buffer file and empties buffer. Will append by default. + * + * @param {SummaryWriteOptions} [options] (optional) options for write operation + * + * @returns {Promise} summary instance + */ + write(options?: SummaryWriteOptions): Promise; + /** + * Clears the summary buffer and wipes the summary file + * + * @returns {Summary} summary instance + */ + clear(): Promise; + /** + * Returns the current summary buffer as a string + * + * @returns {string} string of summary buffer + */ + stringify(): string; + /** + * If the summary buffer is empty + * + * @returns {boolen} true if the buffer is empty + */ + isEmptyBuffer(): boolean; + /** + * Resets the summary buffer without writing to summary file + * + * @returns {Summary} summary instance + */ + emptyBuffer(): Summary; + /** + * Adds raw text to the summary buffer + * + * @param {string} text content to add + * @param {boolean} [addEOL=false] (optional) append an EOL to the raw text (default: false) + * + * @returns {Summary} summary instance + */ + addRaw(text: string, addEOL?: boolean): Summary; + /** + * Adds the operating system-specific end-of-line marker to the buffer + * + * @returns {Summary} summary instance + */ + addEOL(): Summary; + /** + * Adds an HTML codeblock to the summary buffer + * + * @param {string} code content to render within fenced code block + * @param {string} lang (optional) language to syntax highlight code + * + * @returns {Summary} summary instance + */ + addCodeBlock(code: string, lang?: string): Summary; + /** + * Adds an HTML list to the summary buffer + * + * @param {string[]} items list of items to render + * @param {boolean} [ordered=false] (optional) if the rendered list should be ordered or not (default: false) + * + * @returns {Summary} summary instance + */ + addList(items: string[], ordered?: boolean): Summary; + /** + * Adds an HTML table to the summary buffer + * + * @param {SummaryTableCell[]} rows table rows + * + * @returns {Summary} summary instance + */ + addTable(rows: SummaryTableRow[]): Summary; + /** + * Adds a collapsable HTML details element to the summary buffer + * + * @param {string} label text for the closed state + * @param {string} content collapsable content + * + * @returns {Summary} summary instance + */ + addDetails(label: string, content: string): Summary; + /** + * Adds an HTML image tag to the summary buffer + * + * @param {string} src path to the image you to embed + * @param {string} alt text description of the image + * @param {SummaryImageOptions} options (optional) addition image attributes + * + * @returns {Summary} summary instance + */ + 
addImage(src: string, alt: string, options?: SummaryImageOptions): Summary; + /** + * Adds an HTML section heading element + * + * @param {string} text heading text + * @param {number | string} [level=1] (optional) the heading level, default: 1 + * + * @returns {Summary} summary instance + */ + addHeading(text: string, level?: number | string): Summary; + /** + * Adds an HTML thematic break (
<hr>) to the summary buffer + * + * @returns {Summary} summary instance + */ + addSeparator(): Summary; + /** + * Adds an HTML line break (<br>
) to the summary buffer + * + * @returns {Summary} summary instance + */ + addBreak(): Summary; + /** + * Adds an HTML blockquote to the summary buffer + * + * @param {string} text quote text + * @param {string} cite (optional) citation url + * + * @returns {Summary} summary instance + */ + addQuote(text: string, cite?: string): Summary; + /** + * Adds an HTML anchor tag to the summary buffer + * + * @param {string} text link text/content + * @param {string} href hyperlink + * + * @returns {Summary} summary instance + */ + addLink(text: string, href: string): Summary; +} +/** + * @deprecated use `core.summary` + */ +export declare const markdownSummary: Summary; +export declare const summary: Summary; +export {}; diff --git a/node_modules/@actions/core/lib/summary.js b/node_modules/@actions/core/lib/summary.js new file mode 100644 index 00000000..04a335b8 --- /dev/null +++ b/node_modules/@actions/core/lib/summary.js @@ -0,0 +1,283 @@ +"use strict"; +var __awaiter = (this && this.__awaiter) || function (thisArg, _arguments, P, generator) { + function adopt(value) { return value instanceof P ? value : new P(function (resolve) { resolve(value); }); } + return new (P || (P = Promise))(function (resolve, reject) { + function fulfilled(value) { try { step(generator.next(value)); } catch (e) { reject(e); } } + function rejected(value) { try { step(generator["throw"](value)); } catch (e) { reject(e); } } + function step(result) { result.done ? resolve(result.value) : adopt(result.value).then(fulfilled, rejected); } + step((generator = generator.apply(thisArg, _arguments || [])).next()); + }); +}; +Object.defineProperty(exports, "__esModule", { value: true }); +exports.summary = exports.markdownSummary = exports.SUMMARY_DOCS_URL = exports.SUMMARY_ENV_VAR = void 0; +const os_1 = require("os"); +const fs_1 = require("fs"); +const { access, appendFile, writeFile } = fs_1.promises; +exports.SUMMARY_ENV_VAR = 'GITHUB_STEP_SUMMARY'; +exports.SUMMARY_DOCS_URL = 'https://docs.github.com/actions/using-workflows/workflow-commands-for-github-actions#adding-a-job-summary'; +class Summary { + constructor() { + this._buffer = ''; + } + /** + * Finds the summary file path from the environment, rejects if env var is not found or file does not exist + * Also checks r/w permissions. + * + * @returns step summary file path + */ + filePath() { + return __awaiter(this, void 0, void 0, function* () { + if (this._filePath) { + return this._filePath; + } + const pathFromEnv = process.env[exports.SUMMARY_ENV_VAR]; + if (!pathFromEnv) { + throw new Error(`Unable to find environment variable for $${exports.SUMMARY_ENV_VAR}. Check if your runtime environment supports job summaries.`); + } + try { + yield access(pathFromEnv, fs_1.constants.R_OK | fs_1.constants.W_OK); + } + catch (_a) { + throw new Error(`Unable to access summary file: '${pathFromEnv}'. 
Check if the file has correct read/write permissions.`); + } + this._filePath = pathFromEnv; + return this._filePath; + }); + } + /** + * Wraps content in an HTML tag, adding any HTML attributes + * + * @param {string} tag HTML tag to wrap + * @param {string | null} content content within the tag + * @param {[attribute: string]: string} attrs key-value list of HTML attributes to add + * + * @returns {string} content wrapped in HTML element + */ + wrap(tag, content, attrs = {}) { + const htmlAttrs = Object.entries(attrs) + .map(([key, value]) => ` ${key}="${value}"`) + .join(''); + if (!content) { + return `<${tag}${htmlAttrs}>`; + } + return `<${tag}${htmlAttrs}>${content}`; + } + /** + * Writes text in the buffer to the summary buffer file and empties buffer. Will append by default. + * + * @param {SummaryWriteOptions} [options] (optional) options for write operation + * + * @returns {Promise} summary instance + */ + write(options) { + return __awaiter(this, void 0, void 0, function* () { + const overwrite = !!(options === null || options === void 0 ? void 0 : options.overwrite); + const filePath = yield this.filePath(); + const writeFunc = overwrite ? writeFile : appendFile; + yield writeFunc(filePath, this._buffer, { encoding: 'utf8' }); + return this.emptyBuffer(); + }); + } + /** + * Clears the summary buffer and wipes the summary file + * + * @returns {Summary} summary instance + */ + clear() { + return __awaiter(this, void 0, void 0, function* () { + return this.emptyBuffer().write({ overwrite: true }); + }); + } + /** + * Returns the current summary buffer as a string + * + * @returns {string} string of summary buffer + */ + stringify() { + return this._buffer; + } + /** + * If the summary buffer is empty + * + * @returns {boolen} true if the buffer is empty + */ + isEmptyBuffer() { + return this._buffer.length === 0; + } + /** + * Resets the summary buffer without writing to summary file + * + * @returns {Summary} summary instance + */ + emptyBuffer() { + this._buffer = ''; + return this; + } + /** + * Adds raw text to the summary buffer + * + * @param {string} text content to add + * @param {boolean} [addEOL=false] (optional) append an EOL to the raw text (default: false) + * + * @returns {Summary} summary instance + */ + addRaw(text, addEOL = false) { + this._buffer += text; + return addEOL ? this.addEOL() : this; + } + /** + * Adds the operating system-specific end-of-line marker to the buffer + * + * @returns {Summary} summary instance + */ + addEOL() { + return this.addRaw(os_1.EOL); + } + /** + * Adds an HTML codeblock to the summary buffer + * + * @param {string} code content to render within fenced code block + * @param {string} lang (optional) language to syntax highlight code + * + * @returns {Summary} summary instance + */ + addCodeBlock(code, lang) { + const attrs = Object.assign({}, (lang && { lang })); + const element = this.wrap('pre', this.wrap('code', code), attrs); + return this.addRaw(element).addEOL(); + } + /** + * Adds an HTML list to the summary buffer + * + * @param {string[]} items list of items to render + * @param {boolean} [ordered=false] (optional) if the rendered list should be ordered or not (default: false) + * + * @returns {Summary} summary instance + */ + addList(items, ordered = false) { + const tag = ordered ? 
'ol' : 'ul'; + const listItems = items.map(item => this.wrap('li', item)).join(''); + const element = this.wrap(tag, listItems); + return this.addRaw(element).addEOL(); + } + /** + * Adds an HTML table to the summary buffer + * + * @param {SummaryTableCell[]} rows table rows + * + * @returns {Summary} summary instance + */ + addTable(rows) { + const tableBody = rows + .map(row => { + const cells = row + .map(cell => { + if (typeof cell === 'string') { + return this.wrap('td', cell); + } + const { header, data, colspan, rowspan } = cell; + const tag = header ? 'th' : 'td'; + const attrs = Object.assign(Object.assign({}, (colspan && { colspan })), (rowspan && { rowspan })); + return this.wrap(tag, data, attrs); + }) + .join(''); + return this.wrap('tr', cells); + }) + .join(''); + const element = this.wrap('table', tableBody); + return this.addRaw(element).addEOL(); + } + /** + * Adds a collapsable HTML details element to the summary buffer + * + * @param {string} label text for the closed state + * @param {string} content collapsable content + * + * @returns {Summary} summary instance + */ + addDetails(label, content) { + const element = this.wrap('details', this.wrap('summary', label) + content); + return this.addRaw(element).addEOL(); + } + /** + * Adds an HTML image tag to the summary buffer + * + * @param {string} src path to the image you to embed + * @param {string} alt text description of the image + * @param {SummaryImageOptions} options (optional) addition image attributes + * + * @returns {Summary} summary instance + */ + addImage(src, alt, options) { + const { width, height } = options || {}; + const attrs = Object.assign(Object.assign({}, (width && { width })), (height && { height })); + const element = this.wrap('img', null, Object.assign({ src, alt }, attrs)); + return this.addRaw(element).addEOL(); + } + /** + * Adds an HTML section heading element + * + * @param {string} text heading text + * @param {number | string} [level=1] (optional) the heading level, default: 1 + * + * @returns {Summary} summary instance + */ + addHeading(text, level) { + const tag = `h${level}`; + const allowedTag = ['h1', 'h2', 'h3', 'h4', 'h5', 'h6'].includes(tag) + ? tag + : 'h1'; + const element = this.wrap(allowedTag, text); + return this.addRaw(element).addEOL(); + } + /** + * Adds an HTML thematic break (
<hr>) to the summary buffer + * + * @returns {Summary} summary instance + */ + addSeparator() { + const element = this.wrap('hr', null); + return this.addRaw(element).addEOL(); + } + /** + * Adds an HTML line break (<br>
) to the summary buffer + * + * @returns {Summary} summary instance + */ + addBreak() { + const element = this.wrap('br', null); + return this.addRaw(element).addEOL(); + } + /** + * Adds an HTML blockquote to the summary buffer + * + * @param {string} text quote text + * @param {string} cite (optional) citation url + * + * @returns {Summary} summary instance + */ + addQuote(text, cite) { + const attrs = Object.assign({}, (cite && { cite })); + const element = this.wrap('blockquote', text, attrs); + return this.addRaw(element).addEOL(); + } + /** + * Adds an HTML anchor tag to the summary buffer + * + * @param {string} text link text/content + * @param {string} href hyperlink + * + * @returns {Summary} summary instance + */ + addLink(text, href) { + const element = this.wrap('a', text, { href }); + return this.addRaw(element).addEOL(); + } +} +const _summary = new Summary(); +/** + * @deprecated use `core.summary` + */ +exports.markdownSummary = _summary; +exports.summary = _summary; +//# sourceMappingURL=summary.js.map \ No newline at end of file diff --git a/node_modules/@actions/core/lib/summary.js.map b/node_modules/@actions/core/lib/summary.js.map new file mode 100644 index 00000000..d598f264 --- /dev/null +++ b/node_modules/@actions/core/lib/summary.js.map @@ -0,0 +1 @@ +{"version":3,"file":"summary.js","sourceRoot":"","sources":["../src/summary.ts"],"names":[],"mappings":";;;;;;;;;;;;AAAA,2BAAsB;AACtB,2BAAsC;AACtC,MAAM,EAAC,MAAM,EAAE,UAAU,EAAE,SAAS,EAAC,GAAG,aAAQ,CAAA;AAEnC,QAAA,eAAe,GAAG,qBAAqB,CAAA;AACvC,QAAA,gBAAgB,GAC3B,2GAA2G,CAAA;AA+C7G,MAAM,OAAO;IAIX;QACE,IAAI,CAAC,OAAO,GAAG,EAAE,CAAA;IACnB,CAAC;IAED;;;;;OAKG;IACW,QAAQ;;YACpB,IAAI,IAAI,CAAC,SAAS,EAAE;gBAClB,OAAO,IAAI,CAAC,SAAS,CAAA;aACtB;YAED,MAAM,WAAW,GAAG,OAAO,CAAC,GAAG,CAAC,uBAAe,CAAC,CAAA;YAChD,IAAI,CAAC,WAAW,EAAE;gBAChB,MAAM,IAAI,KAAK,CACb,4CAA4C,uBAAe,6DAA6D,CACzH,CAAA;aACF;YAED,IAAI;gBACF,MAAM,MAAM,CAAC,WAAW,EAAE,cAAS,CAAC,IAAI,GAAG,cAAS,CAAC,IAAI,CAAC,CAAA;aAC3D;YAAC,WAAM;gBACN,MAAM,IAAI,KAAK,CACb,mCAAmC,WAAW,0DAA0D,CACzG,CAAA;aACF;YAED,IAAI,CAAC,SAAS,GAAG,WAAW,CAAA;YAC5B,OAAO,IAAI,CAAC,SAAS,CAAA;QACvB,CAAC;KAAA;IAED;;;;;;;;OAQG;IACK,IAAI,CACV,GAAW,EACX,OAAsB,EACtB,QAAuC,EAAE;QAEzC,MAAM,SAAS,GAAG,MAAM,CAAC,OAAO,CAAC,KAAK,CAAC;aACpC,GAAG,CAAC,CAAC,CAAC,GAAG,EAAE,KAAK,CAAC,EAAE,EAAE,CAAC,IAAI,GAAG,KAAK,KAAK,GAAG,CAAC;aAC3C,IAAI,CAAC,EAAE,CAAC,CAAA;QAEX,IAAI,CAAC,OAAO,EAAE;YACZ,OAAO,IAAI,GAAG,GAAG,SAAS,GAAG,CAAA;SAC9B;QAED,OAAO,IAAI,GAAG,GAAG,SAAS,IAAI,OAAO,KAAK,GAAG,GAAG,CAAA;IAClD,CAAC;IAED;;;;;;OAMG;IACG,KAAK,CAAC,OAA6B;;YACvC,MAAM,SAAS,GAAG,CAAC,EAAC,OAAO,aAAP,OAAO,uBAAP,OAAO,CAAE,SAAS,CAAA,CAAA;YACtC,MAAM,QAAQ,GAAG,MAAM,IAAI,CAAC,QAAQ,EAAE,CAAA;YACtC,MAAM,SAAS,GAAG,SAAS,CAAC,CAAC,CAAC,SAAS,CAAC,CAAC,CAAC,UAAU,CAAA;YACpD,MAAM,SAAS,CAAC,QAAQ,EAAE,IAAI,CAAC,OAAO,EAAE,EAAC,QAAQ,EAAE,MAAM,EAAC,CAAC,CAAA;YAC3D,OAAO,IAAI,CAAC,WAAW,EAAE,CAAA;QAC3B,CAAC;KAAA;IAED;;;;OAIG;IACG,KAAK;;YACT,OAAO,IAAI,CAAC,WAAW,EAAE,CAAC,KAAK,CAAC,EAAC,SAAS,EAAE,IAAI,EAAC,CAAC,CAAA;QACpD,CAAC;KAAA;IAED;;;;OAIG;IACH,SAAS;QACP,OAAO,IAAI,CAAC,OAAO,CAAA;IACrB,CAAC;IAED;;;;OAIG;IACH,aAAa;QACX,OAAO,IAAI,CAAC,OAAO,CAAC,MAAM,KAAK,CAAC,CAAA;IAClC,CAAC;IAED;;;;OAIG;IACH,WAAW;QACT,IAAI,CAAC,OAAO,GAAG,EAAE,CAAA;QACjB,OAAO,IAAI,CAAA;IACb,CAAC;IAED;;;;;;;OAOG;IACH,MAAM,CAAC,IAAY,EAAE,MAAM,GAAG,KAAK;QACjC,IAAI,CAAC,OAAO,IAAI,IAAI,CAAA;QACpB,OAAO,MAAM,CAAC,CAAC,CAAC,IAAI,CAAC,MAAM,EAAE,CAAC,CAAC,CAAC,IAAI,CAAA;IACtC,CAAC;IAED;;;;OAIG;IACH,MAAM;QACJ,OAAO,IAAI,CAAC,MAAM,CAAC,QAAG,CAAC,CAAA;IACzB,CAAC;IAED;;;;;;;OAOG;IACH,YAAY,CAAC,IAAY,EAAE,IAAa;QACtC,MAAM,KAAK,qBA
CN,CAAC,IAAI,IAAI,EAAC,IAAI,EAAC,CAAC,CACpB,CAAA;QACD,MAAM,OAAO,GAAG,IAAI,CAAC,IAAI,CAAC,KAAK,EAAE,IAAI,CAAC,IAAI,CAAC,MAAM,EAAE,IAAI,CAAC,EAAE,KAAK,CAAC,CAAA;QAChE,OAAO,IAAI,CAAC,MAAM,CAAC,OAAO,CAAC,CAAC,MAAM,EAAE,CAAA;IACtC,CAAC;IAED;;;;;;;OAOG;IACH,OAAO,CAAC,KAAe,EAAE,OAAO,GAAG,KAAK;QACtC,MAAM,GAAG,GAAG,OAAO,CAAC,CAAC,CAAC,IAAI,CAAC,CAAC,CAAC,IAAI,CAAA;QACjC,MAAM,SAAS,GAAG,KAAK,CAAC,GAAG,CAAC,IAAI,CAAC,EAAE,CAAC,IAAI,CAAC,IAAI,CAAC,IAAI,EAAE,IAAI,CAAC,CAAC,CAAC,IAAI,CAAC,EAAE,CAAC,CAAA;QACnE,MAAM,OAAO,GAAG,IAAI,CAAC,IAAI,CAAC,GAAG,EAAE,SAAS,CAAC,CAAA;QACzC,OAAO,IAAI,CAAC,MAAM,CAAC,OAAO,CAAC,CAAC,MAAM,EAAE,CAAA;IACtC,CAAC;IAED;;;;;;OAMG;IACH,QAAQ,CAAC,IAAuB;QAC9B,MAAM,SAAS,GAAG,IAAI;aACnB,GAAG,CAAC,GAAG,CAAC,EAAE;YACT,MAAM,KAAK,GAAG,GAAG;iBACd,GAAG,CAAC,IAAI,CAAC,EAAE;gBACV,IAAI,OAAO,IAAI,KAAK,QAAQ,EAAE;oBAC5B,OAAO,IAAI,CAAC,IAAI,CAAC,IAAI,EAAE,IAAI,CAAC,CAAA;iBAC7B;gBAED,MAAM,EAAC,MAAM,EAAE,IAAI,EAAE,OAAO,EAAE,OAAO,EAAC,GAAG,IAAI,CAAA;gBAC7C,MAAM,GAAG,GAAG,MAAM,CAAC,CAAC,CAAC,IAAI,CAAC,CAAC,CAAC,IAAI,CAAA;gBAChC,MAAM,KAAK,mCACN,CAAC,OAAO,IAAI,EAAC,OAAO,EAAC,CAAC,GACtB,CAAC,OAAO,IAAI,EAAC,OAAO,EAAC,CAAC,CAC1B,CAAA;gBAED,OAAO,IAAI,CAAC,IAAI,CAAC,GAAG,EAAE,IAAI,EAAE,KAAK,CAAC,CAAA;YACpC,CAAC,CAAC;iBACD,IAAI,CAAC,EAAE,CAAC,CAAA;YAEX,OAAO,IAAI,CAAC,IAAI,CAAC,IAAI,EAAE,KAAK,CAAC,CAAA;QAC/B,CAAC,CAAC;aACD,IAAI,CAAC,EAAE,CAAC,CAAA;QAEX,MAAM,OAAO,GAAG,IAAI,CAAC,IAAI,CAAC,OAAO,EAAE,SAAS,CAAC,CAAA;QAC7C,OAAO,IAAI,CAAC,MAAM,CAAC,OAAO,CAAC,CAAC,MAAM,EAAE,CAAA;IACtC,CAAC;IAED;;;;;;;OAOG;IACH,UAAU,CAAC,KAAa,EAAE,OAAe;QACvC,MAAM,OAAO,GAAG,IAAI,CAAC,IAAI,CAAC,SAAS,EAAE,IAAI,CAAC,IAAI,CAAC,SAAS,EAAE,KAAK,CAAC,GAAG,OAAO,CAAC,CAAA;QAC3E,OAAO,IAAI,CAAC,MAAM,CAAC,OAAO,CAAC,CAAC,MAAM,EAAE,CAAA;IACtC,CAAC;IAED;;;;;;;;OAQG;IACH,QAAQ,CAAC,GAAW,EAAE,GAAW,EAAE,OAA6B;QAC9D,MAAM,EAAC,KAAK,EAAE,MAAM,EAAC,GAAG,OAAO,IAAI,EAAE,CAAA;QACrC,MAAM,KAAK,mCACN,CAAC,KAAK,IAAI,EAAC,KAAK,EAAC,CAAC,GAClB,CAAC,MAAM,IAAI,EAAC,MAAM,EAAC,CAAC,CACxB,CAAA;QAED,MAAM,OAAO,GAAG,IAAI,CAAC,IAAI,CAAC,KAAK,EAAE,IAAI,kBAAG,GAAG,EAAE,GAAG,IAAK,KAAK,EAAE,CAAA;QAC5D,OAAO,IAAI,CAAC,MAAM,CAAC,OAAO,CAAC,CAAC,MAAM,EAAE,CAAA;IACtC,CAAC;IAED;;;;;;;OAOG;IACH,UAAU,CAAC,IAAY,EAAE,KAAuB;QAC9C,MAAM,GAAG,GAAG,IAAI,KAAK,EAAE,CAAA;QACvB,MAAM,UAAU,GAAG,CAAC,IAAI,EAAE,IAAI,EAAE,IAAI,EAAE,IAAI,EAAE,IAAI,EAAE,IAAI,CAAC,CAAC,QAAQ,CAAC,GAAG,CAAC;YACnE,CAAC,CAAC,GAAG;YACL,CAAC,CAAC,IAAI,CAAA;QACR,MAAM,OAAO,GAAG,IAAI,CAAC,IAAI,CAAC,UAAU,EAAE,IAAI,CAAC,CAAA;QAC3C,OAAO,IAAI,CAAC,MAAM,CAAC,OAAO,CAAC,CAAC,MAAM,EAAE,CAAA;IACtC,CAAC;IAED;;;;OAIG;IACH,YAAY;QACV,MAAM,OAAO,GAAG,IAAI,CAAC,IAAI,CAAC,IAAI,EAAE,IAAI,CAAC,CAAA;QACrC,OAAO,IAAI,CAAC,MAAM,CAAC,OAAO,CAAC,CAAC,MAAM,EAAE,CAAA;IACtC,CAAC;IAED;;;;OAIG;IACH,QAAQ;QACN,MAAM,OAAO,GAAG,IAAI,CAAC,IAAI,CAAC,IAAI,EAAE,IAAI,CAAC,CAAA;QACrC,OAAO,IAAI,CAAC,MAAM,CAAC,OAAO,CAAC,CAAC,MAAM,EAAE,CAAA;IACtC,CAAC;IAED;;;;;;;OAOG;IACH,QAAQ,CAAC,IAAY,EAAE,IAAa;QAClC,MAAM,KAAK,qBACN,CAAC,IAAI,IAAI,EAAC,IAAI,EAAC,CAAC,CACpB,CAAA;QACD,MAAM,OAAO,GAAG,IAAI,CAAC,IAAI,CAAC,YAAY,EAAE,IAAI,EAAE,KAAK,CAAC,CAAA;QACpD,OAAO,IAAI,CAAC,MAAM,CAAC,OAAO,CAAC,CAAC,MAAM,EAAE,CAAA;IACtC,CAAC;IAED;;;;;;;OAOG;IACH,OAAO,CAAC,IAAY,EAAE,IAAY;QAChC,MAAM,OAAO,GAAG,IAAI,CAAC,IAAI,CAAC,GAAG,EAAE,IAAI,EAAE,EAAC,IAAI,EAAC,CAAC,CAAA;QAC5C,OAAO,IAAI,CAAC,MAAM,CAAC,OAAO,CAAC,CAAC,MAAM,EAAE,CAAA;IACtC,CAAC;CACF;AAED,MAAM,QAAQ,GAAG,IAAI,OAAO,EAAE,CAAA;AAE9B;;GAEG;AACU,QAAA,eAAe,GAAG,QAAQ,CAAA;AAC1B,QAAA,OAAO,GAAG,QAAQ,CAAA"} \ No newline at end of file diff --git a/node_modules/@actions/core/lib/utils.d.ts b/node_modules/@actions/core/lib/utils.d.ts new file mode 100644 index 
00000000..3b9e28d5 --- /dev/null +++ b/node_modules/@actions/core/lib/utils.d.ts @@ -0,0 +1,14 @@ +import { AnnotationProperties } from './core'; +import { CommandProperties } from './command'; +/** + * Sanitizes an input into a string so it can be passed into issueCommand safely + * @param input input to sanitize into a string + */ +export declare function toCommandValue(input: any): string; +/** + * + * @param annotationProperties + * @returns The command properties to send with the actual annotation command + * See IssueCommandProperties: https://github.com/actions/runner/blob/main/src/Runner.Worker/ActionCommandManager.cs#L646 + */ +export declare function toCommandProperties(annotationProperties: AnnotationProperties): CommandProperties; diff --git a/node_modules/@actions/core/lib/utils.js b/node_modules/@actions/core/lib/utils.js new file mode 100644 index 00000000..9b5ca44b --- /dev/null +++ b/node_modules/@actions/core/lib/utils.js @@ -0,0 +1,40 @@ +"use strict"; +// We use any as a valid input type +/* eslint-disable @typescript-eslint/no-explicit-any */ +Object.defineProperty(exports, "__esModule", { value: true }); +exports.toCommandProperties = exports.toCommandValue = void 0; +/** + * Sanitizes an input into a string so it can be passed into issueCommand safely + * @param input input to sanitize into a string + */ +function toCommandValue(input) { + if (input === null || input === undefined) { + return ''; + } + else if (typeof input === 'string' || input instanceof String) { + return input; + } + return JSON.stringify(input); +} +exports.toCommandValue = toCommandValue; +/** + * + * @param annotationProperties + * @returns The command properties to send with the actual annotation command + * See IssueCommandProperties: https://github.com/actions/runner/blob/main/src/Runner.Worker/ActionCommandManager.cs#L646 + */ +function toCommandProperties(annotationProperties) { + if (!Object.keys(annotationProperties).length) { + return {}; + } + return { + title: annotationProperties.title, + file: annotationProperties.file, + line: annotationProperties.startLine, + endLine: annotationProperties.endLine, + col: annotationProperties.startColumn, + endColumn: annotationProperties.endColumn + }; +} +exports.toCommandProperties = toCommandProperties; +//# sourceMappingURL=utils.js.map \ No newline at end of file diff --git a/node_modules/@actions/core/lib/utils.js.map b/node_modules/@actions/core/lib/utils.js.map new file mode 100644 index 00000000..8211bb7e --- /dev/null +++ b/node_modules/@actions/core/lib/utils.js.map @@ -0,0 +1 @@ +{"version":3,"file":"utils.js","sourceRoot":"","sources":["../src/utils.ts"],"names":[],"mappings":";AAAA,mCAAmC;AACnC,uDAAuD;;;AAKvD;;;GAGG;AACH,SAAgB,cAAc,CAAC,KAAU;IACvC,IAAI,KAAK,KAAK,IAAI,IAAI,KAAK,KAAK,SAAS,EAAE;QACzC,OAAO,EAAE,CAAA;KACV;SAAM,IAAI,OAAO,KAAK,KAAK,QAAQ,IAAI,KAAK,YAAY,MAAM,EAAE;QAC/D,OAAO,KAAe,CAAA;KACvB;IACD,OAAO,IAAI,CAAC,SAAS,CAAC,KAAK,CAAC,CAAA;AAC9B,CAAC;AAPD,wCAOC;AAED;;;;;GAKG;AACH,SAAgB,mBAAmB,CACjC,oBAA0C;IAE1C,IAAI,CAAC,MAAM,CAAC,IAAI,CAAC,oBAAoB,CAAC,CAAC,MAAM,EAAE;QAC7C,OAAO,EAAE,CAAA;KACV;IAED,OAAO;QACL,KAAK,EAAE,oBAAoB,CAAC,KAAK;QACjC,IAAI,EAAE,oBAAoB,CAAC,IAAI;QAC/B,IAAI,EAAE,oBAAoB,CAAC,SAAS;QACpC,OAAO,EAAE,oBAAoB,CAAC,OAAO;QACrC,GAAG,EAAE,oBAAoB,CAAC,WAAW;QACrC,SAAS,EAAE,oBAAoB,CAAC,SAAS;KAC1C,CAAA;AACH,CAAC;AAfD,kDAeC"} \ No newline at end of file diff --git a/node_modules/@actions/core/package.json b/node_modules/@actions/core/package.json new file mode 100644 index 00000000..1f3824de --- /dev/null +++ 
b/node_modules/@actions/core/package.json @@ -0,0 +1,46 @@ +{ + "name": "@actions/core", + "version": "1.10.0", + "description": "Actions core lib", + "keywords": [ + "github", + "actions", + "core" + ], + "homepage": "https://github.com/actions/toolkit/tree/main/packages/core", + "license": "MIT", + "main": "lib/core.js", + "types": "lib/core.d.ts", + "directories": { + "lib": "lib", + "test": "__tests__" + }, + "files": [ + "lib", + "!.DS_Store" + ], + "publishConfig": { + "access": "public" + }, + "repository": { + "type": "git", + "url": "git+https://github.com/actions/toolkit.git", + "directory": "packages/core" + }, + "scripts": { + "audit-moderate": "npm install && npm audit --json --audit-level=moderate > audit.json", + "test": "echo \"Error: run tests from root\" && exit 1", + "tsc": "tsc" + }, + "bugs": { + "url": "https://github.com/actions/toolkit/issues" + }, + "dependencies": { + "@actions/http-client": "^2.0.1", + "uuid": "^8.3.2" + }, + "devDependencies": { + "@types/node": "^12.0.2", + "@types/uuid": "^8.3.4" + } +} diff --git a/node_modules/@actions/http-client/LICENSE b/node_modules/@actions/http-client/LICENSE new file mode 100644 index 00000000..5823a51c --- /dev/null +++ b/node_modules/@actions/http-client/LICENSE @@ -0,0 +1,21 @@ +Actions Http Client for Node.js + +Copyright (c) GitHub, Inc. + +All rights reserved. + +MIT License + +Permission is hereby granted, free of charge, to any person obtaining a copy of this software and +associated documentation files (the "Software"), to deal in the Software without restriction, +including without limitation the rights to use, copy, modify, merge, publish, distribute, sublicense, +and/or sell copies of the Software, and to permit persons to whom the Software is furnished to do so, +subject to the following conditions: + +The above copyright notice and this permission notice shall be included in all copies or substantial portions of the Software. + +THE SOFTWARE IS PROVIDED *AS IS*, WITHOUT WARRANTY OF ANY KIND, EXPRESS OR IMPLIED, INCLUDING BUT NOT +LIMITED TO THE WARRANTIES OF MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN +NO EVENT SHALL THE AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY, +WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, OUT OF OR IN CONNECTION WITH THE +SOFTWARE OR THE USE OR OTHER DEALINGS IN THE SOFTWARE. diff --git a/node_modules/@actions/http-client/README.md b/node_modules/@actions/http-client/README.md new file mode 100644 index 00000000..7e06adeb --- /dev/null +++ b/node_modules/@actions/http-client/README.md @@ -0,0 +1,73 @@ +# `@actions/http-client` + +A lightweight HTTP client optimized for building actions. + +## Features + + - HTTP client with TypeScript generics and async/await/Promises + - Typings included! + - [Proxy support](https://help.github.com/en/actions/automating-your-workflow-with-github-actions/about-self-hosted-runners#using-a-proxy-server-with-self-hosted-runners) just works with actions and the runner + - Targets ES2019 (runner runs actions with node 12+). Only supported on node 12+. + - Basic, Bearer and PAT Support out of the box. Extensible handlers for others. + - Redirects supported + +Features and releases [here](./RELEASES.md) + +## Install + +``` +npm install @actions/http-client --save +``` + +## Samples + +See the [tests](./__tests__) for detailed examples. + +## Errors + +### HTTP + +The HTTP client does not throw unless truly exceptional. 
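+
+For illustration, a minimal sketch of how a caller might check the status on the returned response (the URL and user-agent string below are placeholders, not taken from this package's docs):
+
+```typescript
+import * as httpm from '@actions/http-client'
+
+async function example(): Promise<void> {
+  const http = new httpm.HttpClient('http-client-example')
+  const res = await http.get('https://example.com/might-404')
+  const body = await res.readBody()
+  // Non-2xx statuses are returned, not thrown; check the status code yourself
+  if (res.message.statusCode !== 200) {
+    console.log(`request failed with status ${res.message.statusCode}: ${body}`)
+  }
+}
+```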
+ +* A request that successfully executes resulting in a 404, 500 etc... will return a response object with a status code and a body. +* Redirects (3xx) will be followed by default. + +See the [tests](./__tests__) for detailed examples. + +## Debugging + +To enable detailed console logging of all HTTP requests and responses, set the NODE_DEBUG environment variable: + +```shell +export NODE_DEBUG=http +``` + +## Node support + +The http-client is built using the latest LTS version of Node 12. It may work on previous node LTS versions but it's tested and officially supported on Node 12+. + +## Support and Versioning + +We follow semver and will hold compatibility between major versions and increment the minor version with new features and capabilities (while holding compat). + +## Contributing + +We welcome PRs. Please create an issue and if applicable, a design before proceeding with code. + +once: + +``` +npm install +``` + +To build: + +``` +npm run build +``` + +To run all tests: + +``` +npm test +``` diff --git a/node_modules/@actions/http-client/lib/auth.d.ts b/node_modules/@actions/http-client/lib/auth.d.ts new file mode 100644 index 00000000..8cc9fc3d --- /dev/null +++ b/node_modules/@actions/http-client/lib/auth.d.ts @@ -0,0 +1,26 @@ +/// <reference types="node" /> +import * as http from 'http'; +import * as ifm from './interfaces'; +import { HttpClientResponse } from './index'; +export declare class BasicCredentialHandler implements ifm.RequestHandler { + username: string; + password: string; + constructor(username: string, password: string); + prepareRequest(options: http.RequestOptions): void; + canHandleAuthentication(): boolean; + handleAuthentication(): Promise<HttpClientResponse>; +} +export declare class BearerCredentialHandler implements ifm.RequestHandler { + token: string; + constructor(token: string); + prepareRequest(options: http.RequestOptions): void; + canHandleAuthentication(): boolean; + handleAuthentication(): Promise<HttpClientResponse>; +} +export declare class PersonalAccessTokenCredentialHandler implements ifm.RequestHandler { + token: string; + constructor(token: string); + prepareRequest(options: http.RequestOptions): void; + canHandleAuthentication(): boolean; + handleAuthentication(): Promise<HttpClientResponse>; +} diff --git a/node_modules/@actions/http-client/lib/auth.js b/node_modules/@actions/http-client/lib/auth.js new file mode 100644 index 00000000..2c150a3d --- /dev/null +++ b/node_modules/@actions/http-client/lib/auth.js @@ -0,0 +1,81 @@ +"use strict"; +var __awaiter = (this && this.__awaiter) || function (thisArg, _arguments, P, generator) { + function adopt(value) { return value instanceof P ? value : new P(function (resolve) { resolve(value); }); } + return new (P || (P = Promise))(function (resolve, reject) { + function fulfilled(value) { try { step(generator.next(value)); } catch (e) { reject(e); } } + function rejected(value) { try { step(generator["throw"](value)); } catch (e) { reject(e); } } + function step(result) { result.done ?
resolve(result.value) : adopt(result.value).then(fulfilled, rejected); } + step((generator = generator.apply(thisArg, _arguments || [])).next()); + }); +}; +Object.defineProperty(exports, "__esModule", { value: true }); +exports.PersonalAccessTokenCredentialHandler = exports.BearerCredentialHandler = exports.BasicCredentialHandler = void 0; +class BasicCredentialHandler { + constructor(username, password) { + this.username = username; + this.password = password; + } + prepareRequest(options) { + if (!options.headers) { + throw Error('The request has no headers'); + } + options.headers['Authorization'] = `Basic ${Buffer.from(`${this.username}:${this.password}`).toString('base64')}`; + } + // This handler cannot handle 401 + canHandleAuthentication() { + return false; + } + handleAuthentication() { + return __awaiter(this, void 0, void 0, function* () { + throw new Error('not implemented'); + }); + } +} +exports.BasicCredentialHandler = BasicCredentialHandler; +class BearerCredentialHandler { + constructor(token) { + this.token = token; + } + // currently implements pre-authorization + // TODO: support preAuth = false where it hooks on 401 + prepareRequest(options) { + if (!options.headers) { + throw Error('The request has no headers'); + } + options.headers['Authorization'] = `Bearer ${this.token}`; + } + // This handler cannot handle 401 + canHandleAuthentication() { + return false; + } + handleAuthentication() { + return __awaiter(this, void 0, void 0, function* () { + throw new Error('not implemented'); + }); + } +} +exports.BearerCredentialHandler = BearerCredentialHandler; +class PersonalAccessTokenCredentialHandler { + constructor(token) { + this.token = token; + } + // currently implements pre-authorization + // TODO: support preAuth = false where it hooks on 401 + prepareRequest(options) { + if (!options.headers) { + throw Error('The request has no headers'); + } + options.headers['Authorization'] = `Basic ${Buffer.from(`PAT:${this.token}`).toString('base64')}`; + } + // This handler cannot handle 401 + canHandleAuthentication() { + return false; + } + handleAuthentication() { + return __awaiter(this, void 0, void 0, function* () { + throw new Error('not implemented'); + }); + } +} +exports.PersonalAccessTokenCredentialHandler = PersonalAccessTokenCredentialHandler; +//# sourceMappingURL=auth.js.map \ No newline at end of file diff --git a/node_modules/@actions/http-client/lib/auth.js.map b/node_modules/@actions/http-client/lib/auth.js.map new file mode 100644 index 00000000..7d3a18af --- /dev/null +++ b/node_modules/@actions/http-client/lib/auth.js.map @@ -0,0 +1 @@ 
+{"version":3,"file":"auth.js","sourceRoot":"","sources":["../src/auth.ts"],"names":[],"mappings":";;;;;;;;;;;;AAIA,MAAa,sBAAsB;IAIjC,YAAY,QAAgB,EAAE,QAAgB;QAC5C,IAAI,CAAC,QAAQ,GAAG,QAAQ,CAAA;QACxB,IAAI,CAAC,QAAQ,GAAG,QAAQ,CAAA;IAC1B,CAAC;IAED,cAAc,CAAC,OAA4B;QACzC,IAAI,CAAC,OAAO,CAAC,OAAO,EAAE;YACpB,MAAM,KAAK,CAAC,4BAA4B,CAAC,CAAA;SAC1C;QACD,OAAO,CAAC,OAAO,CAAC,eAAe,CAAC,GAAG,SAAS,MAAM,CAAC,IAAI,CACrD,GAAG,IAAI,CAAC,QAAQ,IAAI,IAAI,CAAC,QAAQ,EAAE,CACpC,CAAC,QAAQ,CAAC,QAAQ,CAAC,EAAE,CAAA;IACxB,CAAC;IAED,iCAAiC;IACjC,uBAAuB;QACrB,OAAO,KAAK,CAAA;IACd,CAAC;IAEK,oBAAoB;;YACxB,MAAM,IAAI,KAAK,CAAC,iBAAiB,CAAC,CAAA;QACpC,CAAC;KAAA;CACF;AA1BD,wDA0BC;AAED,MAAa,uBAAuB;IAGlC,YAAY,KAAa;QACvB,IAAI,CAAC,KAAK,GAAG,KAAK,CAAA;IACpB,CAAC;IAED,yCAAyC;IACzC,sDAAsD;IACtD,cAAc,CAAC,OAA4B;QACzC,IAAI,CAAC,OAAO,CAAC,OAAO,EAAE;YACpB,MAAM,KAAK,CAAC,4BAA4B,CAAC,CAAA;SAC1C;QACD,OAAO,CAAC,OAAO,CAAC,eAAe,CAAC,GAAG,UAAU,IAAI,CAAC,KAAK,EAAE,CAAA;IAC3D,CAAC;IAED,iCAAiC;IACjC,uBAAuB;QACrB,OAAO,KAAK,CAAA;IACd,CAAC;IAEK,oBAAoB;;YACxB,MAAM,IAAI,KAAK,CAAC,iBAAiB,CAAC,CAAA;QACpC,CAAC;KAAA;CACF;AAxBD,0DAwBC;AAED,MAAa,oCAAoC;IAI/C,YAAY,KAAa;QACvB,IAAI,CAAC,KAAK,GAAG,KAAK,CAAA;IACpB,CAAC;IAED,yCAAyC;IACzC,sDAAsD;IACtD,cAAc,CAAC,OAA4B;QACzC,IAAI,CAAC,OAAO,CAAC,OAAO,EAAE;YACpB,MAAM,KAAK,CAAC,4BAA4B,CAAC,CAAA;SAC1C;QACD,OAAO,CAAC,OAAO,CAAC,eAAe,CAAC,GAAG,SAAS,MAAM,CAAC,IAAI,CACrD,OAAO,IAAI,CAAC,KAAK,EAAE,CACpB,CAAC,QAAQ,CAAC,QAAQ,CAAC,EAAE,CAAA;IACxB,CAAC;IAED,iCAAiC;IACjC,uBAAuB;QACrB,OAAO,KAAK,CAAA;IACd,CAAC;IAEK,oBAAoB;;YACxB,MAAM,IAAI,KAAK,CAAC,iBAAiB,CAAC,CAAA;QACpC,CAAC;KAAA;CACF;AA3BD,oFA2BC"} \ No newline at end of file diff --git a/node_modules/@actions/http-client/lib/index.d.ts b/node_modules/@actions/http-client/lib/index.d.ts new file mode 100644 index 00000000..fe733d14 --- /dev/null +++ b/node_modules/@actions/http-client/lib/index.d.ts @@ -0,0 +1,123 @@ +/// +import * as http from 'http'; +import * as ifm from './interfaces'; +export declare enum HttpCodes { + OK = 200, + MultipleChoices = 300, + MovedPermanently = 301, + ResourceMoved = 302, + SeeOther = 303, + NotModified = 304, + UseProxy = 305, + SwitchProxy = 306, + TemporaryRedirect = 307, + PermanentRedirect = 308, + BadRequest = 400, + Unauthorized = 401, + PaymentRequired = 402, + Forbidden = 403, + NotFound = 404, + MethodNotAllowed = 405, + NotAcceptable = 406, + ProxyAuthenticationRequired = 407, + RequestTimeout = 408, + Conflict = 409, + Gone = 410, + TooManyRequests = 429, + InternalServerError = 500, + NotImplemented = 501, + BadGateway = 502, + ServiceUnavailable = 503, + GatewayTimeout = 504 +} +export declare enum Headers { + Accept = "accept", + ContentType = "content-type" +} +export declare enum MediaTypes { + ApplicationJson = "application/json" +} +/** + * Returns the proxy URL, depending upon the supplied url and proxy environment variables. + * @param serverUrl The server URL where the request will be sent. 
For example, https://api.github.com + */ +export declare function getProxyUrl(serverUrl: string): string; +export declare class HttpClientError extends Error { + constructor(message: string, statusCode: number); + statusCode: number; + result?: any; +} +export declare class HttpClientResponse { + constructor(message: http.IncomingMessage); + message: http.IncomingMessage; + readBody(): Promise<string>; +} +export declare function isHttps(requestUrl: string): boolean; +export declare class HttpClient { + userAgent: string | undefined; + handlers: ifm.RequestHandler[]; + requestOptions: ifm.RequestOptions | undefined; + private _ignoreSslError; + private _socketTimeout; + private _allowRedirects; + private _allowRedirectDowngrade; + private _maxRedirects; + private _allowRetries; + private _maxRetries; + private _agent; + private _proxyAgent; + private _keepAlive; + private _disposed; + constructor(userAgent?: string, handlers?: ifm.RequestHandler[], requestOptions?: ifm.RequestOptions); + options(requestUrl: string, additionalHeaders?: http.OutgoingHttpHeaders): Promise<HttpClientResponse>; + get(requestUrl: string, additionalHeaders?: http.OutgoingHttpHeaders): Promise<HttpClientResponse>; + del(requestUrl: string, additionalHeaders?: http.OutgoingHttpHeaders): Promise<HttpClientResponse>; + post(requestUrl: string, data: string, additionalHeaders?: http.OutgoingHttpHeaders): Promise<HttpClientResponse>; + patch(requestUrl: string, data: string, additionalHeaders?: http.OutgoingHttpHeaders): Promise<HttpClientResponse>; + put(requestUrl: string, data: string, additionalHeaders?: http.OutgoingHttpHeaders): Promise<HttpClientResponse>; + head(requestUrl: string, additionalHeaders?: http.OutgoingHttpHeaders): Promise<HttpClientResponse>; + sendStream(verb: string, requestUrl: string, stream: NodeJS.ReadableStream, additionalHeaders?: http.OutgoingHttpHeaders): Promise<HttpClientResponse>; + /** + * Gets a typed object from an endpoint + * Be aware that not found returns a null. Other errors (4xx, 5xx) reject the promise + */ + getJson<T>(requestUrl: string, additionalHeaders?: http.OutgoingHttpHeaders): Promise<ifm.TypedResponse<T>>; + postJson<T>(requestUrl: string, obj: any, additionalHeaders?: http.OutgoingHttpHeaders): Promise<ifm.TypedResponse<T>>; + putJson<T>(requestUrl: string, obj: any, additionalHeaders?: http.OutgoingHttpHeaders): Promise<ifm.TypedResponse<T>>; + patchJson<T>(requestUrl: string, obj: any, additionalHeaders?: http.OutgoingHttpHeaders): Promise<ifm.TypedResponse<T>>; + /** + * Makes a raw http request. + * All other methods such as get, post, patch, and request ultimately call this. + * Prefer get, del, post and patch + */ + request(verb: string, requestUrl: string, data: string | NodeJS.ReadableStream | null, headers?: http.OutgoingHttpHeaders): Promise<HttpClientResponse>; + /** + * Needs to be called if keepAlive is set to true in request options. + */ + dispose(): void; + /** + * Raw request. + * @param info + * @param data + */ + requestRaw(info: ifm.RequestInfo, data: string | NodeJS.ReadableStream | null): Promise<HttpClientResponse>; + /** + * Raw request with callback. + * @param info + * @param data + * @param onResult + */ + requestRawWithCallback(info: ifm.RequestInfo, data: string | NodeJS.ReadableStream | null, onResult: (err?: Error, res?: HttpClientResponse) => void): void; + /** + * Gets an http agent. This function is useful when you need an http agent that handles + * routing through a proxy server - depending upon the url and proxy environment variables. + * @param serverUrl The server URL where the request will be sent.
For example, https://api.github.com + */ + getAgent(serverUrl: string): http.Agent; + private _prepareRequest; + private _mergeHeaders; + private _getExistingOrDefaultHeader; + private _getAgent; + private _performExponentialBackoff; + private _processResponse; +} diff --git a/node_modules/@actions/http-client/lib/index.js b/node_modules/@actions/http-client/lib/index.js new file mode 100644 index 00000000..a1b7d032 --- /dev/null +++ b/node_modules/@actions/http-client/lib/index.js @@ -0,0 +1,605 @@ +"use strict"; +/* eslint-disable @typescript-eslint/no-explicit-any */ +var __createBinding = (this && this.__createBinding) || (Object.create ? (function(o, m, k, k2) { + if (k2 === undefined) k2 = k; + Object.defineProperty(o, k2, { enumerable: true, get: function() { return m[k]; } }); +}) : (function(o, m, k, k2) { + if (k2 === undefined) k2 = k; + o[k2] = m[k]; +})); +var __setModuleDefault = (this && this.__setModuleDefault) || (Object.create ? (function(o, v) { + Object.defineProperty(o, "default", { enumerable: true, value: v }); +}) : function(o, v) { + o["default"] = v; +}); +var __importStar = (this && this.__importStar) || function (mod) { + if (mod && mod.__esModule) return mod; + var result = {}; + if (mod != null) for (var k in mod) if (k !== "default" && Object.hasOwnProperty.call(mod, k)) __createBinding(result, mod, k); + __setModuleDefault(result, mod); + return result; +}; +var __awaiter = (this && this.__awaiter) || function (thisArg, _arguments, P, generator) { + function adopt(value) { return value instanceof P ? value : new P(function (resolve) { resolve(value); }); } + return new (P || (P = Promise))(function (resolve, reject) { + function fulfilled(value) { try { step(generator.next(value)); } catch (e) { reject(e); } } + function rejected(value) { try { step(generator["throw"](value)); } catch (e) { reject(e); } } + function step(result) { result.done ? 
resolve(result.value) : adopt(result.value).then(fulfilled, rejected); } + step((generator = generator.apply(thisArg, _arguments || [])).next()); + }); +}; +Object.defineProperty(exports, "__esModule", { value: true }); +exports.HttpClient = exports.isHttps = exports.HttpClientResponse = exports.HttpClientError = exports.getProxyUrl = exports.MediaTypes = exports.Headers = exports.HttpCodes = void 0; +const http = __importStar(require("http")); +const https = __importStar(require("https")); +const pm = __importStar(require("./proxy")); +const tunnel = __importStar(require("tunnel")); +var HttpCodes; +(function (HttpCodes) { + HttpCodes[HttpCodes["OK"] = 200] = "OK"; + HttpCodes[HttpCodes["MultipleChoices"] = 300] = "MultipleChoices"; + HttpCodes[HttpCodes["MovedPermanently"] = 301] = "MovedPermanently"; + HttpCodes[HttpCodes["ResourceMoved"] = 302] = "ResourceMoved"; + HttpCodes[HttpCodes["SeeOther"] = 303] = "SeeOther"; + HttpCodes[HttpCodes["NotModified"] = 304] = "NotModified"; + HttpCodes[HttpCodes["UseProxy"] = 305] = "UseProxy"; + HttpCodes[HttpCodes["SwitchProxy"] = 306] = "SwitchProxy"; + HttpCodes[HttpCodes["TemporaryRedirect"] = 307] = "TemporaryRedirect"; + HttpCodes[HttpCodes["PermanentRedirect"] = 308] = "PermanentRedirect"; + HttpCodes[HttpCodes["BadRequest"] = 400] = "BadRequest"; + HttpCodes[HttpCodes["Unauthorized"] = 401] = "Unauthorized"; + HttpCodes[HttpCodes["PaymentRequired"] = 402] = "PaymentRequired"; + HttpCodes[HttpCodes["Forbidden"] = 403] = "Forbidden"; + HttpCodes[HttpCodes["NotFound"] = 404] = "NotFound"; + HttpCodes[HttpCodes["MethodNotAllowed"] = 405] = "MethodNotAllowed"; + HttpCodes[HttpCodes["NotAcceptable"] = 406] = "NotAcceptable"; + HttpCodes[HttpCodes["ProxyAuthenticationRequired"] = 407] = "ProxyAuthenticationRequired"; + HttpCodes[HttpCodes["RequestTimeout"] = 408] = "RequestTimeout"; + HttpCodes[HttpCodes["Conflict"] = 409] = "Conflict"; + HttpCodes[HttpCodes["Gone"] = 410] = "Gone"; + HttpCodes[HttpCodes["TooManyRequests"] = 429] = "TooManyRequests"; + HttpCodes[HttpCodes["InternalServerError"] = 500] = "InternalServerError"; + HttpCodes[HttpCodes["NotImplemented"] = 501] = "NotImplemented"; + HttpCodes[HttpCodes["BadGateway"] = 502] = "BadGateway"; + HttpCodes[HttpCodes["ServiceUnavailable"] = 503] = "ServiceUnavailable"; + HttpCodes[HttpCodes["GatewayTimeout"] = 504] = "GatewayTimeout"; +})(HttpCodes = exports.HttpCodes || (exports.HttpCodes = {})); +var Headers; +(function (Headers) { + Headers["Accept"] = "accept"; + Headers["ContentType"] = "content-type"; +})(Headers = exports.Headers || (exports.Headers = {})); +var MediaTypes; +(function (MediaTypes) { + MediaTypes["ApplicationJson"] = "application/json"; +})(MediaTypes = exports.MediaTypes || (exports.MediaTypes = {})); +/** + * Returns the proxy URL, depending upon the supplied url and proxy environment variables. + * @param serverUrl The server URL where the request will be sent. For example, https://api.github.com + */ +function getProxyUrl(serverUrl) { + const proxyUrl = pm.getProxyUrl(new URL(serverUrl)); + return proxyUrl ? 
proxyUrl.href : ''; +} +exports.getProxyUrl = getProxyUrl; +const HttpRedirectCodes = [ + HttpCodes.MovedPermanently, + HttpCodes.ResourceMoved, + HttpCodes.SeeOther, + HttpCodes.TemporaryRedirect, + HttpCodes.PermanentRedirect +]; +const HttpResponseRetryCodes = [ + HttpCodes.BadGateway, + HttpCodes.ServiceUnavailable, + HttpCodes.GatewayTimeout +]; +const RetryableHttpVerbs = ['OPTIONS', 'GET', 'DELETE', 'HEAD']; +const ExponentialBackoffCeiling = 10; +const ExponentialBackoffTimeSlice = 5; +class HttpClientError extends Error { + constructor(message, statusCode) { + super(message); + this.name = 'HttpClientError'; + this.statusCode = statusCode; + Object.setPrototypeOf(this, HttpClientError.prototype); + } +} +exports.HttpClientError = HttpClientError; +class HttpClientResponse { + constructor(message) { + this.message = message; + } + readBody() { + return __awaiter(this, void 0, void 0, function* () { + return new Promise((resolve) => __awaiter(this, void 0, void 0, function* () { + let output = Buffer.alloc(0); + this.message.on('data', (chunk) => { + output = Buffer.concat([output, chunk]); + }); + this.message.on('end', () => { + resolve(output.toString()); + }); + })); + }); + } +} +exports.HttpClientResponse = HttpClientResponse; +function isHttps(requestUrl) { + const parsedUrl = new URL(requestUrl); + return parsedUrl.protocol === 'https:'; +} +exports.isHttps = isHttps; +class HttpClient { + constructor(userAgent, handlers, requestOptions) { + this._ignoreSslError = false; + this._allowRedirects = true; + this._allowRedirectDowngrade = false; + this._maxRedirects = 50; + this._allowRetries = false; + this._maxRetries = 1; + this._keepAlive = false; + this._disposed = false; + this.userAgent = userAgent; + this.handlers = handlers || []; + this.requestOptions = requestOptions; + if (requestOptions) { + if (requestOptions.ignoreSslError != null) { + this._ignoreSslError = requestOptions.ignoreSslError; + } + this._socketTimeout = requestOptions.socketTimeout; + if (requestOptions.allowRedirects != null) { + this._allowRedirects = requestOptions.allowRedirects; + } + if (requestOptions.allowRedirectDowngrade != null) { + this._allowRedirectDowngrade = requestOptions.allowRedirectDowngrade; + } + if (requestOptions.maxRedirects != null) { + this._maxRedirects = Math.max(requestOptions.maxRedirects, 0); + } + if (requestOptions.keepAlive != null) { + this._keepAlive = requestOptions.keepAlive; + } + if (requestOptions.allowRetries != null) { + this._allowRetries = requestOptions.allowRetries; + } + if (requestOptions.maxRetries != null) { + this._maxRetries = requestOptions.maxRetries; + } + } + } + options(requestUrl, additionalHeaders) { + return __awaiter(this, void 0, void 0, function* () { + return this.request('OPTIONS', requestUrl, null, additionalHeaders || {}); + }); + } + get(requestUrl, additionalHeaders) { + return __awaiter(this, void 0, void 0, function* () { + return this.request('GET', requestUrl, null, additionalHeaders || {}); + }); + } + del(requestUrl, additionalHeaders) { + return __awaiter(this, void 0, void 0, function* () { + return this.request('DELETE', requestUrl, null, additionalHeaders || {}); + }); + } + post(requestUrl, data, additionalHeaders) { + return __awaiter(this, void 0, void 0, function* () { + return this.request('POST', requestUrl, data, additionalHeaders || {}); + }); + } + patch(requestUrl, data, additionalHeaders) { + return __awaiter(this, void 0, void 0, function* () { + return this.request('PATCH', requestUrl, data, additionalHeaders 
|| {}); + }); + } + put(requestUrl, data, additionalHeaders) { + return __awaiter(this, void 0, void 0, function* () { + return this.request('PUT', requestUrl, data, additionalHeaders || {}); + }); + } + head(requestUrl, additionalHeaders) { + return __awaiter(this, void 0, void 0, function* () { + return this.request('HEAD', requestUrl, null, additionalHeaders || {}); + }); + } + sendStream(verb, requestUrl, stream, additionalHeaders) { + return __awaiter(this, void 0, void 0, function* () { + return this.request(verb, requestUrl, stream, additionalHeaders); + }); + } + /** + * Gets a typed object from an endpoint + * Be aware that not found returns a null. Other errors (4xx, 5xx) reject the promise + */ + getJson(requestUrl, additionalHeaders = {}) { + return __awaiter(this, void 0, void 0, function* () { + additionalHeaders[Headers.Accept] = this._getExistingOrDefaultHeader(additionalHeaders, Headers.Accept, MediaTypes.ApplicationJson); + const res = yield this.get(requestUrl, additionalHeaders); + return this._processResponse(res, this.requestOptions); + }); + } + postJson(requestUrl, obj, additionalHeaders = {}) { + return __awaiter(this, void 0, void 0, function* () { + const data = JSON.stringify(obj, null, 2); + additionalHeaders[Headers.Accept] = this._getExistingOrDefaultHeader(additionalHeaders, Headers.Accept, MediaTypes.ApplicationJson); + additionalHeaders[Headers.ContentType] = this._getExistingOrDefaultHeader(additionalHeaders, Headers.ContentType, MediaTypes.ApplicationJson); + const res = yield this.post(requestUrl, data, additionalHeaders); + return this._processResponse(res, this.requestOptions); + }); + } + putJson(requestUrl, obj, additionalHeaders = {}) { + return __awaiter(this, void 0, void 0, function* () { + const data = JSON.stringify(obj, null, 2); + additionalHeaders[Headers.Accept] = this._getExistingOrDefaultHeader(additionalHeaders, Headers.Accept, MediaTypes.ApplicationJson); + additionalHeaders[Headers.ContentType] = this._getExistingOrDefaultHeader(additionalHeaders, Headers.ContentType, MediaTypes.ApplicationJson); + const res = yield this.put(requestUrl, data, additionalHeaders); + return this._processResponse(res, this.requestOptions); + }); + } + patchJson(requestUrl, obj, additionalHeaders = {}) { + return __awaiter(this, void 0, void 0, function* () { + const data = JSON.stringify(obj, null, 2); + additionalHeaders[Headers.Accept] = this._getExistingOrDefaultHeader(additionalHeaders, Headers.Accept, MediaTypes.ApplicationJson); + additionalHeaders[Headers.ContentType] = this._getExistingOrDefaultHeader(additionalHeaders, Headers.ContentType, MediaTypes.ApplicationJson); + const res = yield this.patch(requestUrl, data, additionalHeaders); + return this._processResponse(res, this.requestOptions); + }); + } + /** + * Makes a raw http request. + * All other methods such as get, post, patch, and request ultimately call this. + * Prefer get, del, post and patch + */ + request(verb, requestUrl, data, headers) { + return __awaiter(this, void 0, void 0, function* () { + if (this._disposed) { + throw new Error('Client has already been disposed.'); + } + const parsedUrl = new URL(requestUrl); + let info = this._prepareRequest(verb, parsedUrl, headers); + // Only perform retries on reads since writes may not be idempotent. + const maxTries = this._allowRetries && RetryableHttpVerbs.includes(verb) + ? 
this._maxRetries + 1 + : 1; + let numTries = 0; + let response; + do { + response = yield this.requestRaw(info, data); + // Check if it's an authentication challenge + if (response && + response.message && + response.message.statusCode === HttpCodes.Unauthorized) { + let authenticationHandler; + for (const handler of this.handlers) { + if (handler.canHandleAuthentication(response)) { + authenticationHandler = handler; + break; + } + } + if (authenticationHandler) { + return authenticationHandler.handleAuthentication(this, info, data); + } + else { + // We have received an unauthorized response but have no handlers to handle it. + // Let the response return to the caller. + return response; + } + } + let redirectsRemaining = this._maxRedirects; + while (response.message.statusCode && + HttpRedirectCodes.includes(response.message.statusCode) && + this._allowRedirects && + redirectsRemaining > 0) { + const redirectUrl = response.message.headers['location']; + if (!redirectUrl) { + // if there's no location to redirect to, we won't + break; + } + const parsedRedirectUrl = new URL(redirectUrl); + if (parsedUrl.protocol === 'https:' && + parsedUrl.protocol !== parsedRedirectUrl.protocol && + !this._allowRedirectDowngrade) { + throw new Error('Redirect from HTTPS to HTTP protocol. This downgrade is not allowed for security reasons. If you want to allow this behavior, set the allowRedirectDowngrade option to true.'); + } + // we need to finish reading the response before reassigning response + // which will leak the open socket. + yield response.readBody(); + // strip authorization header if redirected to a different hostname + if (parsedRedirectUrl.hostname !== parsedUrl.hostname) { + for (const header in headers) { + // header names are case insensitive + if (header.toLowerCase() === 'authorization') { + delete headers[header]; + } + } + } + // let's make the request with the new redirectUrl + info = this._prepareRequest(verb, parsedRedirectUrl, headers); + response = yield this.requestRaw(info, data); + redirectsRemaining--; + } + if (!response.message.statusCode || + !HttpResponseRetryCodes.includes(response.message.statusCode)) { + // If not a retry code, return immediately instead of retrying + return response; + } + numTries += 1; + if (numTries < maxTries) { + yield response.readBody(); + yield this._performExponentialBackoff(numTries); + } + } while (numTries < maxTries); + return response; + }); + } + /** + * Needs to be called if keepAlive is set to true in request options. + */ + dispose() { + if (this._agent) { + this._agent.destroy(); + } + this._disposed = true; + } + /** + * Raw request. + * @param info + * @param data + */ + requestRaw(info, data) { + return __awaiter(this, void 0, void 0, function* () { + return new Promise((resolve, reject) => { + function callbackForResult(err, res) { + if (err) { + reject(err); + } + else if (!res) { + // If `err` is not passed, then `res` must be passed. + reject(new Error('Unknown error')); + } + else { + resolve(res); + } + } + this.requestRawWithCallback(info, data, callbackForResult); + }); + }); + } + /** + * Raw request with callback. 
+ * @param info + * @param data + * @param onResult + */ + requestRawWithCallback(info, data, onResult) { + if (typeof data === 'string') { + if (!info.options.headers) { + info.options.headers = {}; + } + info.options.headers['Content-Length'] = Buffer.byteLength(data, 'utf8'); + } + let callbackCalled = false; + function handleResult(err, res) { + if (!callbackCalled) { + callbackCalled = true; + onResult(err, res); + } + } + const req = info.httpModule.request(info.options, (msg) => { + const res = new HttpClientResponse(msg); + handleResult(undefined, res); + }); + let socket; + req.on('socket', sock => { + socket = sock; + }); + // If we ever get disconnected, we want the socket to timeout eventually + req.setTimeout(this._socketTimeout || 3 * 60000, () => { + if (socket) { + socket.end(); + } + handleResult(new Error(`Request timeout: ${info.options.path}`)); + }); + req.on('error', function (err) { + // err has statusCode property + // res should have headers + handleResult(err); + }); + if (data && typeof data === 'string') { + req.write(data, 'utf8'); + } + if (data && typeof data !== 'string') { + data.on('close', function () { + req.end(); + }); + data.pipe(req); + } + else { + req.end(); + } + } + /** + * Gets an http agent. This function is useful when you need an http agent that handles + * routing through a proxy server - depending upon the url and proxy environment variables. + * @param serverUrl The server URL where the request will be sent. For example, https://api.github.com + */ + getAgent(serverUrl) { + const parsedUrl = new URL(serverUrl); + return this._getAgent(parsedUrl); + } + _prepareRequest(method, requestUrl, headers) { + const info = {}; + info.parsedUrl = requestUrl; + const usingSsl = info.parsedUrl.protocol === 'https:'; + info.httpModule = usingSsl ? https : http; + const defaultPort = usingSsl ? 443 : 80; + info.options = {}; + info.options.host = info.parsedUrl.hostname; + info.options.port = info.parsedUrl.port + ? parseInt(info.parsedUrl.port) + : defaultPort; + info.options.path = + (info.parsedUrl.pathname || '') + (info.parsedUrl.search || ''); + info.options.method = method; + info.options.headers = this._mergeHeaders(headers); + if (this.userAgent != null) { + info.options.headers['user-agent'] = this.userAgent; + } + info.options.agent = this._getAgent(info.parsedUrl); + // gives handlers an opportunity to participate + if (this.handlers) { + for (const handler of this.handlers) { + handler.prepareRequest(info.options); + } + } + return info; + } + _mergeHeaders(headers) { + if (this.requestOptions && this.requestOptions.headers) { + return Object.assign({}, lowercaseKeys(this.requestOptions.headers), lowercaseKeys(headers || {})); + } + return lowercaseKeys(headers || {}); + } + _getExistingOrDefaultHeader(additionalHeaders, header, _default) { + let clientHeader; + if (this.requestOptions && this.requestOptions.headers) { + clientHeader = lowercaseKeys(this.requestOptions.headers)[header]; + } + return additionalHeaders[header] || clientHeader || _default; + } + _getAgent(parsedUrl) { + let agent; + const proxyUrl = pm.getProxyUrl(parsedUrl); + const useProxy = proxyUrl && proxyUrl.hostname; + if (this._keepAlive && useProxy) { + agent = this._proxyAgent; + } + if (this._keepAlive && !useProxy) { + agent = this._agent; + } + // if agent is already assigned use that agent. 
+ if (agent) { + return agent; + } + const usingSsl = parsedUrl.protocol === 'https:'; + let maxSockets = 100; + if (this.requestOptions) { + maxSockets = this.requestOptions.maxSockets || http.globalAgent.maxSockets; + } + // This is `useProxy` again, but we need to check `proxyURl` directly for TypeScripts's flow analysis. + if (proxyUrl && proxyUrl.hostname) { + const agentOptions = { + maxSockets, + keepAlive: this._keepAlive, + proxy: Object.assign(Object.assign({}, ((proxyUrl.username || proxyUrl.password) && { + proxyAuth: `${proxyUrl.username}:${proxyUrl.password}` + })), { host: proxyUrl.hostname, port: proxyUrl.port }) + }; + let tunnelAgent; + const overHttps = proxyUrl.protocol === 'https:'; + if (usingSsl) { + tunnelAgent = overHttps ? tunnel.httpsOverHttps : tunnel.httpsOverHttp; + } + else { + tunnelAgent = overHttps ? tunnel.httpOverHttps : tunnel.httpOverHttp; + } + agent = tunnelAgent(agentOptions); + this._proxyAgent = agent; + } + // if reusing agent across request and tunneling agent isn't assigned create a new agent + if (this._keepAlive && !agent) { + const options = { keepAlive: this._keepAlive, maxSockets }; + agent = usingSsl ? new https.Agent(options) : new http.Agent(options); + this._agent = agent; + } + // if not using private agent and tunnel agent isn't setup then use global agent + if (!agent) { + agent = usingSsl ? https.globalAgent : http.globalAgent; + } + if (usingSsl && this._ignoreSslError) { + // we don't want to set NODE_TLS_REJECT_UNAUTHORIZED=0 since that will affect request for entire process + // http.RequestOptions doesn't expose a way to modify RequestOptions.agent.options + // we have to cast it to any and change it directly + agent.options = Object.assign(agent.options || {}, { + rejectUnauthorized: false + }); + } + return agent; + } + _performExponentialBackoff(retryNumber) { + return __awaiter(this, void 0, void 0, function* () { + retryNumber = Math.min(ExponentialBackoffCeiling, retryNumber); + const ms = ExponentialBackoffTimeSlice * Math.pow(2, retryNumber); + return new Promise(resolve => setTimeout(() => resolve(), ms)); + }); + } + _processResponse(res, options) { + return __awaiter(this, void 0, void 0, function* () { + return new Promise((resolve, reject) => __awaiter(this, void 0, void 0, function* () { + const statusCode = res.message.statusCode || 0; + const response = { + statusCode, + result: null, + headers: {} + }; + // not found leads to null obj returned + if (statusCode === HttpCodes.NotFound) { + resolve(response); + } + // get the result from the body + function dateTimeDeserializer(key, value) { + if (typeof value === 'string') { + const a = new Date(value); + if (!isNaN(a.valueOf())) { + return a; + } + } + return value; + } + let obj; + let contents; + try { + contents = yield res.readBody(); + if (contents && contents.length > 0) { + if (options && options.deserializeDates) { + obj = JSON.parse(contents, dateTimeDeserializer); + } + else { + obj = JSON.parse(contents); + } + response.result = obj; + } + response.headers = res.message.headers; + } + catch (err) { + // Invalid resource (contents not json); leaving result obj null + } + // note that 3xx redirects are handled by the http layer. 
+ if (statusCode > 299) { + let msg; + // if exception/error in body, attempt to get better error + if (obj && obj.message) { + msg = obj.message; + } + else if (contents && contents.length > 0) { + // it may be the case that the exception is in the body message as string + msg = contents; + } + else { + msg = `Failed request: (${statusCode})`; + } + const err = new HttpClientError(msg, statusCode); + err.result = response.result; + reject(err); + } + else { + resolve(response); + } + })); + }); + } +} +exports.HttpClient = HttpClient; +const lowercaseKeys = (obj) => Object.keys(obj).reduce((c, k) => ((c[k.toLowerCase()] = obj[k]), c), {}); +//# sourceMappingURL=index.js.map \ No newline at end of file diff --git a/node_modules/@actions/http-client/lib/index.js.map b/node_modules/@actions/http-client/lib/index.js.map new file mode 100644 index 00000000..ca8ea415 --- /dev/null +++ b/node_modules/@actions/http-client/lib/index.js.map @@ -0,0 +1 @@ +{"version":3,"file":"index.js","sourceRoot":"","sources":["../src/index.ts"],"names":[],"mappings":";AAAA,uDAAuD;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;AAEvD,2CAA4B;AAC5B,6CAA8B;AAG9B,4CAA6B;AAC7B,+CAAgC;AAEhC,IAAY,SA4BX;AA5BD,WAAY,SAAS;IACnB,uCAAQ,CAAA;IACR,iEAAqB,CAAA;IACrB,mEAAsB,CAAA;IACtB,6DAAmB,CAAA;IACnB,mDAAc,CAAA;IACd,yDAAiB,CAAA;IACjB,mDAAc,CAAA;IACd,yDAAiB,CAAA;IACjB,qEAAuB,CAAA;IACvB,qEAAuB,CAAA;IACvB,uDAAgB,CAAA;IAChB,2DAAkB,CAAA;IAClB,iEAAqB,CAAA;IACrB,qDAAe,CAAA;IACf,mDAAc,CAAA;IACd,mEAAsB,CAAA;IACtB,6DAAmB,CAAA;IACnB,yFAAiC,CAAA;IACjC,+DAAoB,CAAA;IACpB,mDAAc,CAAA;IACd,2CAAU,CAAA;IACV,iEAAqB,CAAA;IACrB,yEAAyB,CAAA;IACzB,+DAAoB,CAAA;IACpB,uDAAgB,CAAA;IAChB,uEAAwB,CAAA;IACxB,+DAAoB,CAAA;AACtB,CAAC,EA5BW,SAAS,GAAT,iBAAS,KAAT,iBAAS,QA4BpB;AAED,IAAY,OAGX;AAHD,WAAY,OAAO;IACjB,4BAAiB,CAAA;IACjB,uCAA4B,CAAA;AAC9B,CAAC,EAHW,OAAO,GAAP,eAAO,KAAP,eAAO,QAGlB;AAED,IAAY,UAEX;AAFD,WAAY,UAAU;IACpB,kDAAoC,CAAA;AACtC,CAAC,EAFW,UAAU,GAAV,kBAAU,KAAV,kBAAU,QAErB;AAED;;;GAGG;AACH,SAAgB,WAAW,CAAC,SAAiB;IAC3C,MAAM,QAAQ,GAAG,EAAE,CAAC,WAAW,CAAC,IAAI,GAAG,CAAC,SAAS,CAAC,CAAC,CAAA;IACnD,OAAO,QAAQ,CAAC,CAAC,CAAC,QAAQ,CAAC,IAAI,CAAC,CAAC,CAAC,EAAE,CAAA;AACtC,CAAC;AAHD,kCAGC;AAED,MAAM,iBAAiB,GAAa;IAClC,SAAS,CAAC,gBAAgB;IAC1B,SAAS,CAAC,aAAa;IACvB,SAAS,CAAC,QAAQ;IAClB,SAAS,CAAC,iBAAiB;IAC3B,SAAS,CAAC,iBAAiB;CAC5B,CAAA;AACD,MAAM,sBAAsB,GAAa;IACvC,SAAS,CAAC,UAAU;IACpB,SAAS,CAAC,kBAAkB;IAC5B,SAAS,CAAC,cAAc;CACzB,CAAA;AACD,MAAM,kBAAkB,GAAa,CAAC,SAAS,EAAE,KAAK,EAAE,QAAQ,EAAE,MAAM,CAAC,CAAA;AACzE,MAAM,yBAAyB,GAAG,EAAE,CAAA;AACpC,MAAM,2BAA2B,GAAG,CAAC,CAAA;AAErC,MAAa,eAAgB,SAAQ,KAAK;IACxC,YAAY,OAAe,EAAE,UAAkB;QAC7C,KAAK,CAAC,OAAO,CAAC,CAAA;QACd,IAAI,CAAC,IAAI,GAAG,iBAAiB,CAAA;QAC7B,IAAI,CAAC,UAAU,GAAG,UAAU,CAAA;QAC5B,MAAM,CAAC,cAAc,CAAC,IAAI,EAAE,eAAe,CAAC,SAAS,CAAC,CAAA;IACxD,CAAC;CAIF;AAVD,0CAUC;AAED,MAAa,kBAAkB;IAC7B,YAAY,OAA6B;QACvC,IAAI,CAAC,OAAO,GAAG,OAAO,CAAA;IACxB,CAAC;IAGK,QAAQ;;YACZ,OAAO,IAAI,OAAO,CAAS,CAAM,OAAO,EAAC,EAAE;gBACzC,IAAI,MAAM,GAAG,MAAM,CAAC,KAAK,CAAC,CAAC,CAAC,CAAA;gBAE5B,IAAI,CAAC,OAAO,CAAC,EAAE,CAAC,MAAM,EAAE,CAAC,KAAa,EAAE,EAAE;oBACxC,MAAM,GAAG,MAAM,CAAC,MAAM,CAAC,CAAC,MAAM,EAAE,KAAK,CAAC,CAAC,CAAA;gBACzC,CAAC,CAAC,CAAA;gBAEF,IAAI,CAAC,OAAO,CAAC,EAAE,CAAC,KAAK,EAAE,GAAG,EAAE;oBAC1B,OAAO,CAAC,MAAM,CAAC,QAAQ,EAAE,CAAC,CAAA;gBAC5B,CAAC,CAAC,CAAA;YACJ,CAAC,CAAA,CAAC,CAAA;QACJ,CAAC;KAAA;CACF;AAnBD,gDAmBC;AAED,SAAgB,OAAO,CAAC,UAAkB;IACxC,MAAM,SAAS,GAAQ,IAAI,GAAG,CAAC,UAAU,CAAC,CAAA;IAC1C,OAAO,SAAS,CAAC,QAAQ,KAAK,QAAQ,CAAA;AACxC,CAAC;AAHD,0BAGC;AAED,MAAa,UAAU;IAiBrB,YACE,SAAkB,EAClB,QAA+B,EAC/B,cAAmC;QAf7B,oBAAe,GAAG,KAAK,CAAA;QAEvB,oBAAe,GAAG,IAAI,CAAA;QACtB,4BA
AuB,GAAG,KAAK,CAAA;QAC/B,kBAAa,GAAG,EAAE,CAAA;QAClB,kBAAa,GAAG,KAAK,CAAA;QACrB,gBAAW,GAAG,CAAC,CAAA;QAGf,eAAU,GAAG,KAAK,CAAA;QAClB,cAAS,GAAG,KAAK,CAAA;QAOvB,IAAI,CAAC,SAAS,GAAG,SAAS,CAAA;QAC1B,IAAI,CAAC,QAAQ,GAAG,QAAQ,IAAI,EAAE,CAAA;QAC9B,IAAI,CAAC,cAAc,GAAG,cAAc,CAAA;QACpC,IAAI,cAAc,EAAE;YAClB,IAAI,cAAc,CAAC,cAAc,IAAI,IAAI,EAAE;gBACzC,IAAI,CAAC,eAAe,GAAG,cAAc,CAAC,cAAc,CAAA;aACrD;YAED,IAAI,CAAC,cAAc,GAAG,cAAc,CAAC,aAAa,CAAA;YAElD,IAAI,cAAc,CAAC,cAAc,IAAI,IAAI,EAAE;gBACzC,IAAI,CAAC,eAAe,GAAG,cAAc,CAAC,cAAc,CAAA;aACrD;YAED,IAAI,cAAc,CAAC,sBAAsB,IAAI,IAAI,EAAE;gBACjD,IAAI,CAAC,uBAAuB,GAAG,cAAc,CAAC,sBAAsB,CAAA;aACrE;YAED,IAAI,cAAc,CAAC,YAAY,IAAI,IAAI,EAAE;gBACvC,IAAI,CAAC,aAAa,GAAG,IAAI,CAAC,GAAG,CAAC,cAAc,CAAC,YAAY,EAAE,CAAC,CAAC,CAAA;aAC9D;YAED,IAAI,cAAc,CAAC,SAAS,IAAI,IAAI,EAAE;gBACpC,IAAI,CAAC,UAAU,GAAG,cAAc,CAAC,SAAS,CAAA;aAC3C;YAED,IAAI,cAAc,CAAC,YAAY,IAAI,IAAI,EAAE;gBACvC,IAAI,CAAC,aAAa,GAAG,cAAc,CAAC,YAAY,CAAA;aACjD;YAED,IAAI,cAAc,CAAC,UAAU,IAAI,IAAI,EAAE;gBACrC,IAAI,CAAC,WAAW,GAAG,cAAc,CAAC,UAAU,CAAA;aAC7C;SACF;IACH,CAAC;IAEK,OAAO,CACX,UAAkB,EAClB,iBAA4C;;YAE5C,OAAO,IAAI,CAAC,OAAO,CAAC,SAAS,EAAE,UAAU,EAAE,IAAI,EAAE,iBAAiB,IAAI,EAAE,CAAC,CAAA;QAC3E,CAAC;KAAA;IAEK,GAAG,CACP,UAAkB,EAClB,iBAA4C;;YAE5C,OAAO,IAAI,CAAC,OAAO,CAAC,KAAK,EAAE,UAAU,EAAE,IAAI,EAAE,iBAAiB,IAAI,EAAE,CAAC,CAAA;QACvE,CAAC;KAAA;IAEK,GAAG,CACP,UAAkB,EAClB,iBAA4C;;YAE5C,OAAO,IAAI,CAAC,OAAO,CAAC,QAAQ,EAAE,UAAU,EAAE,IAAI,EAAE,iBAAiB,IAAI,EAAE,CAAC,CAAA;QAC1E,CAAC;KAAA;IAEK,IAAI,CACR,UAAkB,EAClB,IAAY,EACZ,iBAA4C;;YAE5C,OAAO,IAAI,CAAC,OAAO,CAAC,MAAM,EAAE,UAAU,EAAE,IAAI,EAAE,iBAAiB,IAAI,EAAE,CAAC,CAAA;QACxE,CAAC;KAAA;IAEK,KAAK,CACT,UAAkB,EAClB,IAAY,EACZ,iBAA4C;;YAE5C,OAAO,IAAI,CAAC,OAAO,CAAC,OAAO,EAAE,UAAU,EAAE,IAAI,EAAE,iBAAiB,IAAI,EAAE,CAAC,CAAA;QACzE,CAAC;KAAA;IAEK,GAAG,CACP,UAAkB,EAClB,IAAY,EACZ,iBAA4C;;YAE5C,OAAO,IAAI,CAAC,OAAO,CAAC,KAAK,EAAE,UAAU,EAAE,IAAI,EAAE,iBAAiB,IAAI,EAAE,CAAC,CAAA;QACvE,CAAC;KAAA;IAEK,IAAI,CACR,UAAkB,EAClB,iBAA4C;;YAE5C,OAAO,IAAI,CAAC,OAAO,CAAC,MAAM,EAAE,UAAU,EAAE,IAAI,EAAE,iBAAiB,IAAI,EAAE,CAAC,CAAA;QACxE,CAAC;KAAA;IAEK,UAAU,CACd,IAAY,EACZ,UAAkB,EAClB,MAA6B,EAC7B,iBAA4C;;YAE5C,OAAO,IAAI,CAAC,OAAO,CAAC,IAAI,EAAE,UAAU,EAAE,MAAM,EAAE,iBAAiB,CAAC,CAAA;QAClE,CAAC;KAAA;IAED;;;OAGG;IACG,OAAO,CACX,UAAkB,EAClB,oBAA8C,EAAE;;YAEhD,iBAAiB,CAAC,OAAO,CAAC,MAAM,CAAC,GAAG,IAAI,CAAC,2BAA2B,CAClE,iBAAiB,EACjB,OAAO,CAAC,MAAM,EACd,UAAU,CAAC,eAAe,CAC3B,CAAA;YACD,MAAM,GAAG,GAAuB,MAAM,IAAI,CAAC,GAAG,CAC5C,UAAU,EACV,iBAAiB,CAClB,CAAA;YACD,OAAO,IAAI,CAAC,gBAAgB,CAAI,GAAG,EAAE,IAAI,CAAC,cAAc,CAAC,CAAA;QAC3D,CAAC;KAAA;IAEK,QAAQ,CACZ,UAAkB,EAClB,GAAQ,EACR,oBAA8C,EAAE;;YAEhD,MAAM,IAAI,GAAW,IAAI,CAAC,SAAS,CAAC,GAAG,EAAE,IAAI,EAAE,CAAC,CAAC,CAAA;YACjD,iBAAiB,CAAC,OAAO,CAAC,MAAM,CAAC,GAAG,IAAI,CAAC,2BAA2B,CAClE,iBAAiB,EACjB,OAAO,CAAC,MAAM,EACd,UAAU,CAAC,eAAe,CAC3B,CAAA;YACD,iBAAiB,CAAC,OAAO,CAAC,WAAW,CAAC,GAAG,IAAI,CAAC,2BAA2B,CACvE,iBAAiB,EACjB,OAAO,CAAC,WAAW,EACnB,UAAU,CAAC,eAAe,CAC3B,CAAA;YACD,MAAM,GAAG,GAAuB,MAAM,IAAI,CAAC,IAAI,CAC7C,UAAU,EACV,IAAI,EACJ,iBAAiB,CAClB,CAAA;YACD,OAAO,IAAI,CAAC,gBAAgB,CAAI,GAAG,EAAE,IAAI,CAAC,cAAc,CAAC,CAAA;QAC3D,CAAC;KAAA;IAEK,OAAO,CACX,UAAkB,EAClB,GAAQ,EACR,oBAA8C,EAAE;;YAEhD,MAAM,IAAI,GAAW,IAAI,CAAC,SAAS,CAAC,GAAG,EAAE,IAAI,EAAE,CAAC,CAAC,CAAA;YACjD,iBAAiB,CAAC,OAAO,CAAC,MAAM,CAAC,GAAG,IAAI,CAAC,2BAA2B,CAClE,iBAAiB,EACjB,OAAO,CAAC,MAAM,EACd,UAAU,CAAC,eAAe,CAC3B,CAAA;YACD,iBAAiB,CAAC,OAAO,CAAC,WAAW,CAAC,GAAG,IAAI,CAAC,2BAA2B,CACvE,iBAAiB,EACjB,OAAO,CAAC,WAAW,EACnB,UAAU,CAAC,eAAe,CAC3B,CAAA;YACD,MAAM,GAAG,GAAuB,MAAM,IAAI,CAAC,GAAG,CAC5C,UAAU,EACV,IAAI,EACJ,iBAAiB,CAClB,CAAA;YACD,OAAO,IAAI,CA
AC,gBAAgB,CAAI,GAAG,EAAE,IAAI,CAAC,cAAc,CAAC,CAAA;QAC3D,CAAC;KAAA;IAEK,SAAS,CACb,UAAkB,EAClB,GAAQ,EACR,oBAA8C,EAAE;;YAEhD,MAAM,IAAI,GAAW,IAAI,CAAC,SAAS,CAAC,GAAG,EAAE,IAAI,EAAE,CAAC,CAAC,CAAA;YACjD,iBAAiB,CAAC,OAAO,CAAC,MAAM,CAAC,GAAG,IAAI,CAAC,2BAA2B,CAClE,iBAAiB,EACjB,OAAO,CAAC,MAAM,EACd,UAAU,CAAC,eAAe,CAC3B,CAAA;YACD,iBAAiB,CAAC,OAAO,CAAC,WAAW,CAAC,GAAG,IAAI,CAAC,2BAA2B,CACvE,iBAAiB,EACjB,OAAO,CAAC,WAAW,EACnB,UAAU,CAAC,eAAe,CAC3B,CAAA;YACD,MAAM,GAAG,GAAuB,MAAM,IAAI,CAAC,KAAK,CAC9C,UAAU,EACV,IAAI,EACJ,iBAAiB,CAClB,CAAA;YACD,OAAO,IAAI,CAAC,gBAAgB,CAAI,GAAG,EAAE,IAAI,CAAC,cAAc,CAAC,CAAA;QAC3D,CAAC;KAAA;IAED;;;;OAIG;IACG,OAAO,CACX,IAAY,EACZ,UAAkB,EAClB,IAA2C,EAC3C,OAAkC;;YAElC,IAAI,IAAI,CAAC,SAAS,EAAE;gBAClB,MAAM,IAAI,KAAK,CAAC,mCAAmC,CAAC,CAAA;aACrD;YAED,MAAM,SAAS,GAAG,IAAI,GAAG,CAAC,UAAU,CAAC,CAAA;YACrC,IAAI,IAAI,GAAoB,IAAI,CAAC,eAAe,CAAC,IAAI,EAAE,SAAS,EAAE,OAAO,CAAC,CAAA;YAE1E,oEAAoE;YACpE,MAAM,QAAQ,GACZ,IAAI,CAAC,aAAa,IAAI,kBAAkB,CAAC,QAAQ,CAAC,IAAI,CAAC;gBACrD,CAAC,CAAC,IAAI,CAAC,WAAW,GAAG,CAAC;gBACtB,CAAC,CAAC,CAAC,CAAA;YACP,IAAI,QAAQ,GAAG,CAAC,CAAA;YAEhB,IAAI,QAAwC,CAAA;YAC5C,GAAG;gBACD,QAAQ,GAAG,MAAM,IAAI,CAAC,UAAU,CAAC,IAAI,EAAE,IAAI,CAAC,CAAA;gBAE5C,4CAA4C;gBAC5C,IACE,QAAQ;oBACR,QAAQ,CAAC,OAAO;oBAChB,QAAQ,CAAC,OAAO,CAAC,UAAU,KAAK,SAAS,CAAC,YAAY,EACtD;oBACA,IAAI,qBAAqD,CAAA;oBAEzD,KAAK,MAAM,OAAO,IAAI,IAAI,CAAC,QAAQ,EAAE;wBACnC,IAAI,OAAO,CAAC,uBAAuB,CAAC,QAAQ,CAAC,EAAE;4BAC7C,qBAAqB,GAAG,OAAO,CAAA;4BAC/B,MAAK;yBACN;qBACF;oBAED,IAAI,qBAAqB,EAAE;wBACzB,OAAO,qBAAqB,CAAC,oBAAoB,CAAC,IAAI,EAAE,IAAI,EAAE,IAAI,CAAC,CAAA;qBACpE;yBAAM;wBACL,+EAA+E;wBAC/E,yCAAyC;wBACzC,OAAO,QAAQ,CAAA;qBAChB;iBACF;gBAED,IAAI,kBAAkB,GAAW,IAAI,CAAC,aAAa,CAAA;gBACnD,OACE,QAAQ,CAAC,OAAO,CAAC,UAAU;oBAC3B,iBAAiB,CAAC,QAAQ,CAAC,QAAQ,CAAC,OAAO,CAAC,UAAU,CAAC;oBACvD,IAAI,CAAC,eAAe;oBACpB,kBAAkB,GAAG,CAAC,EACtB;oBACA,MAAM,WAAW,GACf,QAAQ,CAAC,OAAO,CAAC,OAAO,CAAC,UAAU,CAAC,CAAA;oBACtC,IAAI,CAAC,WAAW,EAAE;wBAChB,kDAAkD;wBAClD,MAAK;qBACN;oBACD,MAAM,iBAAiB,GAAG,IAAI,GAAG,CAAC,WAAW,CAAC,CAAA;oBAC9C,IACE,SAAS,CAAC,QAAQ,KAAK,QAAQ;wBAC/B,SAAS,CAAC,QAAQ,KAAK,iBAAiB,CAAC,QAAQ;wBACjD,CAAC,IAAI,CAAC,uBAAuB,EAC7B;wBACA,MAAM,IAAI,KAAK,CACb,8KAA8K,CAC/K,CAAA;qBACF;oBAED,qEAAqE;oBACrE,mCAAmC;oBACnC,MAAM,QAAQ,CAAC,QAAQ,EAAE,CAAA;oBAEzB,mEAAmE;oBACnE,IAAI,iBAAiB,CAAC,QAAQ,KAAK,SAAS,CAAC,QAAQ,EAAE;wBACrD,KAAK,MAAM,MAAM,IAAI,OAAO,EAAE;4BAC5B,oCAAoC;4BACpC,IAAI,MAAM,CAAC,WAAW,EAAE,KAAK,eAAe,EAAE;gCAC5C,OAAO,OAAO,CAAC,MAAM,CAAC,CAAA;6BACvB;yBACF;qBACF;oBAED,kDAAkD;oBAClD,IAAI,GAAG,IAAI,CAAC,eAAe,CAAC,IAAI,EAAE,iBAAiB,EAAE,OAAO,CAAC,CAAA;oBAC7D,QAAQ,GAAG,MAAM,IAAI,CAAC,UAAU,CAAC,IAAI,EAAE,IAAI,CAAC,CAAA;oBAC5C,kBAAkB,EAAE,CAAA;iBACrB;gBAED,IACE,CAAC,QAAQ,CAAC,OAAO,CAAC,UAAU;oBAC5B,CAAC,sBAAsB,CAAC,QAAQ,CAAC,QAAQ,CAAC,OAAO,CAAC,UAAU,CAAC,EAC7D;oBACA,8DAA8D;oBAC9D,OAAO,QAAQ,CAAA;iBAChB;gBAED,QAAQ,IAAI,CAAC,CAAA;gBAEb,IAAI,QAAQ,GAAG,QAAQ,EAAE;oBACvB,MAAM,QAAQ,CAAC,QAAQ,EAAE,CAAA;oBACzB,MAAM,IAAI,CAAC,0BAA0B,CAAC,QAAQ,CAAC,CAAA;iBAChD;aACF,QAAQ,QAAQ,GAAG,QAAQ,EAAC;YAE7B,OAAO,QAAQ,CAAA;QACjB,CAAC;KAAA;IAED;;OAEG;IACH,OAAO;QACL,IAAI,IAAI,CAAC,MAAM,EAAE;YACf,IAAI,CAAC,MAAM,CAAC,OAAO,EAAE,CAAA;SACtB;QAED,IAAI,CAAC,SAAS,GAAG,IAAI,CAAA;IACvB,CAAC;IAED;;;;OAIG;IACG,UAAU,CACd,IAAqB,EACrB,IAA2C;;YAE3C,OAAO,IAAI,OAAO,CAAqB,CAAC,OAAO,EAAE,MAAM,EAAE,EAAE;gBACzD,SAAS,iBAAiB,CAAC,GAAW,EAAE,GAAwB;oBAC9D,IAAI,GAAG,EAAE;wBACP,MAAM,CAAC,GAAG,CAAC,CAAA;qBACZ;yBAAM,IAAI,CAAC,GAAG,EAAE;wBACf,qDAAqD;wBACrD,MAAM,CAAC,IAAI,KAAK,CAAC,eAAe,CAAC,CAAC,CAAA;qBACnC;yBAAM;wBACL,OAAO,CAAC,GAAG,CAAC,CAAA;qBACb;gBACH,CAAC;gBAED,IAAI,CAAC,sBAAsB,CAAC,IAAI,EAAE,IAAI,EAAE,iBAAiB,CAAC
,CAAA;YAC5D,CAAC,CAAC,CAAA;QACJ,CAAC;KAAA;IAED;;;;;OAKG;IACH,sBAAsB,CACpB,IAAqB,EACrB,IAA2C,EAC3C,QAAyD;QAEzD,IAAI,OAAO,IAAI,KAAK,QAAQ,EAAE;YAC5B,IAAI,CAAC,IAAI,CAAC,OAAO,CAAC,OAAO,EAAE;gBACzB,IAAI,CAAC,OAAO,CAAC,OAAO,GAAG,EAAE,CAAA;aAC1B;YACD,IAAI,CAAC,OAAO,CAAC,OAAO,CAAC,gBAAgB,CAAC,GAAG,MAAM,CAAC,UAAU,CAAC,IAAI,EAAE,MAAM,CAAC,CAAA;SACzE;QAED,IAAI,cAAc,GAAG,KAAK,CAAA;QAC1B,SAAS,YAAY,CAAC,GAAW,EAAE,GAAwB;YACzD,IAAI,CAAC,cAAc,EAAE;gBACnB,cAAc,GAAG,IAAI,CAAA;gBACrB,QAAQ,CAAC,GAAG,EAAE,GAAG,CAAC,CAAA;aACnB;QACH,CAAC;QAED,MAAM,GAAG,GAAuB,IAAI,CAAC,UAAU,CAAC,OAAO,CACrD,IAAI,CAAC,OAAO,EACZ,CAAC,GAAyB,EAAE,EAAE;YAC5B,MAAM,GAAG,GAAuB,IAAI,kBAAkB,CAAC,GAAG,CAAC,CAAA;YAC3D,YAAY,CAAC,SAAS,EAAE,GAAG,CAAC,CAAA;QAC9B,CAAC,CACF,CAAA;QAED,IAAI,MAAkB,CAAA;QACtB,GAAG,CAAC,EAAE,CAAC,QAAQ,EAAE,IAAI,CAAC,EAAE;YACtB,MAAM,GAAG,IAAI,CAAA;QACf,CAAC,CAAC,CAAA;QAEF,wEAAwE;QACxE,GAAG,CAAC,UAAU,CAAC,IAAI,CAAC,cAAc,IAAI,CAAC,GAAG,KAAK,EAAE,GAAG,EAAE;YACpD,IAAI,MAAM,EAAE;gBACV,MAAM,CAAC,GAAG,EAAE,CAAA;aACb;YACD,YAAY,CAAC,IAAI,KAAK,CAAC,oBAAoB,IAAI,CAAC,OAAO,CAAC,IAAI,EAAE,CAAC,CAAC,CAAA;QAClE,CAAC,CAAC,CAAA;QAEF,GAAG,CAAC,EAAE,CAAC,OAAO,EAAE,UAAS,GAAG;YAC1B,8BAA8B;YAC9B,0BAA0B;YAC1B,YAAY,CAAC,GAAG,CAAC,CAAA;QACnB,CAAC,CAAC,CAAA;QAEF,IAAI,IAAI,IAAI,OAAO,IAAI,KAAK,QAAQ,EAAE;YACpC,GAAG,CAAC,KAAK,CAAC,IAAI,EAAE,MAAM,CAAC,CAAA;SACxB;QAED,IAAI,IAAI,IAAI,OAAO,IAAI,KAAK,QAAQ,EAAE;YACpC,IAAI,CAAC,EAAE,CAAC,OAAO,EAAE;gBACf,GAAG,CAAC,GAAG,EAAE,CAAA;YACX,CAAC,CAAC,CAAA;YAEF,IAAI,CAAC,IAAI,CAAC,GAAG,CAAC,CAAA;SACf;aAAM;YACL,GAAG,CAAC,GAAG,EAAE,CAAA;SACV;IACH,CAAC;IAED;;;;OAIG;IACH,QAAQ,CAAC,SAAiB;QACxB,MAAM,SAAS,GAAG,IAAI,GAAG,CAAC,SAAS,CAAC,CAAA;QACpC,OAAO,IAAI,CAAC,SAAS,CAAC,SAAS,CAAC,CAAA;IAClC,CAAC;IAEO,eAAe,CACrB,MAAc,EACd,UAAe,EACf,OAAkC;QAElC,MAAM,IAAI,GAAqC,EAAE,CAAA;QAEjD,IAAI,CAAC,SAAS,GAAG,UAAU,CAAA;QAC3B,MAAM,QAAQ,GAAY,IAAI,CAAC,SAAS,CAAC,QAAQ,KAAK,QAAQ,CAAA;QAC9D,IAAI,CAAC,UAAU,GAAG,QAAQ,CAAC,CAAC,CAAC,KAAK,CAAC,CAAC,CAAC,IAAI,CAAA;QACzC,MAAM,WAAW,GAAW,QAAQ,CAAC,CAAC,CAAC,GAAG,CAAC,CAAC,CAAC,EAAE,CAAA;QAE/C,IAAI,CAAC,OAAO,GAAwB,EAAE,CAAA;QACtC,IAAI,CAAC,OAAO,CAAC,IAAI,GAAG,IAAI,CAAC,SAAS,CAAC,QAAQ,CAAA;QAC3C,IAAI,CAAC,OAAO,CAAC,IAAI,GAAG,IAAI,CAAC,SAAS,CAAC,IAAI;YACrC,CAAC,CAAC,QAAQ,CAAC,IAAI,CAAC,SAAS,CAAC,IAAI,CAAC;YAC/B,CAAC,CAAC,WAAW,CAAA;QACf,IAAI,CAAC,OAAO,CAAC,IAAI;YACf,CAAC,IAAI,CAAC,SAAS,CAAC,QAAQ,IAAI,EAAE,CAAC,GAAG,CAAC,IAAI,CAAC,SAAS,CAAC,MAAM,IAAI,EAAE,CAAC,CAAA;QACjE,IAAI,CAAC,OAAO,CAAC,MAAM,GAAG,MAAM,CAAA;QAC5B,IAAI,CAAC,OAAO,CAAC,OAAO,GAAG,IAAI,CAAC,aAAa,CAAC,OAAO,CAAC,CAAA;QAClD,IAAI,IAAI,CAAC,SAAS,IAAI,IAAI,EAAE;YAC1B,IAAI,CAAC,OAAO,CAAC,OAAO,CAAC,YAAY,CAAC,GAAG,IAAI,CAAC,SAAS,CAAA;SACpD;QAED,IAAI,CAAC,OAAO,CAAC,KAAK,GAAG,IAAI,CAAC,SAAS,CAAC,IAAI,CAAC,SAAS,CAAC,CAAA;QAEnD,+CAA+C;QAC/C,IAAI,IAAI,CAAC,QAAQ,EAAE;YACjB,KAAK,MAAM,OAAO,IAAI,IAAI,CAAC,QAAQ,EAAE;gBACnC,OAAO,CAAC,cAAc,CAAC,IAAI,CAAC,OAAO,CAAC,CAAA;aACrC;SACF;QAED,OAAO,IAAI,CAAA;IACb,CAAC;IAEO,aAAa,CACnB,OAAkC;QAElC,IAAI,IAAI,CAAC,cAAc,IAAI,IAAI,CAAC,cAAc,CAAC,OAAO,EAAE;YACtD,OAAO,MAAM,CAAC,MAAM,CAClB,EAAE,EACF,aAAa,CAAC,IAAI,CAAC,cAAc,CAAC,OAAO,CAAC,EAC1C,aAAa,CAAC,OAAO,IAAI,EAAE,CAAC,CAC7B,CAAA;SACF;QAED,OAAO,aAAa,CAAC,OAAO,IAAI,EAAE,CAAC,CAAA;IACrC,CAAC;IAEO,2BAA2B,CACjC,iBAA2C,EAC3C,MAAc,EACd,QAAgB;QAEhB,IAAI,YAAgC,CAAA;QACpC,IAAI,IAAI,CAAC,cAAc,IAAI,IAAI,CAAC,cAAc,CAAC,OAAO,EAAE;YACtD,YAAY,GAAG,aAAa,CAAC,IAAI,CAAC,cAAc,CAAC,OAAO,CAAC,CAAC,MAAM,CAAC,CAAA;SAClE;QACD,OAAO,iBAAiB,CAAC,MAAM,CAAC,IAAI,YAAY,IAAI,QAAQ,CAAA;IAC9D,CAAC;IAEO,SAAS,CAAC,SAAc;QAC9B,IAAI,KAAK,CAAA;QACT,MAAM,QAAQ,GAAG,EAAE,CAAC,WAAW,CAAC,SAAS,CAAC,CAAA;QAC1C,MAAM,QAAQ,GAAG
,QAAQ,IAAI,QAAQ,CAAC,QAAQ,CAAA;QAE9C,IAAI,IAAI,CAAC,UAAU,IAAI,QAAQ,EAAE;YAC/B,KAAK,GAAG,IAAI,CAAC,WAAW,CAAA;SACzB;QAED,IAAI,IAAI,CAAC,UAAU,IAAI,CAAC,QAAQ,EAAE;YAChC,KAAK,GAAG,IAAI,CAAC,MAAM,CAAA;SACpB;QAED,+CAA+C;QAC/C,IAAI,KAAK,EAAE;YACT,OAAO,KAAK,CAAA;SACb;QAED,MAAM,QAAQ,GAAG,SAAS,CAAC,QAAQ,KAAK,QAAQ,CAAA;QAChD,IAAI,UAAU,GAAG,GAAG,CAAA;QACpB,IAAI,IAAI,CAAC,cAAc,EAAE;YACvB,UAAU,GAAG,IAAI,CAAC,cAAc,CAAC,UAAU,IAAI,IAAI,CAAC,WAAW,CAAC,UAAU,CAAA;SAC3E;QAED,sGAAsG;QACtG,IAAI,QAAQ,IAAI,QAAQ,CAAC,QAAQ,EAAE;YACjC,MAAM,YAAY,GAAG;gBACnB,UAAU;gBACV,SAAS,EAAE,IAAI,CAAC,UAAU;gBAC1B,KAAK,kCACA,CAAC,CAAC,QAAQ,CAAC,QAAQ,IAAI,QAAQ,CAAC,QAAQ,CAAC,IAAI;oBAC9C,SAAS,EAAE,GAAG,QAAQ,CAAC,QAAQ,IAAI,QAAQ,CAAC,QAAQ,EAAE;iBACvD,CAAC,KACF,IAAI,EAAE,QAAQ,CAAC,QAAQ,EACvB,IAAI,EAAE,QAAQ,CAAC,IAAI,GACpB;aACF,CAAA;YAED,IAAI,WAAqB,CAAA;YACzB,MAAM,SAAS,GAAG,QAAQ,CAAC,QAAQ,KAAK,QAAQ,CAAA;YAChD,IAAI,QAAQ,EAAE;gBACZ,WAAW,GAAG,SAAS,CAAC,CAAC,CAAC,MAAM,CAAC,cAAc,CAAC,CAAC,CAAC,MAAM,CAAC,aAAa,CAAA;aACvE;iBAAM;gBACL,WAAW,GAAG,SAAS,CAAC,CAAC,CAAC,MAAM,CAAC,aAAa,CAAC,CAAC,CAAC,MAAM,CAAC,YAAY,CAAA;aACrE;YAED,KAAK,GAAG,WAAW,CAAC,YAAY,CAAC,CAAA;YACjC,IAAI,CAAC,WAAW,GAAG,KAAK,CAAA;SACzB;QAED,wFAAwF;QACxF,IAAI,IAAI,CAAC,UAAU,IAAI,CAAC,KAAK,EAAE;YAC7B,MAAM,OAAO,GAAG,EAAC,SAAS,EAAE,IAAI,CAAC,UAAU,EAAE,UAAU,EAAC,CAAA;YACxD,KAAK,GAAG,QAAQ,CAAC,CAAC,CAAC,IAAI,KAAK,CAAC,KAAK,CAAC,OAAO,CAAC,CAAC,CAAC,CAAC,IAAI,IAAI,CAAC,KAAK,CAAC,OAAO,CAAC,CAAA;YACrE,IAAI,CAAC,MAAM,GAAG,KAAK,CAAA;SACpB;QAED,gFAAgF;QAChF,IAAI,CAAC,KAAK,EAAE;YACV,KAAK,GAAG,QAAQ,CAAC,CAAC,CAAC,KAAK,CAAC,WAAW,CAAC,CAAC,CAAC,IAAI,CAAC,WAAW,CAAA;SACxD;QAED,IAAI,QAAQ,IAAI,IAAI,CAAC,eAAe,EAAE;YACpC,wGAAwG;YACxG,kFAAkF;YAClF,mDAAmD;YACnD,KAAK,CAAC,OAAO,GAAG,MAAM,CAAC,MAAM,CAAC,KAAK,CAAC,OAAO,IAAI,EAAE,EAAE;gBACjD,kBAAkB,EAAE,KAAK;aAC1B,CAAC,CAAA;SACH;QAED,OAAO,KAAK,CAAA;IACd,CAAC;IAEa,0BAA0B,CAAC,WAAmB;;YAC1D,WAAW,GAAG,IAAI,CAAC,GAAG,CAAC,yBAAyB,EAAE,WAAW,CAAC,CAAA;YAC9D,MAAM,EAAE,GAAW,2BAA2B,GAAG,IAAI,CAAC,GAAG,CAAC,CAAC,EAAE,WAAW,CAAC,CAAA;YACzE,OAAO,IAAI,OAAO,CAAC,OAAO,CAAC,EAAE,CAAC,UAAU,CAAC,GAAG,EAAE,CAAC,OAAO,EAAE,EAAE,EAAE,CAAC,CAAC,CAAA;QAChE,CAAC;KAAA;IAEa,gBAAgB,CAC5B,GAAuB,EACvB,OAA4B;;YAE5B,OAAO,IAAI,OAAO,CAAuB,CAAO,OAAO,EAAE,MAAM,EAAE,EAAE;gBACjE,MAAM,UAAU,GAAG,GAAG,CAAC,OAAO,CAAC,UAAU,IAAI,CAAC,CAAA;gBAE9C,MAAM,QAAQ,GAAyB;oBACrC,UAAU;oBACV,MAAM,EAAE,IAAI;oBACZ,OAAO,EAAE,EAAE;iBACZ,CAAA;gBAED,uCAAuC;gBACvC,IAAI,UAAU,KAAK,SAAS,CAAC,QAAQ,EAAE;oBACrC,OAAO,CAAC,QAAQ,CAAC,CAAA;iBAClB;gBAED,+BAA+B;gBAE/B,SAAS,oBAAoB,CAAC,GAAQ,EAAE,KAAU;oBAChD,IAAI,OAAO,KAAK,KAAK,QAAQ,EAAE;wBAC7B,MAAM,CAAC,GAAG,IAAI,IAAI,CAAC,KAAK,CAAC,CAAA;wBACzB,IAAI,CAAC,KAAK,CAAC,CAAC,CAAC,OAAO,EAAE,CAAC,EAAE;4BACvB,OAAO,CAAC,CAAA;yBACT;qBACF;oBAED,OAAO,KAAK,CAAA;gBACd,CAAC;gBAED,IAAI,GAAQ,CAAA;gBACZ,IAAI,QAA4B,CAAA;gBAEhC,IAAI;oBACF,QAAQ,GAAG,MAAM,GAAG,CAAC,QAAQ,EAAE,CAAA;oBAC/B,IAAI,QAAQ,IAAI,QAAQ,CAAC,MAAM,GAAG,CAAC,EAAE;wBACnC,IAAI,OAAO,IAAI,OAAO,CAAC,gBAAgB,EAAE;4BACvC,GAAG,GAAG,IAAI,CAAC,KAAK,CAAC,QAAQ,EAAE,oBAAoB,CAAC,CAAA;yBACjD;6BAAM;4BACL,GAAG,GAAG,IAAI,CAAC,KAAK,CAAC,QAAQ,CAAC,CAAA;yBAC3B;wBAED,QAAQ,CAAC,MAAM,GAAG,GAAG,CAAA;qBACtB;oBAED,QAAQ,CAAC,OAAO,GAAG,GAAG,CAAC,OAAO,CAAC,OAAO,CAAA;iBACvC;gBAAC,OAAO,GAAG,EAAE;oBACZ,iEAAiE;iBAClE;gBAED,yDAAyD;gBACzD,IAAI,UAAU,GAAG,GAAG,EAAE;oBACpB,IAAI,GAAW,CAAA;oBAEf,0DAA0D;oBAC1D,IAAI,GAAG,IAAI,GAAG,CAAC,OAAO,EAAE;wBACtB,GAAG,GAAG,GAAG,CAAC,OAAO,CAAA;qBAClB;yBAAM,IAAI,QAAQ,IAAI,QAAQ,CAAC,MAAM,GAAG,CAAC,EAAE;wBAC1C,yEAAyE;wBACzE,GAAG,GAAG,QAAQ,CAAA;qBACf;yBAAM;wBACL,GAAG,GAAG,oBAAoB,UAAU,GAAG,CAAA;qBACxC;oBAED,MAAM,GAAG,GAAG,IAAI,eAAe,CAAC,GAAG,EAAE,UAAU,CAAC,C
AAA;oBAChD,GAAG,CAAC,MAAM,GAAG,QAAQ,CAAC,MAAM,CAAA;oBAE5B,MAAM,CAAC,GAAG,CAAC,CAAA;iBACZ;qBAAM;oBACL,OAAO,CAAC,QAAQ,CAAC,CAAA;iBAClB;YACH,CAAC,CAAA,CAAC,CAAA;QACJ,CAAC;KAAA;CACF;AAlpBD,gCAkpBC;AAED,MAAM,aAAa,GAAG,CAAC,GAA2B,EAAO,EAAE,CACzD,MAAM,CAAC,IAAI,CAAC,GAAG,CAAC,CAAC,MAAM,CAAC,CAAC,CAAM,EAAE,CAAC,EAAE,EAAE,CAAC,CAAC,CAAC,CAAC,CAAC,CAAC,CAAC,WAAW,EAAE,CAAC,GAAG,GAAG,CAAC,CAAC,CAAC,CAAC,EAAE,CAAC,CAAC,EAAE,EAAE,CAAC,CAAA"} \ No newline at end of file diff --git a/node_modules/@actions/http-client/lib/interfaces.d.ts b/node_modules/@actions/http-client/lib/interfaces.d.ts new file mode 100644 index 00000000..54fd4a89 --- /dev/null +++ b/node_modules/@actions/http-client/lib/interfaces.d.ts @@ -0,0 +1,44 @@ +/// <reference types="node" /> +import * as http from 'http'; +import * as https from 'https'; +import { HttpClientResponse } from './index'; +export interface HttpClient { + options(requestUrl: string, additionalHeaders?: http.OutgoingHttpHeaders): Promise<HttpClientResponse>; + get(requestUrl: string, additionalHeaders?: http.OutgoingHttpHeaders): Promise<HttpClientResponse>; + del(requestUrl: string, additionalHeaders?: http.OutgoingHttpHeaders): Promise<HttpClientResponse>; + post(requestUrl: string, data: string, additionalHeaders?: http.OutgoingHttpHeaders): Promise<HttpClientResponse>; + patch(requestUrl: string, data: string, additionalHeaders?: http.OutgoingHttpHeaders): Promise<HttpClientResponse>; + put(requestUrl: string, data: string, additionalHeaders?: http.OutgoingHttpHeaders): Promise<HttpClientResponse>; + sendStream(verb: string, requestUrl: string, stream: NodeJS.ReadableStream, additionalHeaders?: http.OutgoingHttpHeaders): Promise<HttpClientResponse>; + request(verb: string, requestUrl: string, data: string | NodeJS.ReadableStream, headers: http.OutgoingHttpHeaders): Promise<HttpClientResponse>; + requestRaw(info: RequestInfo, data: string | NodeJS.ReadableStream): Promise<HttpClientResponse>; + requestRawWithCallback(info: RequestInfo, data: string | NodeJS.ReadableStream, onResult: (err?: Error, res?: HttpClientResponse) => void): void; +} +export interface RequestHandler { + prepareRequest(options: http.RequestOptions): void; + canHandleAuthentication(response: HttpClientResponse): boolean; + handleAuthentication(httpClient: HttpClient, requestInfo: RequestInfo, data: string | NodeJS.ReadableStream | null): Promise<HttpClientResponse>; +} +export interface RequestInfo { + options: http.RequestOptions; + parsedUrl: URL; + httpModule: typeof http | typeof https; +} +export interface RequestOptions { + headers?: http.OutgoingHttpHeaders; + socketTimeout?: number; + ignoreSslError?: boolean; + allowRedirects?: boolean; + allowRedirectDowngrade?: boolean; + maxRedirects?: number; + maxSockets?: number; + keepAlive?: boolean; + deserializeDates?: boolean; + allowRetries?: boolean; + maxRetries?: number; +} +export interface TypedResponse<T> { + statusCode: number; + result: T | null; + headers: http.IncomingHttpHeaders; +} diff --git a/node_modules/@actions/http-client/lib/interfaces.js b/node_modules/@actions/http-client/lib/interfaces.js new file mode 100644 index 00000000..db919115 --- /dev/null +++ b/node_modules/@actions/http-client/lib/interfaces.js @@ -0,0 +1,3 @@ +"use strict"; +Object.defineProperty(exports, "__esModule", { value: true }); +//# sourceMappingURL=interfaces.js.map \ No newline at end of file diff --git a/node_modules/@actions/http-client/lib/interfaces.js.map b/node_modules/@actions/http-client/lib/interfaces.js.map new file mode 100644 index 00000000..8fb5f7d1 --- /dev/null +++ b/node_modules/@actions/http-client/lib/interfaces.js.map @@ -0,0 +1 @@ +{"version":3,"file":"interfaces.js","sourceRoot":"","sources":["../src/interfaces.ts"],"names":[],"mappings":""} \ No
newline at end of file diff --git a/node_modules/@actions/http-client/lib/proxy.d.ts b/node_modules/@actions/http-client/lib/proxy.d.ts new file mode 100644 index 00000000..45998654 --- /dev/null +++ b/node_modules/@actions/http-client/lib/proxy.d.ts @@ -0,0 +1,2 @@ +export declare function getProxyUrl(reqUrl: URL): URL | undefined; +export declare function checkBypass(reqUrl: URL): boolean; diff --git a/node_modules/@actions/http-client/lib/proxy.js b/node_modules/@actions/http-client/lib/proxy.js new file mode 100644 index 00000000..76abb723 --- /dev/null +++ b/node_modules/@actions/http-client/lib/proxy.js @@ -0,0 +1,76 @@ +"use strict"; +Object.defineProperty(exports, "__esModule", { value: true }); +exports.checkBypass = exports.getProxyUrl = void 0; +function getProxyUrl(reqUrl) { + const usingSsl = reqUrl.protocol === 'https:'; + if (checkBypass(reqUrl)) { + return undefined; + } + const proxyVar = (() => { + if (usingSsl) { + return process.env['https_proxy'] || process.env['HTTPS_PROXY']; + } + else { + return process.env['http_proxy'] || process.env['HTTP_PROXY']; + } + })(); + if (proxyVar) { + return new URL(proxyVar); + } + else { + return undefined; + } +} +exports.getProxyUrl = getProxyUrl; +function checkBypass(reqUrl) { + if (!reqUrl.hostname) { + return false; + } + const reqHost = reqUrl.hostname; + if (isLoopbackAddress(reqHost)) { + return true; + } + const noProxy = process.env['no_proxy'] || process.env['NO_PROXY'] || ''; + if (!noProxy) { + return false; + } + // Determine the request port + let reqPort; + if (reqUrl.port) { + reqPort = Number(reqUrl.port); + } + else if (reqUrl.protocol === 'http:') { + reqPort = 80; + } + else if (reqUrl.protocol === 'https:') { + reqPort = 443; + } + // Format the request hostname and hostname with port + const upperReqHosts = [reqUrl.hostname.toUpperCase()]; + if (typeof reqPort === 'number') { + upperReqHosts.push(`${upperReqHosts[0]}:${reqPort}`); + } + // Compare request host against noproxy + for (const upperNoProxyItem of noProxy + .split(',') + .map(x => x.trim().toUpperCase()) + .filter(x => x)) { + if (upperNoProxyItem === '*' || + upperReqHosts.some(x => x === upperNoProxyItem || + x.endsWith(`.${upperNoProxyItem}`) || + (upperNoProxyItem.startsWith('.') && + x.endsWith(`${upperNoProxyItem}`)))) { + return true; + } + } + return false; +} +exports.checkBypass = checkBypass; +function isLoopbackAddress(host) { + const hostLower = host.toLowerCase(); + return (hostLower === 'localhost' || + hostLower.startsWith('127.') || + hostLower.startsWith('[::1]') || + hostLower.startsWith('[0:0:0:0:0:0:0:1]')); +} +//# sourceMappingURL=proxy.js.map \ No newline at end of file diff --git a/node_modules/@actions/http-client/lib/proxy.js.map b/node_modules/@actions/http-client/lib/proxy.js.map new file mode 100644 index 00000000..b8206790 --- /dev/null +++ b/node_modules/@actions/http-client/lib/proxy.js.map @@ -0,0 +1 @@ 
+{"version":3,"file":"proxy.js","sourceRoot":"","sources":["../src/proxy.ts"],"names":[],"mappings":";;;AAAA,SAAgB,WAAW,CAAC,MAAW;IACrC,MAAM,QAAQ,GAAG,MAAM,CAAC,QAAQ,KAAK,QAAQ,CAAA;IAE7C,IAAI,WAAW,CAAC,MAAM,CAAC,EAAE;QACvB,OAAO,SAAS,CAAA;KACjB;IAED,MAAM,QAAQ,GAAG,CAAC,GAAG,EAAE;QACrB,IAAI,QAAQ,EAAE;YACZ,OAAO,OAAO,CAAC,GAAG,CAAC,aAAa,CAAC,IAAI,OAAO,CAAC,GAAG,CAAC,aAAa,CAAC,CAAA;SAChE;aAAM;YACL,OAAO,OAAO,CAAC,GAAG,CAAC,YAAY,CAAC,IAAI,OAAO,CAAC,GAAG,CAAC,YAAY,CAAC,CAAA;SAC9D;IACH,CAAC,CAAC,EAAE,CAAA;IAEJ,IAAI,QAAQ,EAAE;QACZ,OAAO,IAAI,GAAG,CAAC,QAAQ,CAAC,CAAA;KACzB;SAAM;QACL,OAAO,SAAS,CAAA;KACjB;AACH,CAAC;AApBD,kCAoBC;AAED,SAAgB,WAAW,CAAC,MAAW;IACrC,IAAI,CAAC,MAAM,CAAC,QAAQ,EAAE;QACpB,OAAO,KAAK,CAAA;KACb;IAED,MAAM,OAAO,GAAG,MAAM,CAAC,QAAQ,CAAA;IAC/B,IAAI,iBAAiB,CAAC,OAAO,CAAC,EAAE;QAC9B,OAAO,IAAI,CAAA;KACZ;IAED,MAAM,OAAO,GAAG,OAAO,CAAC,GAAG,CAAC,UAAU,CAAC,IAAI,OAAO,CAAC,GAAG,CAAC,UAAU,CAAC,IAAI,EAAE,CAAA;IACxE,IAAI,CAAC,OAAO,EAAE;QACZ,OAAO,KAAK,CAAA;KACb;IAED,6BAA6B;IAC7B,IAAI,OAA2B,CAAA;IAC/B,IAAI,MAAM,CAAC,IAAI,EAAE;QACf,OAAO,GAAG,MAAM,CAAC,MAAM,CAAC,IAAI,CAAC,CAAA;KAC9B;SAAM,IAAI,MAAM,CAAC,QAAQ,KAAK,OAAO,EAAE;QACtC,OAAO,GAAG,EAAE,CAAA;KACb;SAAM,IAAI,MAAM,CAAC,QAAQ,KAAK,QAAQ,EAAE;QACvC,OAAO,GAAG,GAAG,CAAA;KACd;IAED,qDAAqD;IACrD,MAAM,aAAa,GAAG,CAAC,MAAM,CAAC,QAAQ,CAAC,WAAW,EAAE,CAAC,CAAA;IACrD,IAAI,OAAO,OAAO,KAAK,QAAQ,EAAE;QAC/B,aAAa,CAAC,IAAI,CAAC,GAAG,aAAa,CAAC,CAAC,CAAC,IAAI,OAAO,EAAE,CAAC,CAAA;KACrD;IAED,uCAAuC;IACvC,KAAK,MAAM,gBAAgB,IAAI,OAAO;SACnC,KAAK,CAAC,GAAG,CAAC;SACV,GAAG,CAAC,CAAC,CAAC,EAAE,CAAC,CAAC,CAAC,IAAI,EAAE,CAAC,WAAW,EAAE,CAAC;SAChC,MAAM,CAAC,CAAC,CAAC,EAAE,CAAC,CAAC,CAAC,EAAE;QACjB,IACE,gBAAgB,KAAK,GAAG;YACxB,aAAa,CAAC,IAAI,CAChB,CAAC,CAAC,EAAE,CACF,CAAC,KAAK,gBAAgB;gBACtB,CAAC,CAAC,QAAQ,CAAC,IAAI,gBAAgB,EAAE,CAAC;gBAClC,CAAC,gBAAgB,CAAC,UAAU,CAAC,GAAG,CAAC;oBAC/B,CAAC,CAAC,QAAQ,CAAC,GAAG,gBAAgB,EAAE,CAAC,CAAC,CACvC,EACD;YACA,OAAO,IAAI,CAAA;SACZ;KACF;IAED,OAAO,KAAK,CAAA;AACd,CAAC;AAnDD,kCAmDC;AAED,SAAS,iBAAiB,CAAC,IAAY;IACrC,MAAM,SAAS,GAAG,IAAI,CAAC,WAAW,EAAE,CAAA;IACpC,OAAO,CACL,SAAS,KAAK,WAAW;QACzB,SAAS,CAAC,UAAU,CAAC,MAAM,CAAC;QAC5B,SAAS,CAAC,UAAU,CAAC,OAAO,CAAC;QAC7B,SAAS,CAAC,UAAU,CAAC,mBAAmB,CAAC,CAC1C,CAAA;AACH,CAAC"} \ No newline at end of file diff --git a/node_modules/@actions/http-client/package.json b/node_modules/@actions/http-client/package.json new file mode 100644 index 00000000..7f5c8ec3 --- /dev/null +++ b/node_modules/@actions/http-client/package.json @@ -0,0 +1,48 @@ +{ + "name": "@actions/http-client", + "version": "2.1.0", + "description": "Actions Http Client", + "keywords": [ + "github", + "actions", + "http" + ], + "homepage": "https://github.com/actions/toolkit/tree/main/packages/http-client", + "license": "MIT", + "main": "lib/index.js", + "types": "lib/index.d.ts", + "directories": { + "lib": "lib", + "test": "__tests__" + }, + "files": [ + "lib", + "!.DS_Store" + ], + "publishConfig": { + "access": "public" + }, + "repository": { + "type": "git", + "url": "git+https://github.com/actions/toolkit.git", + "directory": "packages/http-client" + }, + "scripts": { + "audit-moderate": "npm install && npm audit --json --audit-level=moderate > audit.json", + "test": "echo \"Error: run tests from root\" && exit 1", + "build": "tsc", + "format": "prettier --write **/*.ts", + "format-check": "prettier --check **/*.ts", + "tsc": "tsc" + }, + "bugs": { + "url": "https://github.com/actions/toolkit/issues" + }, + "devDependencies": { + "@types/tunnel": "0.0.3", + "proxy": "^1.0.1" + }, + "dependencies": { + "tunnel": "^0.0.6" + } +} diff --git 
a/node_modules/balanced-match/.github/FUNDING.yml b/node_modules/balanced-match/.github/FUNDING.yml new file mode 100644 index 00000000..cea8b16e --- /dev/null +++ b/node_modules/balanced-match/.github/FUNDING.yml @@ -0,0 +1,2 @@ +tidelift: "npm/balanced-match" +patreon: juliangruber diff --git a/node_modules/balanced-match/LICENSE.md b/node_modules/balanced-match/LICENSE.md new file mode 100644 index 00000000..2cdc8e41 --- /dev/null +++ b/node_modules/balanced-match/LICENSE.md @@ -0,0 +1,21 @@ +(MIT) + +Copyright (c) 2013 Julian Gruber <julian@juliangruber.com> + +Permission is hereby granted, free of charge, to any person obtaining a copy of +this software and associated documentation files (the "Software"), to deal in +the Software without restriction, including without limitation the rights to +use, copy, modify, merge, publish, distribute, sublicense, and/or sell copies +of the Software, and to permit persons to whom the Software is furnished to do +so, subject to the following conditions: + +The above copyright notice and this permission notice shall be included in all +copies or substantial portions of the Software. + +THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR +IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, +FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE +AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER +LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, +OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE +SOFTWARE. diff --git a/node_modules/balanced-match/README.md b/node_modules/balanced-match/README.md new file mode 100644 index 00000000..d2a48b6b --- /dev/null +++ b/node_modules/balanced-match/README.md @@ -0,0 +1,97 @@ +# balanced-match + +Match balanced string pairs, like `{` and `}` or `` and ``. Supports regular expressions as well! + +[![build status](https://secure.travis-ci.org/juliangruber/balanced-match.svg)](http://travis-ci.org/juliangruber/balanced-match) +[![downloads](https://img.shields.io/npm/dm/balanced-match.svg)](https://www.npmjs.org/package/balanced-match) + +[![testling badge](https://ci.testling.com/juliangruber/balanced-match.png)](https://ci.testling.com/juliangruber/balanced-match) + +## Example + +Get the first matching pair of braces: + +```js +var balanced = require('balanced-match'); + +console.log(balanced('{', '}', 'pre{in{nested}}post')); +console.log(balanced('{', '}', 'pre{first}between{second}post')); +console.log(balanced(/\s+\{\s+/, /\s+\}\s+/, 'pre { in{nest} } post')); +``` + +The matches are: + +```bash +$ node example.js +{ start: 3, end: 14, pre: 'pre', body: 'in{nested}', post: 'post' } +{ start: 3, + end: 9, + pre: 'pre', + body: 'first', + post: 'between{second}post' } +{ start: 3, end: 17, pre: 'pre', body: 'in{nest}', post: 'post' } +``` + +## API + +### var m = balanced(a, b, str) + +For the first non-nested matching pair of `a` and `b` in `str`, return an +object with those keys: + +* **start** the index of the first match of `a` +* **end** the index of the matching `b` +* **pre** the preamble, `a` and `b` not included +* **body** the match, `a` and `b` not included +* **post** the postscript, `a` and `b` not included + +If there's no match, `undefined` will be returned. + +If the `str` contains more `a` than `b` / there are unmatched pairs, the first match that was closed will be used. 
For example, `{{a}` will match `['{', 'a', '']` and `{a}}` will match `['', 'a', '}']`. + +### var r = balanced.range(a, b, str) + +For the first non-nested matching pair of `a` and `b` in `str`, return an +array with indexes: `[ , ]`. + +If there's no match, `undefined` will be returned. + +If the `str` contains more `a` than `b` / there are unmatched pairs, the first match that was closed will be used. For example, `{{a}` will match `[ 1, 3 ]` and `{a}}` will match `[0, 2]`. + +## Installation + +With [npm](https://npmjs.org) do: + +```bash +npm install balanced-match +``` + +## Security contact information + +To report a security vulnerability, please use the +[Tidelift security contact](https://tidelift.com/security). +Tidelift will coordinate the fix and disclosure. + +## License + +(MIT) + +Copyright (c) 2013 Julian Gruber <julian@juliangruber.com> + +Permission is hereby granted, free of charge, to any person obtaining a copy of +this software and associated documentation files (the "Software"), to deal in +the Software without restriction, including without limitation the rights to +use, copy, modify, merge, publish, distribute, sublicense, and/or sell copies +of the Software, and to permit persons to whom the Software is furnished to do +so, subject to the following conditions: + +The above copyright notice and this permission notice shall be included in all +copies or substantial portions of the Software. + +THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR +IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, +FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE +AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER +LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, +OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE +SOFTWARE. diff --git a/node_modules/balanced-match/index.js b/node_modules/balanced-match/index.js new file mode 100644 index 00000000..c67a6460 --- /dev/null +++ b/node_modules/balanced-match/index.js @@ -0,0 +1,62 @@ +'use strict'; +module.exports = balanced; +function balanced(a, b, str) { + if (a instanceof RegExp) a = maybeMatch(a, str); + if (b instanceof RegExp) b = maybeMatch(b, str); + + var r = range(a, b, str); + + return r && { + start: r[0], + end: r[1], + pre: str.slice(0, r[0]), + body: str.slice(r[0] + a.length, r[1]), + post: str.slice(r[1] + b.length) + }; +} + +function maybeMatch(reg, str) { + var m = str.match(reg); + return m ? m[0] : null; +} + +balanced.range = range; +function range(a, b, str) { + var begs, beg, left, right, result; + var ai = str.indexOf(a); + var bi = str.indexOf(b, ai + 1); + var i = ai; + + if (ai >= 0 && bi > 0) { + if(a===b) { + return [ai, bi]; + } + begs = []; + left = str.length; + + while (i >= 0 && !result) { + if (i == ai) { + begs.push(i); + ai = str.indexOf(a, i + 1); + } else if (begs.length == 1) { + result = [ begs.pop(), bi ]; + } else { + beg = begs.pop(); + if (beg < left) { + left = beg; + right = bi; + } + + bi = str.indexOf(b, i + 1); + } + + i = ai < bi && ai >= 0 ? 
ai : bi; + } + + if (begs.length) { + result = [ left, right ]; + } + } + + return result; +} diff --git a/node_modules/balanced-match/package.json b/node_modules/balanced-match/package.json new file mode 100644 index 00000000..ce6073e0 --- /dev/null +++ b/node_modules/balanced-match/package.json @@ -0,0 +1,48 @@ +{ + "name": "balanced-match", + "description": "Match balanced character pairs, like \"{\" and \"}\"", + "version": "1.0.2", + "repository": { + "type": "git", + "url": "git://github.com/juliangruber/balanced-match.git" + }, + "homepage": "https://github.com/juliangruber/balanced-match", + "main": "index.js", + "scripts": { + "test": "tape test/test.js", + "bench": "matcha test/bench.js" + }, + "devDependencies": { + "matcha": "^0.7.0", + "tape": "^4.6.0" + }, + "keywords": [ + "match", + "regexp", + "test", + "balanced", + "parse" + ], + "author": { + "name": "Julian Gruber", + "email": "mail@juliangruber.com", + "url": "http://juliangruber.com" + }, + "license": "MIT", + "testling": { + "files": "test/*.js", + "browsers": [ + "ie/8..latest", + "firefox/20..latest", + "firefox/nightly", + "chrome/25..latest", + "chrome/canary", + "opera/12..latest", + "opera/next", + "safari/5.1..latest", + "ipad/6.0..latest", + "iphone/6.0..latest", + "android-browser/4.2..latest" + ] + } +} diff --git a/node_modules/brace-expansion/LICENSE b/node_modules/brace-expansion/LICENSE new file mode 100644 index 00000000..de322667 --- /dev/null +++ b/node_modules/brace-expansion/LICENSE @@ -0,0 +1,21 @@ +MIT License + +Copyright (c) 2013 Julian Gruber + +Permission is hereby granted, free of charge, to any person obtaining a copy +of this software and associated documentation files (the "Software"), to deal +in the Software without restriction, including without limitation the rights +to use, copy, modify, merge, publish, distribute, sublicense, and/or sell +copies of the Software, and to permit persons to whom the Software is +furnished to do so, subject to the following conditions: + +The above copyright notice and this permission notice shall be included in all +copies or substantial portions of the Software. + +THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR +IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, +FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE +AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER +LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, +OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE +SOFTWARE. diff --git a/node_modules/brace-expansion/README.md b/node_modules/brace-expansion/README.md new file mode 100644 index 00000000..6b4e0e16 --- /dev/null +++ b/node_modules/brace-expansion/README.md @@ -0,0 +1,129 @@ +# brace-expansion + +[Brace expansion](https://www.gnu.org/software/bash/manual/html_node/Brace-Expansion.html), +as known from sh/bash, in JavaScript. 
+ +[![build status](https://secure.travis-ci.org/juliangruber/brace-expansion.svg)](http://travis-ci.org/juliangruber/brace-expansion) +[![downloads](https://img.shields.io/npm/dm/brace-expansion.svg)](https://www.npmjs.org/package/brace-expansion) +[![Greenkeeper badge](https://badges.greenkeeper.io/juliangruber/brace-expansion.svg)](https://greenkeeper.io/) + +[![testling badge](https://ci.testling.com/juliangruber/brace-expansion.png)](https://ci.testling.com/juliangruber/brace-expansion) + +## Example + +```js +var expand = require('brace-expansion'); + +expand('file-{a,b,c}.jpg') +// => ['file-a.jpg', 'file-b.jpg', 'file-c.jpg'] + +expand('-v{,,}') +// => ['-v', '-v', '-v'] + +expand('file{0..2}.jpg') +// => ['file0.jpg', 'file1.jpg', 'file2.jpg'] + +expand('file-{a..c}.jpg') +// => ['file-a.jpg', 'file-b.jpg', 'file-c.jpg'] + +expand('file{2..0}.jpg') +// => ['file2.jpg', 'file1.jpg', 'file0.jpg'] + +expand('file{0..4..2}.jpg') +// => ['file0.jpg', 'file2.jpg', 'file4.jpg'] + +expand('file-{a..e..2}.jpg') +// => ['file-a.jpg', 'file-c.jpg', 'file-e.jpg'] + +expand('file{00..10..5}.jpg') +// => ['file00.jpg', 'file05.jpg', 'file10.jpg'] + +expand('{{A..C},{a..c}}') +// => ['A', 'B', 'C', 'a', 'b', 'c'] + +expand('ppp{,config,oe{,conf}}') +// => ['ppp', 'pppconfig', 'pppoe', 'pppoeconf'] +``` + +## API + +```js +var expand = require('brace-expansion'); +``` + +### var expanded = expand(str) + +Return an array of all possible and valid expansions of `str`. If none are +found, `[str]` is returned. + +Valid expansions are: + +```js +/^(.*,)+(.+)?$/ +// {a,b,...} +``` + +A comma separated list of options, like `{a,b}` or `{a,{b,c}}` or `{,a,}`. + +```js +/^-?\d+\.\.-?\d+(\.\.-?\d+)?$/ +// {x..y[..incr]} +``` + +A numeric sequence from `x` to `y` inclusive, with optional increment. +If `x` or `y` start with a leading `0`, all the numbers will be padded +to have equal length. Negative numbers and backwards iteration work too. + +```js +/^-?\d+\.\.-?\d+(\.\.-?\d+)?$/ +// {x..y[..incr]} +``` + +An alphabetic sequence from `x` to `y` inclusive, with optional increment. +`x` and `y` must be exactly one character, and if given, `incr` must be a +number. + +For compatibility reasons, the string `${` is not eligible for brace expansion. + +## Installation + +With [npm](https://npmjs.org) do: + +```bash +npm install brace-expansion +``` + +## Contributors + +- [Julian Gruber](https://github.com/juliangruber) +- [Isaac Z. Schlueter](https://github.com/isaacs) + +## Sponsors + +This module is proudly supported by my [Sponsors](https://github.com/juliangruber/sponsors)! + +Do you want to support modules like this to improve their quality, stability and weigh in on new features? Then please consider donating to my [Patreon](https://www.patreon.com/juliangruber). Not sure how much of my modules you're using? Try [feross/thanks](https://github.com/feross/thanks)! 
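The API section above describes comma sets, numeric/alphabetic sequences, and the `${` exclusion. A minimal sketch exercising those documented behaviors follows; the expected outputs for the first two calls are taken from the README's own examples, and the `${` result is inferred from the compatibility note rather than stated in the README:

```js
// Minimal sketch of brace-expansion's documented behaviors.
var expand = require('brace-expansion');

// Comma-separated options expand into one entry per option.
console.log(expand('file-{a,b,c}.jpg'));
// => ['file-a.jpg', 'file-b.jpg', 'file-c.jpg']

// Numeric sequences honor zero-padding and an optional increment.
console.log(expand('file{00..10..5}.jpg'));
// => ['file00.jpg', 'file05.jpg', 'file10.jpg']

// The string `${` is not eligible for expansion, so the input
// comes back unchanged as a single-element array (inferred from
// the compatibility note above).
console.log(expand('${a,b}'));
// => ['${a,b}']
```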
+ +## License + +(MIT) + +Copyright (c) 2013 Julian Gruber <julian@juliangruber.com> + +Permission is hereby granted, free of charge, to any person obtaining a copy of +this software and associated documentation files (the "Software"), to deal in +the Software without restriction, including without limitation the rights to +use, copy, modify, merge, publish, distribute, sublicense, and/or sell copies +of the Software, and to permit persons to whom the Software is furnished to do +so, subject to the following conditions: + +The above copyright notice and this permission notice shall be included in all +copies or substantial portions of the Software. + +THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR +IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, +FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE +AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER +LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, +OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE +SOFTWARE. diff --git a/node_modules/brace-expansion/index.js b/node_modules/brace-expansion/index.js new file mode 100644 index 00000000..0478be81 --- /dev/null +++ b/node_modules/brace-expansion/index.js @@ -0,0 +1,201 @@ +var concatMap = require('concat-map'); +var balanced = require('balanced-match'); + +module.exports = expandTop; + +var escSlash = '\0SLASH'+Math.random()+'\0'; +var escOpen = '\0OPEN'+Math.random()+'\0'; +var escClose = '\0CLOSE'+Math.random()+'\0'; +var escComma = '\0COMMA'+Math.random()+'\0'; +var escPeriod = '\0PERIOD'+Math.random()+'\0'; + +function numeric(str) { + return parseInt(str, 10) == str + ? parseInt(str, 10) + : str.charCodeAt(0); +} + +function escapeBraces(str) { + return str.split('\\\\').join(escSlash) + .split('\\{').join(escOpen) + .split('\\}').join(escClose) + .split('\\,').join(escComma) + .split('\\.').join(escPeriod); +} + +function unescapeBraces(str) { + return str.split(escSlash).join('\\') + .split(escOpen).join('{') + .split(escClose).join('}') + .split(escComma).join(',') + .split(escPeriod).join('.'); +} + + +// Basically just str.split(","), but handling cases +// where we have nested braced sections, which should be +// treated as individual members, like {a,{b,c},d} +function parseCommaParts(str) { + if (!str) + return ['']; + + var parts = []; + var m = balanced('{', '}', str); + + if (!m) + return str.split(','); + + var pre = m.pre; + var body = m.body; + var post = m.post; + var p = pre.split(','); + + p[p.length-1] += '{' + body + '}'; + var postParts = parseCommaParts(post); + if (post.length) { + p[p.length-1] += postParts.shift(); + p.push.apply(p, postParts); + } + + parts.push.apply(parts, p); + + return parts; +} + +function expandTop(str) { + if (!str) + return []; + + // I don't know why Bash 4.3 does this, but it does. + // Anything starting with {} will have the first two bytes preserved + // but *only* at the top level, so {},a}b will not expand to anything, + // but a{},b}c will be expanded to [a}c,abc]. 
+ // One could argue that this is a bug in Bash, but since the goal of + // this module is to match Bash's rules, we escape a leading {} + if (str.substr(0, 2) === '{}') { + str = '\\{\\}' + str.substr(2); + } + + return expand(escapeBraces(str), true).map(unescapeBraces); +} + +function identity(e) { + return e; +} + +function embrace(str) { + return '{' + str + '}'; +} +function isPadded(el) { + return /^-?0\d/.test(el); +} + +function lte(i, y) { + return i <= y; +} +function gte(i, y) { + return i >= y; +} + +function expand(str, isTop) { + var expansions = []; + + var m = balanced('{', '}', str); + if (!m || /\$$/.test(m.pre)) return [str]; + + var isNumericSequence = /^-?\d+\.\.-?\d+(?:\.\.-?\d+)?$/.test(m.body); + var isAlphaSequence = /^[a-zA-Z]\.\.[a-zA-Z](?:\.\.-?\d+)?$/.test(m.body); + var isSequence = isNumericSequence || isAlphaSequence; + var isOptions = m.body.indexOf(',') >= 0; + if (!isSequence && !isOptions) { + // {a},b} + if (m.post.match(/,.*\}/)) { + str = m.pre + '{' + m.body + escClose + m.post; + return expand(str); + } + return [str]; + } + + var n; + if (isSequence) { + n = m.body.split(/\.\./); + } else { + n = parseCommaParts(m.body); + if (n.length === 1) { + // x{{a,b}}y ==> x{a}y x{b}y + n = expand(n[0], false).map(embrace); + if (n.length === 1) { + var post = m.post.length + ? expand(m.post, false) + : ['']; + return post.map(function(p) { + return m.pre + n[0] + p; + }); + } + } + } + + // at this point, n is the parts, and we know it's not a comma set + // with a single entry. + + // no need to expand pre, since it is guaranteed to be free of brace-sets + var pre = m.pre; + var post = m.post.length + ? expand(m.post, false) + : ['']; + + var N; + + if (isSequence) { + var x = numeric(n[0]); + var y = numeric(n[1]); + var width = Math.max(n[0].length, n[1].length) + var incr = n.length == 3 + ? 
Math.abs(numeric(n[2])) + : 1; + var test = lte; + var reverse = y < x; + if (reverse) { + incr *= -1; + test = gte; + } + var pad = n.some(isPadded); + + N = []; + + for (var i = x; test(i, y); i += incr) { + var c; + if (isAlphaSequence) { + c = String.fromCharCode(i); + if (c === '\\') + c = ''; + } else { + c = String(i); + if (pad) { + var need = width - c.length; + if (need > 0) { + var z = new Array(need + 1).join('0'); + if (i < 0) + c = '-' + z + c.slice(1); + else + c = z + c; + } + } + } + N.push(c); + } + } else { + N = concatMap(n, function(el) { return expand(el, false) }); + } + + for (var j = 0; j < N.length; j++) { + for (var k = 0; k < post.length; k++) { + var expansion = pre + N[j] + post[k]; + if (!isTop || isSequence || expansion) + expansions.push(expansion); + } + } + + return expansions; +} + diff --git a/node_modules/brace-expansion/package.json b/node_modules/brace-expansion/package.json new file mode 100644 index 00000000..a18faa8f --- /dev/null +++ b/node_modules/brace-expansion/package.json @@ -0,0 +1,47 @@ +{ + "name": "brace-expansion", + "description": "Brace expansion as known from sh/bash", + "version": "1.1.11", + "repository": { + "type": "git", + "url": "git://github.com/juliangruber/brace-expansion.git" + }, + "homepage": "https://github.com/juliangruber/brace-expansion", + "main": "index.js", + "scripts": { + "test": "tape test/*.js", + "gentest": "bash test/generate.sh", + "bench": "matcha test/perf/bench.js" + }, + "dependencies": { + "balanced-match": "^1.0.0", + "concat-map": "0.0.1" + }, + "devDependencies": { + "matcha": "^0.7.0", + "tape": "^4.6.0" + }, + "keywords": [], + "author": { + "name": "Julian Gruber", + "email": "mail@juliangruber.com", + "url": "http://juliangruber.com" + }, + "license": "MIT", + "testling": { + "files": "test/*.js", + "browsers": [ + "ie/8..latest", + "firefox/20..latest", + "firefox/nightly", + "chrome/25..latest", + "chrome/canary", + "opera/12..latest", + "opera/next", + "safari/5.1..latest", + "ipad/6.0..latest", + "iphone/6.0..latest", + "android-browser/4.2..latest" + ] + } +} diff --git a/node_modules/concat-map/.travis.yml b/node_modules/concat-map/.travis.yml new file mode 100644 index 00000000..f1d0f13c --- /dev/null +++ b/node_modules/concat-map/.travis.yml @@ -0,0 +1,4 @@ +language: node_js +node_js: + - 0.4 + - 0.6 diff --git a/node_modules/concat-map/LICENSE b/node_modules/concat-map/LICENSE new file mode 100644 index 00000000..ee27ba4b --- /dev/null +++ b/node_modules/concat-map/LICENSE @@ -0,0 +1,18 @@ +This software is released under the MIT license: + +Permission is hereby granted, free of charge, to any person obtaining a copy of +this software and associated documentation files (the "Software"), to deal in +the Software without restriction, including without limitation the rights to +use, copy, modify, merge, publish, distribute, sublicense, and/or sell copies of +the Software, and to permit persons to whom the Software is furnished to do so, +subject to the following conditions: + +The above copyright notice and this permission notice shall be included in all +copies or substantial portions of the Software. + +THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR +IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, FITNESS +FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. 
IN NO EVENT SHALL THE AUTHORS OR +COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY, WHETHER +IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, OUT OF OR IN +CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE SOFTWARE. diff --git a/node_modules/concat-map/README.markdown b/node_modules/concat-map/README.markdown new file mode 100644 index 00000000..408f70a1 --- /dev/null +++ b/node_modules/concat-map/README.markdown @@ -0,0 +1,62 @@ +concat-map +========== + +Concatenative mapdashery. + +[![browser support](http://ci.testling.com/substack/node-concat-map.png)](http://ci.testling.com/substack/node-concat-map) + +[![build status](https://secure.travis-ci.org/substack/node-concat-map.png)](http://travis-ci.org/substack/node-concat-map) + +example +======= + +``` js +var concatMap = require('concat-map'); +var xs = [ 1, 2, 3, 4, 5, 6 ]; +var ys = concatMap(xs, function (x) { + return x % 2 ? [ x - 0.1, x, x + 0.1 ] : []; +}); +console.dir(ys); +``` + +*** + +``` +[ 0.9, 1, 1.1, 2.9, 3, 3.1, 4.9, 5, 5.1 ] +``` + +methods +======= + +``` js +var concatMap = require('concat-map') +``` + +concatMap(xs, fn) +----------------- + +Return an array of concatenated elements by calling `fn(x, i)` for each element +`x` and each index `i` in the array `xs`. + +When `fn(x, i)` returns an array, its result will be concatenated with the +result array. If `fn(x, i)` returns anything else, that value will be pushed +onto the end of the result array. + +install +======= + +With [npm](http://npmjs.org) do: + +``` +npm install concat-map +``` + +license +======= + +MIT + +notes +===== + +This module was written while sitting high above the ground in a tree. diff --git a/node_modules/concat-map/example/map.js b/node_modules/concat-map/example/map.js new file mode 100644 index 00000000..33656217 --- /dev/null +++ b/node_modules/concat-map/example/map.js @@ -0,0 +1,6 @@ +var concatMap = require('../'); +var xs = [ 1, 2, 3, 4, 5, 6 ]; +var ys = concatMap(xs, function (x) { + return x % 2 ? 
[ x - 0.1, x, x + 0.1 ] : []; +}); +console.dir(ys); diff --git a/node_modules/concat-map/index.js b/node_modules/concat-map/index.js new file mode 100644 index 00000000..b29a7812 --- /dev/null +++ b/node_modules/concat-map/index.js @@ -0,0 +1,13 @@ +module.exports = function (xs, fn) { + var res = []; + for (var i = 0; i < xs.length; i++) { + var x = fn(xs[i], i); + if (isArray(x)) res.push.apply(res, x); + else res.push(x); + } + return res; +}; + +var isArray = Array.isArray || function (xs) { + return Object.prototype.toString.call(xs) === '[object Array]'; +}; diff --git a/node_modules/concat-map/package.json b/node_modules/concat-map/package.json new file mode 100644 index 00000000..d3640e6b --- /dev/null +++ b/node_modules/concat-map/package.json @@ -0,0 +1,43 @@ +{ + "name" : "concat-map", + "description" : "concatenative mapdashery", + "version" : "0.0.1", + "repository" : { + "type" : "git", + "url" : "git://github.com/substack/node-concat-map.git" + }, + "main" : "index.js", + "keywords" : [ + "concat", + "concatMap", + "map", + "functional", + "higher-order" + ], + "directories" : { + "example" : "example", + "test" : "test" + }, + "scripts" : { + "test" : "tape test/*.js" + }, + "devDependencies" : { + "tape" : "~2.4.0" + }, + "license" : "MIT", + "author" : { + "name" : "James Halliday", + "email" : "mail@substack.net", + "url" : "http://substack.net" + }, + "testling" : { + "files" : "test/*.js", + "browsers" : { + "ie" : [ 6, 7, 8, 9 ], + "ff" : [ 3.5, 10, 15.0 ], + "chrome" : [ 10, 22 ], + "safari" : [ 5.1 ], + "opera" : [ 12 ] + } + } +} diff --git a/node_modules/concat-map/test/map.js b/node_modules/concat-map/test/map.js new file mode 100644 index 00000000..fdbd7022 --- /dev/null +++ b/node_modules/concat-map/test/map.js @@ -0,0 +1,39 @@ +var concatMap = require('../'); +var test = require('tape'); + +test('empty or not', function (t) { + var xs = [ 1, 2, 3, 4, 5, 6 ]; + var ixes = []; + var ys = concatMap(xs, function (x, ix) { + ixes.push(ix); + return x % 2 ? [ x - 0.1, x, x + 0.1 ] : []; + }); + t.same(ys, [ 0.9, 1, 1.1, 2.9, 3, 3.1, 4.9, 5, 5.1 ]); + t.same(ixes, [ 0, 1, 2, 3, 4, 5 ]); + t.end(); +}); + +test('always something', function (t) { + var xs = [ 'a', 'b', 'c', 'd' ]; + var ys = concatMap(xs, function (x) { + return x === 'b' ? [ 'B', 'B', 'B' ] : [ x ]; + }); + t.same(ys, [ 'a', 'B', 'B', 'B', 'c', 'd' ]); + t.end(); +}); + +test('scalars', function (t) { + var xs = [ 'a', 'b', 'c', 'd' ]; + var ys = concatMap(xs, function (x) { + return x === 'b' ? [ 'B', 'B', 'B' ] : x; + }); + t.same(ys, [ 'a', 'B', 'B', 'B', 'c', 'd' ]); + t.end(); +}); + +test('undefs', function (t) { + var xs = [ 'a', 'b', 'c', 'd' ]; + var ys = concatMap(xs, function () {}); + t.same(ys, [ undefined, undefined, undefined, undefined ]); + t.end(); +}); diff --git a/node_modules/crypto/README.md b/node_modules/crypto/README.md new file mode 100644 index 00000000..5437f14d --- /dev/null +++ b/node_modules/crypto/README.md @@ -0,0 +1,7 @@ +# Deprecated Package + +This package is no longer supported and has been deprecated. To avoid malicious use, npm is hanging on to the package name. + +It's now a built-in Node module. If you've depended on crypto, you should switch to the one that's built-in. + +Please contact support@npmjs.com if you have questions about this package. 
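Since this placeholder package just points at Node's built-in module, switching means dropping the dependency and requiring `crypto` directly. A minimal sketch of that, with the hash algorithm and input chosen arbitrarily for illustration:

```js
// Use Node's built-in crypto module instead of the deprecated npm package.
const crypto = require('crypto');

// Compute a SHA-256 digest of a string and print it as hex.
const digest = crypto.createHash('sha256').update('hello world').digest('hex');
console.log(digest);
```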
diff --git a/node_modules/crypto/package.json b/node_modules/crypto/package.json new file mode 100644 index 00000000..01aa4d3f --- /dev/null +++ b/node_modules/crypto/package.json @@ -0,0 +1,19 @@ +{ + "name": "crypto", + "version": "1.0.1", + "description": "", + "main": "index.js", + "scripts": { + "test": "echo \"Error: no test specified\" && exit 1" + }, + "repository": { + "type": "git", + "url": "git+https://github.com/npm/deprecate-holder.git" + }, + "author": "", + "license": "ISC", + "bugs": { + "url": "https://github.com/npm/deprecate-holder/issues" + }, + "homepage": "https://github.com/npm/deprecate-holder#readme" +} diff --git a/node_modules/fs.realpath/LICENSE b/node_modules/fs.realpath/LICENSE new file mode 100644 index 00000000..5bd884c2 --- /dev/null +++ b/node_modules/fs.realpath/LICENSE @@ -0,0 +1,43 @@ +The ISC License + +Copyright (c) Isaac Z. Schlueter and Contributors + +Permission to use, copy, modify, and/or distribute this software for any +purpose with or without fee is hereby granted, provided that the above +copyright notice and this permission notice appear in all copies. + +THE SOFTWARE IS PROVIDED "AS IS" AND THE AUTHOR DISCLAIMS ALL WARRANTIES +WITH REGARD TO THIS SOFTWARE INCLUDING ALL IMPLIED WARRANTIES OF +MERCHANTABILITY AND FITNESS. IN NO EVENT SHALL THE AUTHOR BE LIABLE FOR +ANY SPECIAL, DIRECT, INDIRECT, OR CONSEQUENTIAL DAMAGES OR ANY DAMAGES +WHATSOEVER RESULTING FROM LOSS OF USE, DATA OR PROFITS, WHETHER IN AN +ACTION OF CONTRACT, NEGLIGENCE OR OTHER TORTIOUS ACTION, ARISING OUT OF OR +IN CONNECTION WITH THE USE OR PERFORMANCE OF THIS SOFTWARE. + +---- + +This library bundles a version of the `fs.realpath` and `fs.realpathSync` +methods from Node.js v0.10 under the terms of the Node.js MIT license. + +Node's license follows, also included at the header of `old.js` which contains +the licensed code: + + Copyright Joyent, Inc. and other Node contributors. + + Permission is hereby granted, free of charge, to any person obtaining a + copy of this software and associated documentation files (the "Software"), + to deal in the Software without restriction, including without limitation + the rights to use, copy, modify, merge, publish, distribute, sublicense, + and/or sell copies of the Software, and to permit persons to whom the + Software is furnished to do so, subject to the following conditions: + + The above copyright notice and this permission notice shall be included in + all copies or substantial portions of the Software. + + THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR + IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, + FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE + AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER + LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING + FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER + DEALINGS IN THE SOFTWARE. diff --git a/node_modules/fs.realpath/README.md b/node_modules/fs.realpath/README.md new file mode 100644 index 00000000..a42ceac6 --- /dev/null +++ b/node_modules/fs.realpath/README.md @@ -0,0 +1,33 @@ +# fs.realpath + +A backwards-compatible fs.realpath for Node v6 and above + +In Node v6, the JavaScript implementation of fs.realpath was replaced +with a faster (but less resilient) native implementation. That raises +new and platform-specific errors and cannot handle long or excessively +symlink-looping paths. 
+ +This module handles those cases by detecting the new errors and +falling back to the JavaScript implementation. On versions of Node +prior to v6, it has no effect. + +## USAGE + +```js +var rp = require('fs.realpath') + +// async version +rp.realpath(someLongAndLoopingPath, function (er, real) { + // the ELOOP was handled, but it was a bit slower +}) + +// sync version +var real = rp.realpathSync(someLongAndLoopingPath) + +// monkeypatch at your own risk! +// This replaces the fs.realpath/fs.realpathSync builtins +rp.monkeypatch() + +// un-do the monkeypatching +rp.unmonkeypatch() +``` diff --git a/node_modules/fs.realpath/index.js b/node_modules/fs.realpath/index.js new file mode 100644 index 00000000..b09c7c7e --- /dev/null +++ b/node_modules/fs.realpath/index.js @@ -0,0 +1,66 @@ +module.exports = realpath +realpath.realpath = realpath +realpath.sync = realpathSync +realpath.realpathSync = realpathSync +realpath.monkeypatch = monkeypatch +realpath.unmonkeypatch = unmonkeypatch + +var fs = require('fs') +var origRealpath = fs.realpath +var origRealpathSync = fs.realpathSync + +var version = process.version +var ok = /^v[0-5]\./.test(version) +var old = require('./old.js') + +function newError (er) { + return er && er.syscall === 'realpath' && ( + er.code === 'ELOOP' || + er.code === 'ENOMEM' || + er.code === 'ENAMETOOLONG' + ) +} + +function realpath (p, cache, cb) { + if (ok) { + return origRealpath(p, cache, cb) + } + + if (typeof cache === 'function') { + cb = cache + cache = null + } + origRealpath(p, cache, function (er, result) { + if (newError(er)) { + old.realpath(p, cache, cb) + } else { + cb(er, result) + } + }) +} + +function realpathSync (p, cache) { + if (ok) { + return origRealpathSync(p, cache) + } + + try { + return origRealpathSync(p, cache) + } catch (er) { + if (newError(er)) { + return old.realpathSync(p, cache) + } else { + throw er + } + } +} + +function monkeypatch () { + fs.realpath = realpath + fs.realpathSync = realpathSync +} + +function unmonkeypatch () { + fs.realpath = origRealpath + fs.realpathSync = origRealpathSync +} diff --git a/node_modules/fs.realpath/old.js b/node_modules/fs.realpath/old.js new file mode 100644 index 00000000..b40305e7 --- /dev/null +++ b/node_modules/fs.realpath/old.js @@ -0,0 +1,303 @@ +// Copyright Joyent, Inc. and other Node contributors. +// +// Permission is hereby granted, free of charge, to any person obtaining a +// copy of this software and associated documentation files (the +// "Software"), to deal in the Software without restriction, including +// without limitation the rights to use, copy, modify, merge, publish, +// distribute, sublicense, and/or sell copies of the Software, and to permit +// persons to whom the Software is furnished to do so, subject to the +// following conditions: +// +// The above copyright notice and this permission notice shall be included +// in all copies or substantial portions of the Software. +// +// THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS +// OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF +// MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN +// NO EVENT SHALL THE AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, +// DAMAGES OR OTHER LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR +// OTHERWISE, ARISING FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE +// USE OR OTHER DEALINGS IN THE SOFTWARE. 
+ +var pathModule = require('path'); +var isWindows = process.platform === 'win32'; +var fs = require('fs'); + +// JavaScript implementation of realpath, ported from node pre-v6 + +var DEBUG = process.env.NODE_DEBUG && /fs/.test(process.env.NODE_DEBUG); + +function rethrow() { + // Only enable in debug mode. A backtrace uses ~1000 bytes of heap space and + // is fairly slow to generate. + var callback; + if (DEBUG) { + var backtrace = new Error; + callback = debugCallback; + } else + callback = missingCallback; + + return callback; + + function debugCallback(err) { + if (err) { + backtrace.message = err.message; + err = backtrace; + missingCallback(err); + } + } + + function missingCallback(err) { + if (err) { + if (process.throwDeprecation) + throw err; // Forgot a callback but don't know where? Use NODE_DEBUG=fs + else if (!process.noDeprecation) { + var msg = 'fs: missing callback ' + (err.stack || err.message); + if (process.traceDeprecation) + console.trace(msg); + else + console.error(msg); + } + } + } +} + +function maybeCallback(cb) { + return typeof cb === 'function' ? cb : rethrow(); +} + +var normalize = pathModule.normalize; + +// Regexp that finds the next partion of a (partial) path +// result is [base_with_slash, base], e.g. ['somedir/', 'somedir'] +if (isWindows) { + var nextPartRe = /(.*?)(?:[\/\\]+|$)/g; +} else { + var nextPartRe = /(.*?)(?:[\/]+|$)/g; +} + +// Regex to find the device root, including trailing slash. E.g. 'c:\\'. +if (isWindows) { + var splitRootRe = /^(?:[a-zA-Z]:|[\\\/]{2}[^\\\/]+[\\\/][^\\\/]+)?[\\\/]*/; +} else { + var splitRootRe = /^[\/]*/; +} + +exports.realpathSync = function realpathSync(p, cache) { + // make p is absolute + p = pathModule.resolve(p); + + if (cache && Object.prototype.hasOwnProperty.call(cache, p)) { + return cache[p]; + } + + var original = p, + seenLinks = {}, + knownHard = {}; + + // current character position in p + var pos; + // the partial path so far, including a trailing slash if any + var current; + // the partial path without a trailing slash (except when pointing at a root) + var base; + // the partial path scanned in the previous round, with slash + var previous; + + start(); + + function start() { + // Skip over roots + var m = splitRootRe.exec(p); + pos = m[0].length; + current = m[0]; + base = m[0]; + previous = ''; + + // On windows, check that the root exists. On unix there is no need. + if (isWindows && !knownHard[base]) { + fs.lstatSync(base); + knownHard[base] = true; + } + } + + // walk down the path, swapping out linked pathparts for their real + // values + // NB: p.length changes. + while (pos < p.length) { + // find the next part + nextPartRe.lastIndex = pos; + var result = nextPartRe.exec(p); + previous = current; + current += result[0]; + base = previous + result[1]; + pos = nextPartRe.lastIndex; + + // continue if not a symlink + if (knownHard[base] || (cache && cache[base] === base)) { + continue; + } + + var resolvedLink; + if (cache && Object.prototype.hasOwnProperty.call(cache, base)) { + // some known symbolic link. no need to stat again. + resolvedLink = cache[base]; + } else { + var stat = fs.lstatSync(base); + if (!stat.isSymbolicLink()) { + knownHard[base] = true; + if (cache) cache[base] = base; + continue; + } + + // read the link if it wasn't read before + // dev/ino always return 0 on windows, so skip the check. 
+ var linkTarget = null; + if (!isWindows) { + var id = stat.dev.toString(32) + ':' + stat.ino.toString(32); + if (seenLinks.hasOwnProperty(id)) { + linkTarget = seenLinks[id]; + } + } + if (linkTarget === null) { + fs.statSync(base); + linkTarget = fs.readlinkSync(base); + } + resolvedLink = pathModule.resolve(previous, linkTarget); + // track this, if given a cache. + if (cache) cache[base] = resolvedLink; + if (!isWindows) seenLinks[id] = linkTarget; + } + + // resolve the link, then start over + p = pathModule.resolve(resolvedLink, p.slice(pos)); + start(); + } + + if (cache) cache[original] = p; + + return p; +}; + + +exports.realpath = function realpath(p, cache, cb) { + if (typeof cb !== 'function') { + cb = maybeCallback(cache); + cache = null; + } + + // make p is absolute + p = pathModule.resolve(p); + + if (cache && Object.prototype.hasOwnProperty.call(cache, p)) { + return process.nextTick(cb.bind(null, null, cache[p])); + } + + var original = p, + seenLinks = {}, + knownHard = {}; + + // current character position in p + var pos; + // the partial path so far, including a trailing slash if any + var current; + // the partial path without a trailing slash (except when pointing at a root) + var base; + // the partial path scanned in the previous round, with slash + var previous; + + start(); + + function start() { + // Skip over roots + var m = splitRootRe.exec(p); + pos = m[0].length; + current = m[0]; + base = m[0]; + previous = ''; + + // On windows, check that the root exists. On unix there is no need. + if (isWindows && !knownHard[base]) { + fs.lstat(base, function(err) { + if (err) return cb(err); + knownHard[base] = true; + LOOP(); + }); + } else { + process.nextTick(LOOP); + } + } + + // walk down the path, swapping out linked pathparts for their real + // values + function LOOP() { + // stop if scanned past end of path + if (pos >= p.length) { + if (cache) cache[original] = p; + return cb(null, p); + } + + // find the next part + nextPartRe.lastIndex = pos; + var result = nextPartRe.exec(p); + previous = current; + current += result[0]; + base = previous + result[1]; + pos = nextPartRe.lastIndex; + + // continue if not a symlink + if (knownHard[base] || (cache && cache[base] === base)) { + return process.nextTick(LOOP); + } + + if (cache && Object.prototype.hasOwnProperty.call(cache, base)) { + // known symbolic link. no need to stat again. + return gotResolvedLink(cache[base]); + } + + return fs.lstat(base, gotStat); + } + + function gotStat(err, stat) { + if (err) return cb(err); + + // if not a symlink, skip to the next path part + if (!stat.isSymbolicLink()) { + knownHard[base] = true; + if (cache) cache[base] = base; + return process.nextTick(LOOP); + } + + // stat & read the link if not read before + // call gotTarget as soon as the link target is known + // dev/ino always return 0 on windows, so skip the check. 
+ if (!isWindows) { + var id = stat.dev.toString(32) + ':' + stat.ino.toString(32); + if (seenLinks.hasOwnProperty(id)) { + return gotTarget(null, seenLinks[id], base); + } + } + fs.stat(base, function(err) { + if (err) return cb(err); + + fs.readlink(base, function(err, target) { + if (!isWindows) seenLinks[id] = target; + gotTarget(err, target); + }); + }); + } + + function gotTarget(err, target, base) { + if (err) return cb(err); + + var resolvedLink = pathModule.resolve(previous, target); + if (cache) cache[base] = resolvedLink; + gotResolvedLink(resolvedLink); + } + + function gotResolvedLink(resolvedLink) { + // resolve the link, then start over + p = pathModule.resolve(resolvedLink, p.slice(pos)); + start(); + } +}; diff --git a/node_modules/fs.realpath/package.json b/node_modules/fs.realpath/package.json new file mode 100644 index 00000000..3edc57d2 --- /dev/null +++ b/node_modules/fs.realpath/package.json @@ -0,0 +1,26 @@ +{ + "name": "fs.realpath", + "version": "1.0.0", + "description": "Use node's fs.realpath, but fall back to the JS implementation if the native one fails", + "main": "index.js", + "dependencies": {}, + "devDependencies": {}, + "scripts": { + "test": "tap test/*.js --cov" + }, + "repository": { + "type": "git", + "url": "git+https://github.com/isaacs/fs.realpath.git" + }, + "keywords": [ + "realpath", + "fs", + "polyfill" + ], + "author": "Isaac Z. Schlueter (http://blog.izs.me/)", + "license": "ISC", + "files": [ + "old.js", + "index.js" + ] +} diff --git a/node_modules/fs/README.md b/node_modules/fs/README.md new file mode 100644 index 00000000..5e9a74ca --- /dev/null +++ b/node_modules/fs/README.md @@ -0,0 +1,9 @@ +# Security holding package + +This package name is not currently in use, but was formerly occupied +by another package. To avoid malicious use, npm is hanging on to the +package name, but loosely, and we'll probably give it to you if you +want it. + +You may adopt this package by contacting support@npmjs.com and +requesting the name. diff --git a/node_modules/fs/package.json b/node_modules/fs/package.json new file mode 100644 index 00000000..11661b03 --- /dev/null +++ b/node_modules/fs/package.json @@ -0,0 +1,20 @@ +{ + "name": "fs", + "version": "0.0.1-security", + "description": "", + "main": "index.js", + "scripts": { + "test": "echo \"Error: no test specified\" && exit 1" + }, + "repository": { + "type": "git", + "url": "git+https://github.com/npm/security-holder.git" + }, + "keywords": [], + "author": "", + "license": "ISC", + "bugs": { + "url": "https://github.com/npm/security-holder/issues" + }, + "homepage": "https://github.com/npm/security-holder#readme" +} diff --git a/node_modules/glob/LICENSE b/node_modules/glob/LICENSE new file mode 100644 index 00000000..42ca266d --- /dev/null +++ b/node_modules/glob/LICENSE @@ -0,0 +1,21 @@ +The ISC License + +Copyright (c) Isaac Z. Schlueter and Contributors + +Permission to use, copy, modify, and/or distribute this software for any +purpose with or without fee is hereby granted, provided that the above +copyright notice and this permission notice appear in all copies. + +THE SOFTWARE IS PROVIDED "AS IS" AND THE AUTHOR DISCLAIMS ALL WARRANTIES +WITH REGARD TO THIS SOFTWARE INCLUDING ALL IMPLIED WARRANTIES OF +MERCHANTABILITY AND FITNESS. 
IN NO EVENT SHALL THE AUTHOR BE LIABLE FOR +ANY SPECIAL, DIRECT, INDIRECT, OR CONSEQUENTIAL DAMAGES OR ANY DAMAGES +WHATSOEVER RESULTING FROM LOSS OF USE, DATA OR PROFITS, WHETHER IN AN +ACTION OF CONTRACT, NEGLIGENCE OR OTHER TORTIOUS ACTION, ARISING OUT OF OR +IN CONNECTION WITH THE USE OR PERFORMANCE OF THIS SOFTWARE. + +## Glob Logo + +Glob's logo created by Tanya Brassie , licensed +under a Creative Commons Attribution-ShareAlike 4.0 International License +https://creativecommons.org/licenses/by-sa/4.0/ diff --git a/node_modules/glob/README.md b/node_modules/glob/README.md new file mode 100644 index 00000000..83f0c83a --- /dev/null +++ b/node_modules/glob/README.md @@ -0,0 +1,378 @@ +# Glob + +Match files using the patterns the shell uses, like stars and stuff. + +[![Build Status](https://travis-ci.org/isaacs/node-glob.svg?branch=master)](https://travis-ci.org/isaacs/node-glob/) [![Build Status](https://ci.appveyor.com/api/projects/status/kd7f3yftf7unxlsx?svg=true)](https://ci.appveyor.com/project/isaacs/node-glob) [![Coverage Status](https://coveralls.io/repos/isaacs/node-glob/badge.svg?branch=master&service=github)](https://coveralls.io/github/isaacs/node-glob?branch=master) + +This is a glob implementation in JavaScript. It uses the `minimatch` +library to do its matching. + +![a fun cartoon logo made of glob characters](logo/glob.png) + +## Usage + +Install with npm + +``` +npm i glob +``` + +```javascript +var glob = require("glob") + +// options is optional +glob("**/*.js", options, function (er, files) { + // files is an array of filenames. + // If the `nonull` option is set, and nothing + // was found, then files is ["**/*.js"] + // er is an error object or null. +}) +``` + +## Glob Primer + +"Globs" are the patterns you type when you do stuff like `ls *.js` on +the command line, or put `build/*` in a `.gitignore` file. + +Before parsing the path part patterns, braced sections are expanded +into a set. Braced sections start with `{` and end with `}`, with any +number of comma-delimited sections within. Braced sections may contain +slash characters, so `a{/b/c,bcd}` would expand into `a/b/c` and `abcd`. + +The following characters have special magic meaning when used in a +path portion: + +* `*` Matches 0 or more characters in a single path portion +* `?` Matches 1 character +* `[...]` Matches a range of characters, similar to a RegExp range. + If the first character of the range is `!` or `^` then it matches + any character not in the range. +* `!(pattern|pattern|pattern)` Matches anything that does not match + any of the patterns provided. +* `?(pattern|pattern|pattern)` Matches zero or one occurrence of the + patterns provided. +* `+(pattern|pattern|pattern)` Matches one or more occurrences of the + patterns provided. +* `*(a|b|c)` Matches zero or more occurrences of the patterns provided +* `@(pattern|pat*|pat?erN)` Matches exactly one of the patterns + provided +* `**` If a "globstar" is alone in a path portion, then it matches + zero or more directories and subdirectories searching for matches. + It does not crawl symlinked directories. + +### Dots + +If a file or directory path portion has a `.` as the first character, +then it will not match any glob pattern unless that pattern's +corresponding path part also has a `.` as its first character. + +For example, the pattern `a/.*/c` would match the file at `a/.b/c`. +However the pattern `a/*/c` would not, because `*` does not start with +a dot character. 
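A small sketch of that dot rule, using the `glob.sync` form documented later in this README and assuming `a/.b/c` is the only matching entry on disk:

```js
// Illustrates the dot rule described above, assuming a/.b/c exists on disk.
var glob = require('glob');

// An explicit dot in the pattern matches the dot-directory.
console.log(glob.sync('a/.*/c'));   // => ['a/.b/c']

// A bare `*` does not match names starting with a dot by default.
console.log(glob.sync('a/*/c'));    // => []

// Setting dot:true lifts that restriction (see the note below).
console.log(glob.sync('a/*/c', { dot: true })); // => ['a/.b/c']
```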
+ +You can make glob treat dots as normal characters by setting +`dot:true` in the options. + +### Basename Matching + +If you set `matchBase:true` in the options, and the pattern has no +slashes in it, then it will seek for any file anywhere in the tree +with a matching basename. For example, `*.js` would match +`test/simple/basic.js`. + +### Empty Sets + +If no matching files are found, then an empty array is returned. This +differs from the shell, where the pattern itself is returned. For +example: + + $ echo a*s*d*f + a*s*d*f + +To get the bash-style behavior, set the `nonull:true` in the options. + +### See Also: + +* `man sh` +* `man bash` (Search for "Pattern Matching") +* `man 3 fnmatch` +* `man 5 gitignore` +* [minimatch documentation](https://github.com/isaacs/minimatch) + +## glob.hasMagic(pattern, [options]) + +Returns `true` if there are any special characters in the pattern, and +`false` otherwise. + +Note that the options affect the results. If `noext:true` is set in +the options object, then `+(a|b)` will not be considered a magic +pattern. If the pattern has a brace expansion, like `a/{b/c,x/y}` +then that is considered magical, unless `nobrace:true` is set in the +options. + +## glob(pattern, [options], cb) + +* `pattern` `{String}` Pattern to be matched +* `options` `{Object}` +* `cb` `{Function}` + * `err` `{Error | null}` + * `matches` `{Array}` filenames found matching the pattern + +Perform an asynchronous glob search. + +## glob.sync(pattern, [options]) + +* `pattern` `{String}` Pattern to be matched +* `options` `{Object}` +* return: `{Array}` filenames found matching the pattern + +Perform a synchronous glob search. + +## Class: glob.Glob + +Create a Glob object by instantiating the `glob.Glob` class. + +```javascript +var Glob = require("glob").Glob +var mg = new Glob(pattern, options, cb) +``` + +It's an EventEmitter, and starts walking the filesystem to find matches +immediately. + +### new glob.Glob(pattern, [options], [cb]) + +* `pattern` `{String}` pattern to search for +* `options` `{Object}` +* `cb` `{Function}` Called when an error occurs, or matches are found + * `err` `{Error | null}` + * `matches` `{Array}` filenames found matching the pattern + +Note that if the `sync` flag is set in the options, then matches will +be immediately available on the `g.found` member. + +### Properties + +* `minimatch` The minimatch object that the glob uses. +* `options` The options object passed in. +* `aborted` Boolean which is set to true when calling `abort()`. There + is no way at this time to continue a glob search after aborting, but + you can re-use the statCache to avoid having to duplicate syscalls. +* `cache` Convenience object. Each field has the following possible + values: + * `false` - Path does not exist + * `true` - Path exists + * `'FILE'` - Path exists, and is not a directory + * `'DIR'` - Path exists, and is a directory + * `[file, entries, ...]` - Path exists, is a directory, and the + array value is the results of `fs.readdir` +* `statCache` Cache of `fs.stat` results, to prevent statting the same + path multiple times. +* `symlinks` A record of which paths are symbolic links, which is + relevant in resolving `**` patterns. +* `realpathCache` An optional object which is passed to `fs.realpath` + to minimize unnecessary syscalls. It is stored on the instantiated + Glob object, and may be re-used. + +### Events + +* `end` When the matching is finished, this is emitted with all the + matches found. 
If the `nonull` option is set, and no match was found, + then the `matches` list contains the original pattern. The matches + are sorted, unless the `nosort` flag is set. +* `match` Every time a match is found, this is emitted with the specific + thing that matched. It is not deduplicated or resolved to a realpath. +* `error` Emitted when an unexpected error is encountered, or whenever + any fs error occurs if `options.strict` is set. +* `abort` When `abort()` is called, this event is raised. + +### Methods + +* `pause` Temporarily stop the search +* `resume` Resume the search +* `abort` Stop the search forever + +### Options + +All the options that can be passed to Minimatch can also be passed to +Glob to change pattern matching behavior. Also, some have been added, +or have glob-specific ramifications. + +All options are false by default, unless otherwise noted. + +All options are added to the Glob object, as well. + +If you are running many `glob` operations, you can pass a Glob object +as the `options` argument to a subsequent operation to shortcut some +`stat` and `readdir` calls. At the very least, you may pass in shared +`symlinks`, `statCache`, `realpathCache`, and `cache` options, so that +parallel glob operations will be sped up by sharing information about +the filesystem. + +* `cwd` The current working directory in which to search. Defaults + to `process.cwd()`. +* `root` The place where patterns starting with `/` will be mounted + onto. Defaults to `path.resolve(options.cwd, "/")` (`/` on Unix + systems, and `C:\` or some such on Windows.) +* `dot` Include `.dot` files in normal matches and `globstar` matches. + Note that an explicit dot in a portion of the pattern will always + match dot files. +* `nomount` By default, a pattern starting with a forward-slash will be + "mounted" onto the root setting, so that a valid filesystem path is + returned. Set this flag to disable that behavior. +* `mark` Add a `/` character to directory matches. Note that this + requires additional stat calls. +* `nosort` Don't sort the results. +* `stat` Set to true to stat *all* results. This reduces performance + somewhat, and is completely unnecessary, unless `readdir` is presumed + to be an untrustworthy indicator of file existence. +* `silent` When an unusual error is encountered when attempting to + read a directory, a warning will be printed to stderr. Set the + `silent` option to true to suppress these warnings. +* `strict` When an unusual error is encountered when attempting to + read a directory, the process will just continue on in search of + other matches. Set the `strict` option to raise an error in these + cases. +* `cache` See `cache` property above. Pass in a previously generated + cache object to save some fs calls. +* `statCache` A cache of results of filesystem information, to prevent + unnecessary stat calls. While it should not normally be necessary + to set this, you may pass the statCache from one glob() call to the + options object of another, if you know that the filesystem will not + change between calls. (See "Race Conditions" below.) +* `symlinks` A cache of known symbolic links. You may pass in a + previously generated `symlinks` object to save `lstat` calls when + resolving `**` matches. +* `sync` DEPRECATED: use `glob.sync(pattern, opts)` instead. +* `nounique` In some cases, brace-expanded patterns can result in the + same file showing up multiple times in the result set. By default, + this implementation prevents duplicates in the result set. 
Set this + flag to disable that behavior. +* `nonull` Set to never return an empty set, instead returning a set + containing the pattern itself. This is the default in glob(3). +* `debug` Set to enable debug logging in minimatch and glob. +* `nobrace` Do not expand `{a,b}` and `{1..3}` brace sets. +* `noglobstar` Do not match `**` against multiple filenames. (Ie, + treat it as a normal `*` instead.) +* `noext` Do not match `+(a|b)` "extglob" patterns. +* `nocase` Perform a case-insensitive match. Note: on + case-insensitive filesystems, non-magic patterns will match by + default, since `stat` and `readdir` will not raise errors. +* `matchBase` Perform a basename-only match if the pattern does not + contain any slash characters. That is, `*.js` would be treated as + equivalent to `**/*.js`, matching all js files in all directories. +* `nodir` Do not match directories, only files. (Note: to match + *only* directories, simply put a `/` at the end of the pattern.) +* `ignore` Add a pattern or an array of glob patterns to exclude matches. + Note: `ignore` patterns are *always* in `dot:true` mode, regardless + of any other settings. +* `follow` Follow symlinked directories when expanding `**` patterns. + Note that this can result in a lot of duplicate references in the + presence of cyclic links. +* `realpath` Set to true to call `fs.realpath` on all of the results. + In the case of a symlink that cannot be resolved, the full absolute + path to the matched entry is returned (though it will usually be a + broken symlink) +* `absolute` Set to true to always receive absolute paths for matched + files. Unlike `realpath`, this also affects the values returned in + the `match` event. +* `fs` File-system object with Node's `fs` API. By default, the built-in + `fs` module will be used. Set to a volume provided by a library like + `memfs` to avoid using the "real" file-system. + +## Comparisons to other fnmatch/glob implementations + +While strict compliance with the existing standards is a worthwhile +goal, some discrepancies exist between node-glob and other +implementations, and are intentional. + +The double-star character `**` is supported by default, unless the +`noglobstar` flag is set. This is supported in the manner of bsdglob +and bash 4.3, where `**` only has special significance if it is the only +thing in a path part. That is, `a/**/b` will match `a/x/y/b`, but +`a/**b` will not. + +Note that symlinked directories are not crawled as part of a `**`, +though their contents may match against subsequent portions of the +pattern. This prevents infinite loops and duplicates and the like. + +If an escaped pattern has no matches, and the `nonull` flag is set, +then glob returns the pattern as-provided, rather than +interpreting the character escapes. For example, +`glob.match([], "\\*a\\?")` will return `"\\*a\\?"` rather than +`"*a?"`. This is akin to setting the `nullglob` option in bash, except +that it does not resolve escaped pattern characters. + +If brace expansion is not disabled, then it is performed before any +other interpretation of the glob pattern. Thus, a pattern like +`+(a|{b),c)}`, which would not be valid in bash or zsh, is expanded +**first** into the set of `+(a|b)` and `+(a|c)`, and those patterns are +checked for validity. Since those two are valid, matching proceeds. + +### Comments and Negation + +Previously, this module let you mark a pattern as a "comment" if it +started with a `#` character, or a "negated" pattern if it started +with a `!` character. 
+ +These options were deprecated in version 5, and removed in version 6. + +To specify things that should not match, use the `ignore` option. + +## Windows + +**Please only use forward-slashes in glob expressions.** + +Though windows uses either `/` or `\` as its path separator, only `/` +characters are used by this glob implementation. You must use +forward-slashes **only** in glob expressions. Back-slashes will always +be interpreted as escape characters, not path separators. + +Results from absolute patterns such as `/foo/*` are mounted onto the +root setting using `path.join`. On windows, this will by default result +in `/foo/*` matching `C:\foo\bar.txt`. + +## Race Conditions + +Glob searching, by its very nature, is susceptible to race conditions, +since it relies on directory walking and such. + +As a result, it is possible that a file that exists when glob looks for +it may have been deleted or modified by the time it returns the result. + +As part of its internal implementation, this program caches all stat +and readdir calls that it makes, in order to cut down on system +overhead. However, this also makes it even more susceptible to races, +especially if the cache or statCache objects are reused between glob +calls. + +Users are thus advised not to use a glob result as a guarantee of +filesystem state in the face of rapid changes. For the vast majority +of operations, this is never a problem. + +## Glob Logo +Glob's logo was created by [Tanya Brassie](http://tanyabrassie.com/). Logo files can be found [here](https://github.com/isaacs/node-glob/tree/master/logo). + +The logo is licensed under a [Creative Commons Attribution-ShareAlike 4.0 International License](https://creativecommons.org/licenses/by-sa/4.0/). + +## Contributing + +Any change to behavior (including bugfixes) must come with a test. + +Patches that fail tests or reduce performance will be rejected. + +``` +# to run tests +npm test + +# to re-generate test fixtures +npm run test-regen + +# to benchmark against bash/zsh +npm run bench + +# to profile javascript +npm run prof +``` + +![](oh-my-glob.gif) diff --git a/node_modules/glob/common.js b/node_modules/glob/common.js new file mode 100644 index 00000000..424c46e1 --- /dev/null +++ b/node_modules/glob/common.js @@ -0,0 +1,238 @@ +exports.setopts = setopts +exports.ownProp = ownProp +exports.makeAbs = makeAbs +exports.finish = finish +exports.mark = mark +exports.isIgnored = isIgnored +exports.childrenIgnored = childrenIgnored + +function ownProp (obj, field) { + return Object.prototype.hasOwnProperty.call(obj, field) +} + +var fs = require("fs") +var path = require("path") +var minimatch = require("minimatch") +var isAbsolute = require("path-is-absolute") +var Minimatch = minimatch.Minimatch + +function alphasort (a, b) { + return a.localeCompare(b, 'en') +} + +function setupIgnores (self, options) { + self.ignore = options.ignore || [] + + if (!Array.isArray(self.ignore)) + self.ignore = [self.ignore] + + if (self.ignore.length) { + self.ignore = self.ignore.map(ignoreMap) + } +} + +// ignore patterns are always in dot:true mode. +function ignoreMap (pattern) { + var gmatcher = null + if (pattern.slice(-3) === '/**') { + var gpattern = pattern.replace(/(\/\*\*)+$/, '') + gmatcher = new Minimatch(gpattern, { dot: true }) + } + + return { + matcher: new Minimatch(pattern, { dot: true }), + gmatcher: gmatcher + } +} + +function setopts (self, pattern, options) { + if (!options) + options = {} + + // base-matching: just use globstar for that. 
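+  // e.g. matchBase rewrites a slash-less pattern like "*.js" to "**/*.js",
+  // which is why it cannot be combined with noglobstar.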
+ if (options.matchBase && -1 === pattern.indexOf("/")) { + if (options.noglobstar) { + throw new Error("base matching requires globstar") + } + pattern = "**/" + pattern + } + + self.silent = !!options.silent + self.pattern = pattern + self.strict = options.strict !== false + self.realpath = !!options.realpath + self.realpathCache = options.realpathCache || Object.create(null) + self.follow = !!options.follow + self.dot = !!options.dot + self.mark = !!options.mark + self.nodir = !!options.nodir + if (self.nodir) + self.mark = true + self.sync = !!options.sync + self.nounique = !!options.nounique + self.nonull = !!options.nonull + self.nosort = !!options.nosort + self.nocase = !!options.nocase + self.stat = !!options.stat + self.noprocess = !!options.noprocess + self.absolute = !!options.absolute + self.fs = options.fs || fs + + self.maxLength = options.maxLength || Infinity + self.cache = options.cache || Object.create(null) + self.statCache = options.statCache || Object.create(null) + self.symlinks = options.symlinks || Object.create(null) + + setupIgnores(self, options) + + self.changedCwd = false + var cwd = process.cwd() + if (!ownProp(options, "cwd")) + self.cwd = cwd + else { + self.cwd = path.resolve(options.cwd) + self.changedCwd = self.cwd !== cwd + } + + self.root = options.root || path.resolve(self.cwd, "/") + self.root = path.resolve(self.root) + if (process.platform === "win32") + self.root = self.root.replace(/\\/g, "/") + + // TODO: is an absolute `cwd` supposed to be resolved against `root`? + // e.g. { cwd: '/test', root: __dirname } === path.join(__dirname, '/test') + self.cwdAbs = isAbsolute(self.cwd) ? self.cwd : makeAbs(self, self.cwd) + if (process.platform === "win32") + self.cwdAbs = self.cwdAbs.replace(/\\/g, "/") + self.nomount = !!options.nomount + + // disable comments and negation in Minimatch. + // Note that they are not supported in Glob itself anyway. + options.nonegate = true + options.nocomment = true + // always treat \ in patterns as escapes, not path separators + options.allowWindowsEscape = false + + self.minimatch = new Minimatch(pattern, options) + self.options = self.minimatch.options +} + +function finish (self) { + var nou = self.nounique + var all = nou ? 
[] : Object.create(null) + + for (var i = 0, l = self.matches.length; i < l; i ++) { + var matches = self.matches[i] + if (!matches || Object.keys(matches).length === 0) { + if (self.nonull) { + // do like the shell, and spit out the literal glob + var literal = self.minimatch.globSet[i] + if (nou) + all.push(literal) + else + all[literal] = true + } + } else { + // had matches + var m = Object.keys(matches) + if (nou) + all.push.apply(all, m) + else + m.forEach(function (m) { + all[m] = true + }) + } + } + + if (!nou) + all = Object.keys(all) + + if (!self.nosort) + all = all.sort(alphasort) + + // at *some* point we statted all of these + if (self.mark) { + for (var i = 0; i < all.length; i++) { + all[i] = self._mark(all[i]) + } + if (self.nodir) { + all = all.filter(function (e) { + var notDir = !(/\/$/.test(e)) + var c = self.cache[e] || self.cache[makeAbs(self, e)] + if (notDir && c) + notDir = c !== 'DIR' && !Array.isArray(c) + return notDir + }) + } + } + + if (self.ignore.length) + all = all.filter(function(m) { + return !isIgnored(self, m) + }) + + self.found = all +} + +function mark (self, p) { + var abs = makeAbs(self, p) + var c = self.cache[abs] + var m = p + if (c) { + var isDir = c === 'DIR' || Array.isArray(c) + var slash = p.slice(-1) === '/' + + if (isDir && !slash) + m += '/' + else if (!isDir && slash) + m = m.slice(0, -1) + + if (m !== p) { + var mabs = makeAbs(self, m) + self.statCache[mabs] = self.statCache[abs] + self.cache[mabs] = self.cache[abs] + } + } + + return m +} + +// lotta situps... +function makeAbs (self, f) { + var abs = f + if (f.charAt(0) === '/') { + abs = path.join(self.root, f) + } else if (isAbsolute(f) || f === '') { + abs = f + } else if (self.changedCwd) { + abs = path.resolve(self.cwd, f) + } else { + abs = path.resolve(f) + } + + if (process.platform === 'win32') + abs = abs.replace(/\\/g, '/') + + return abs +} + + +// Return true, if pattern ends with globstar '**', for the accompanying parent directory. +// Ex:- If node_modules/** is the pattern, add 'node_modules' to ignore list along with it's contents +function isIgnored (self, path) { + if (!self.ignore.length) + return false + + return self.ignore.some(function(item) { + return item.matcher.match(path) || !!(item.gmatcher && item.gmatcher.match(path)) + }) +} + +function childrenIgnored (self, path) { + if (!self.ignore.length) + return false + + return self.ignore.some(function(item) { + return !!(item.gmatcher && item.gmatcher.match(path)) + }) +} diff --git a/node_modules/glob/glob.js b/node_modules/glob/glob.js new file mode 100644 index 00000000..37a4d7e6 --- /dev/null +++ b/node_modules/glob/glob.js @@ -0,0 +1,790 @@ +// Approach: +// +// 1. Get the minimatch set +// 2. For each pattern in the set, PROCESS(pattern, false) +// 3. Store matches per-set, then uniq them +// +// PROCESS(pattern, inGlobStar) +// Get the first [n] items from pattern that are all strings +// Join these together. This is PREFIX. +// If there is no more remaining, then stat(PREFIX) and +// add to matches if it succeeds. END. +// +// If inGlobStar and PREFIX is symlink and points to dir +// set ENTRIES = [] +// else readdir(PREFIX) as ENTRIES +// If fail, END +// +// with ENTRIES +// If pattern[n] is GLOBSTAR +// // handle the case where the globstar match is empty +// // by pruning it out, and testing the resulting pattern +// PROCESS(pattern[0..n] + pattern[n+1 .. $], false) +// // handle other cases. 
+// for ENTRY in ENTRIES (not dotfiles) +// // attach globstar + tail onto the entry +// // Mark that this entry is a globstar match +// PROCESS(pattern[0..n] + ENTRY + pattern[n .. $], true) +// +// else // not globstar +// for ENTRY in ENTRIES (not dotfiles, unless pattern[n] is dot) +// Test ENTRY against pattern[n] +// If fails, continue +// If passes, PROCESS(pattern[0..n] + item + pattern[n+1 .. $]) +// +// Caveat: +// Cache all stats and readdirs results to minimize syscall. Since all +// we ever care about is existence and directory-ness, we can just keep +// `true` for files, and [children,...] for directories, or `false` for +// things that don't exist. + +module.exports = glob + +var rp = require('fs.realpath') +var minimatch = require('minimatch') +var Minimatch = minimatch.Minimatch +var inherits = require('inherits') +var EE = require('events').EventEmitter +var path = require('path') +var assert = require('assert') +var isAbsolute = require('path-is-absolute') +var globSync = require('./sync.js') +var common = require('./common.js') +var setopts = common.setopts +var ownProp = common.ownProp +var inflight = require('inflight') +var util = require('util') +var childrenIgnored = common.childrenIgnored +var isIgnored = common.isIgnored + +var once = require('once') + +function glob (pattern, options, cb) { + if (typeof options === 'function') cb = options, options = {} + if (!options) options = {} + + if (options.sync) { + if (cb) + throw new TypeError('callback provided to sync glob') + return globSync(pattern, options) + } + + return new Glob(pattern, options, cb) +} + +glob.sync = globSync +var GlobSync = glob.GlobSync = globSync.GlobSync + +// old api surface +glob.glob = glob + +function extend (origin, add) { + if (add === null || typeof add !== 'object') { + return origin + } + + var keys = Object.keys(add) + var i = keys.length + while (i--) { + origin[keys[i]] = add[keys[i]] + } + return origin +} + +glob.hasMagic = function (pattern, options_) { + var options = extend({}, options_) + options.noprocess = true + + var g = new Glob(pattern, options) + var set = g.minimatch.set + + if (!pattern) + return false + + if (set.length > 1) + return true + + for (var j = 0; j < set[0].length; j++) { + if (typeof set[0][j] !== 'string') + return true + } + + return false +} + +glob.Glob = Glob +inherits(Glob, EE) +function Glob (pattern, options, cb) { + if (typeof options === 'function') { + cb = options + options = null + } + + if (options && options.sync) { + if (cb) + throw new TypeError('callback provided to sync glob') + return new GlobSync(pattern, options) + } + + if (!(this instanceof Glob)) + return new Glob(pattern, options, cb) + + setopts(this, pattern, options) + this._didRealPath = false + + // process each pattern in the minimatch set + var n = this.minimatch.set.length + + // The matches are stored as {: true,...} so that + // duplicates are automagically pruned. + // Later, we do an Object.keys() on these. + // Keep them as a list so we can fill in when nonull is set. 
+ this.matches = new Array(n) + + if (typeof cb === 'function') { + cb = once(cb) + this.on('error', cb) + this.on('end', function (matches) { + cb(null, matches) + }) + } + + var self = this + this._processing = 0 + + this._emitQueue = [] + this._processQueue = [] + this.paused = false + + if (this.noprocess) + return this + + if (n === 0) + return done() + + var sync = true + for (var i = 0; i < n; i ++) { + this._process(this.minimatch.set[i], i, false, done) + } + sync = false + + function done () { + --self._processing + if (self._processing <= 0) { + if (sync) { + process.nextTick(function () { + self._finish() + }) + } else { + self._finish() + } + } + } +} + +Glob.prototype._finish = function () { + assert(this instanceof Glob) + if (this.aborted) + return + + if (this.realpath && !this._didRealpath) + return this._realpath() + + common.finish(this) + this.emit('end', this.found) +} + +Glob.prototype._realpath = function () { + if (this._didRealpath) + return + + this._didRealpath = true + + var n = this.matches.length + if (n === 0) + return this._finish() + + var self = this + for (var i = 0; i < this.matches.length; i++) + this._realpathSet(i, next) + + function next () { + if (--n === 0) + self._finish() + } +} + +Glob.prototype._realpathSet = function (index, cb) { + var matchset = this.matches[index] + if (!matchset) + return cb() + + var found = Object.keys(matchset) + var self = this + var n = found.length + + if (n === 0) + return cb() + + var set = this.matches[index] = Object.create(null) + found.forEach(function (p, i) { + // If there's a problem with the stat, then it means that + // one or more of the links in the realpath couldn't be + // resolved. just return the abs value in that case. + p = self._makeAbs(p) + rp.realpath(p, self.realpathCache, function (er, real) { + if (!er) + set[real] = true + else if (er.syscall === 'stat') + set[p] = true + else + self.emit('error', er) // srsly wtf right here + + if (--n === 0) { + self.matches[index] = set + cb() + } + }) + }) +} + +Glob.prototype._mark = function (p) { + return common.mark(this, p) +} + +Glob.prototype._makeAbs = function (f) { + return common.makeAbs(this, f) +} + +Glob.prototype.abort = function () { + this.aborted = true + this.emit('abort') +} + +Glob.prototype.pause = function () { + if (!this.paused) { + this.paused = true + this.emit('pause') + } +} + +Glob.prototype.resume = function () { + if (this.paused) { + this.emit('resume') + this.paused = false + if (this._emitQueue.length) { + var eq = this._emitQueue.slice(0) + this._emitQueue.length = 0 + for (var i = 0; i < eq.length; i ++) { + var e = eq[i] + this._emitMatch(e[0], e[1]) + } + } + if (this._processQueue.length) { + var pq = this._processQueue.slice(0) + this._processQueue.length = 0 + for (var i = 0; i < pq.length; i ++) { + var p = pq[i] + this._processing-- + this._process(p[0], p[1], p[2], p[3]) + } + } + } +} + +Glob.prototype._process = function (pattern, index, inGlobStar, cb) { + assert(this instanceof Glob) + assert(typeof cb === 'function') + + if (this.aborted) + return + + this._processing++ + if (this.paused) { + this._processQueue.push([pattern, index, inGlobStar, cb]) + return + } + + //console.error('PROCESS %d', this._processing, pattern) + + // Get the first [n] parts of pattern that are all strings. + var n = 0 + while (typeof pattern[n] === 'string') { + n ++ + } + // now n is the index of the first one that is *not* a string. 
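+  // n === pattern.length: the pattern is entirely literal, so just stat it;
+  // n === 0: the very first part has magic, so read the cwd;
+  // otherwise: a literal prefix is followed by magic, so read that prefix.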
+ + // see if there's anything else + var prefix + switch (n) { + // if not, then this is rather simple + case pattern.length: + this._processSimple(pattern.join('/'), index, cb) + return + + case 0: + // pattern *starts* with some non-trivial item. + // going to readdir(cwd), but not include the prefix in matches. + prefix = null + break + + default: + // pattern has some string bits in the front. + // whatever it starts with, whether that's 'absolute' like /foo/bar, + // or 'relative' like '../baz' + prefix = pattern.slice(0, n).join('/') + break + } + + var remain = pattern.slice(n) + + // get the list of entries. + var read + if (prefix === null) + read = '.' + else if (isAbsolute(prefix) || + isAbsolute(pattern.map(function (p) { + return typeof p === 'string' ? p : '[*]' + }).join('/'))) { + if (!prefix || !isAbsolute(prefix)) + prefix = '/' + prefix + read = prefix + } else + read = prefix + + var abs = this._makeAbs(read) + + //if ignored, skip _processing + if (childrenIgnored(this, read)) + return cb() + + var isGlobStar = remain[0] === minimatch.GLOBSTAR + if (isGlobStar) + this._processGlobStar(prefix, read, abs, remain, index, inGlobStar, cb) + else + this._processReaddir(prefix, read, abs, remain, index, inGlobStar, cb) +} + +Glob.prototype._processReaddir = function (prefix, read, abs, remain, index, inGlobStar, cb) { + var self = this + this._readdir(abs, inGlobStar, function (er, entries) { + return self._processReaddir2(prefix, read, abs, remain, index, inGlobStar, entries, cb) + }) +} + +Glob.prototype._processReaddir2 = function (prefix, read, abs, remain, index, inGlobStar, entries, cb) { + + // if the abs isn't a dir, then nothing can match! + if (!entries) + return cb() + + // It will only match dot entries if it starts with a dot, or if + // dot is set. Stuff like @(.foo|.bar) isn't allowed. + var pn = remain[0] + var negate = !!this.minimatch.negate + var rawGlob = pn._glob + var dotOk = this.dot || rawGlob.charAt(0) === '.' + + var matchedEntries = [] + for (var i = 0; i < entries.length; i++) { + var e = entries[i] + if (e.charAt(0) !== '.' || dotOk) { + var m + if (negate && !prefix) { + m = !e.match(pn) + } else { + m = e.match(pn) + } + if (m) + matchedEntries.push(e) + } + } + + //console.error('prd2', prefix, entries, remain[0]._glob, matchedEntries) + + var len = matchedEntries.length + // If there are no matched entries, then nothing matches. + if (len === 0) + return cb() + + // if this is the last remaining pattern bit, then no need for + // an additional stat *unless* the user has specified mark or + // stat explicitly. We know they exist, since readdir returned + // them. + + if (remain.length === 1 && !this.mark && !this.stat) { + if (!this.matches[index]) + this.matches[index] = Object.create(null) + + for (var i = 0; i < len; i ++) { + var e = matchedEntries[i] + if (prefix) { + if (prefix !== '/') + e = prefix + '/' + e + else + e = prefix + e + } + + if (e.charAt(0) === '/' && !this.nomount) { + e = path.join(this.root, e) + } + this._emitMatch(index, e) + } + // This was the last one, and no stats were needed + return cb() + } + + // now test all matched entries as stand-ins for that part + // of the pattern. 
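+  // (each matched entry is substituted for this pattern part and the result
+  // is re-processed with the remaining parts appended)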
+ remain.shift() + for (var i = 0; i < len; i ++) { + var e = matchedEntries[i] + var newPattern + if (prefix) { + if (prefix !== '/') + e = prefix + '/' + e + else + e = prefix + e + } + this._process([e].concat(remain), index, inGlobStar, cb) + } + cb() +} + +Glob.prototype._emitMatch = function (index, e) { + if (this.aborted) + return + + if (isIgnored(this, e)) + return + + if (this.paused) { + this._emitQueue.push([index, e]) + return + } + + var abs = isAbsolute(e) ? e : this._makeAbs(e) + + if (this.mark) + e = this._mark(e) + + if (this.absolute) + e = abs + + if (this.matches[index][e]) + return + + if (this.nodir) { + var c = this.cache[abs] + if (c === 'DIR' || Array.isArray(c)) + return + } + + this.matches[index][e] = true + + var st = this.statCache[abs] + if (st) + this.emit('stat', e, st) + + this.emit('match', e) +} + +Glob.prototype._readdirInGlobStar = function (abs, cb) { + if (this.aborted) + return + + // follow all symlinked directories forever + // just proceed as if this is a non-globstar situation + if (this.follow) + return this._readdir(abs, false, cb) + + var lstatkey = 'lstat\0' + abs + var self = this + var lstatcb = inflight(lstatkey, lstatcb_) + + if (lstatcb) + self.fs.lstat(abs, lstatcb) + + function lstatcb_ (er, lstat) { + if (er && er.code === 'ENOENT') + return cb() + + var isSym = lstat && lstat.isSymbolicLink() + self.symlinks[abs] = isSym + + // If it's not a symlink or a dir, then it's definitely a regular file. + // don't bother doing a readdir in that case. + if (!isSym && lstat && !lstat.isDirectory()) { + self.cache[abs] = 'FILE' + cb() + } else + self._readdir(abs, false, cb) + } +} + +Glob.prototype._readdir = function (abs, inGlobStar, cb) { + if (this.aborted) + return + + cb = inflight('readdir\0'+abs+'\0'+inGlobStar, cb) + if (!cb) + return + + //console.error('RD %j %j', +inGlobStar, abs) + if (inGlobStar && !ownProp(this.symlinks, abs)) + return this._readdirInGlobStar(abs, cb) + + if (ownProp(this.cache, abs)) { + var c = this.cache[abs] + if (!c || c === 'FILE') + return cb() + + if (Array.isArray(c)) + return cb(null, c) + } + + var self = this + self.fs.readdir(abs, readdirCb(this, abs, cb)) +} + +function readdirCb (self, abs, cb) { + return function (er, entries) { + if (er) + self._readdirError(abs, er, cb) + else + self._readdirEntries(abs, entries, cb) + } +} + +Glob.prototype._readdirEntries = function (abs, entries, cb) { + if (this.aborted) + return + + // if we haven't asked to stat everything, then just + // assume that everything in there exists, so we can avoid + // having to stat it a second time. + if (!this.mark && !this.stat) { + for (var i = 0; i < entries.length; i ++) { + var e = entries[i] + if (abs === '/') + e = abs + e + else + e = abs + '/' + e + this.cache[e] = true + } + } + + this.cache[abs] = entries + return cb(null, entries) +} + +Glob.prototype._readdirError = function (f, er, cb) { + if (this.aborted) + return + + // handle errors, and cache the information + switch (er.code) { + case 'ENOTSUP': // https://github.com/isaacs/node-glob/issues/205 + case 'ENOTDIR': // totally normal. means it *does* exist. 
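+      // the path exists but is not a directory, so cache it as a plain file;
+      // a cwd that is not a directory is treated as a fatal error below.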
+ var abs = this._makeAbs(f) + this.cache[abs] = 'FILE' + if (abs === this.cwdAbs) { + var error = new Error(er.code + ' invalid cwd ' + this.cwd) + error.path = this.cwd + error.code = er.code + this.emit('error', error) + this.abort() + } + break + + case 'ENOENT': // not terribly unusual + case 'ELOOP': + case 'ENAMETOOLONG': + case 'UNKNOWN': + this.cache[this._makeAbs(f)] = false + break + + default: // some unusual error. Treat as failure. + this.cache[this._makeAbs(f)] = false + if (this.strict) { + this.emit('error', er) + // If the error is handled, then we abort + // if not, we threw out of here + this.abort() + } + if (!this.silent) + console.error('glob error', er) + break + } + + return cb() +} + +Glob.prototype._processGlobStar = function (prefix, read, abs, remain, index, inGlobStar, cb) { + var self = this + this._readdir(abs, inGlobStar, function (er, entries) { + self._processGlobStar2(prefix, read, abs, remain, index, inGlobStar, entries, cb) + }) +} + + +Glob.prototype._processGlobStar2 = function (prefix, read, abs, remain, index, inGlobStar, entries, cb) { + //console.error('pgs2', prefix, remain[0], entries) + + // no entries means not a dir, so it can never have matches + // foo.txt/** doesn't match foo.txt + if (!entries) + return cb() + + // test without the globstar, and with every child both below + // and replacing the globstar. + var remainWithoutGlobStar = remain.slice(1) + var gspref = prefix ? [ prefix ] : [] + var noGlobStar = gspref.concat(remainWithoutGlobStar) + + // the noGlobStar pattern exits the inGlobStar state + this._process(noGlobStar, index, false, cb) + + var isSym = this.symlinks[abs] + var len = entries.length + + // If it's a symlink, and we're in a globstar, then stop + if (isSym && inGlobStar) + return cb() + + for (var i = 0; i < len; i++) { + var e = entries[i] + if (e.charAt(0) === '.' && !this.dot) + continue + + // these two cases enter the inGlobStar state + var instead = gspref.concat(entries[i], remainWithoutGlobStar) + this._process(instead, index, true, cb) + + var below = gspref.concat(entries[i], remain) + this._process(below, index, true, cb) + } + + cb() +} + +Glob.prototype._processSimple = function (prefix, index, cb) { + // XXX review this. Shouldn't it be doing the mounting etc + // before doing stat? kinda weird? 
+ var self = this + this._stat(prefix, function (er, exists) { + self._processSimple2(prefix, index, er, exists, cb) + }) +} +Glob.prototype._processSimple2 = function (prefix, index, er, exists, cb) { + + //console.error('ps2', prefix, exists) + + if (!this.matches[index]) + this.matches[index] = Object.create(null) + + // If it doesn't exist, then just mark the lack of results + if (!exists) + return cb() + + if (prefix && isAbsolute(prefix) && !this.nomount) { + var trail = /[\/\\]$/.test(prefix) + if (prefix.charAt(0) === '/') { + prefix = path.join(this.root, prefix) + } else { + prefix = path.resolve(this.root, prefix) + if (trail) + prefix += '/' + } + } + + if (process.platform === 'win32') + prefix = prefix.replace(/\\/g, '/') + + // Mark this as a match + this._emitMatch(index, prefix) + cb() +} + +// Returns either 'DIR', 'FILE', or false +Glob.prototype._stat = function (f, cb) { + var abs = this._makeAbs(f) + var needDir = f.slice(-1) === '/' + + if (f.length > this.maxLength) + return cb() + + if (!this.stat && ownProp(this.cache, abs)) { + var c = this.cache[abs] + + if (Array.isArray(c)) + c = 'DIR' + + // It exists, but maybe not how we need it + if (!needDir || c === 'DIR') + return cb(null, c) + + if (needDir && c === 'FILE') + return cb() + + // otherwise we have to stat, because maybe c=true + // if we know it exists, but not what it is. + } + + var exists + var stat = this.statCache[abs] + if (stat !== undefined) { + if (stat === false) + return cb(null, stat) + else { + var type = stat.isDirectory() ? 'DIR' : 'FILE' + if (needDir && type === 'FILE') + return cb() + else + return cb(null, type, stat) + } + } + + var self = this + var statcb = inflight('stat\0' + abs, lstatcb_) + if (statcb) + self.fs.lstat(abs, statcb) + + function lstatcb_ (er, lstat) { + if (lstat && lstat.isSymbolicLink()) { + // If it's a symlink, then treat it as the target, unless + // the target does not exist, then treat it as a file. + return self.fs.stat(abs, function (er, stat) { + if (er) + self._stat2(f, abs, null, lstat, cb) + else + self._stat2(f, abs, er, stat, cb) + }) + } else { + self._stat2(f, abs, er, lstat, cb) + } + } +} + +Glob.prototype._stat2 = function (f, abs, er, stat, cb) { + if (er && (er.code === 'ENOENT' || er.code === 'ENOTDIR')) { + this.statCache[abs] = false + return cb() + } + + var needDir = f.slice(-1) === '/' + this.statCache[abs] = stat + + if (abs.slice(-1) === '/' && stat && !stat.isDirectory()) + return cb(null, false, stat) + + var c = true + if (stat) + c = stat.isDirectory() ? 'DIR' : 'FILE' + this.cache[abs] = this.cache[abs] || c + + if (needDir && c === 'FILE') + return cb() + + return cb(null, c, stat) +} diff --git a/node_modules/glob/package.json b/node_modules/glob/package.json new file mode 100644 index 00000000..5940b649 --- /dev/null +++ b/node_modules/glob/package.json @@ -0,0 +1,55 @@ +{ + "author": "Isaac Z. 
Schlueter (http://blog.izs.me/)", + "name": "glob", + "description": "a little globber", + "version": "7.2.3", + "publishConfig": { + "tag": "v7-legacy" + }, + "repository": { + "type": "git", + "url": "git://github.com/isaacs/node-glob.git" + }, + "main": "glob.js", + "files": [ + "glob.js", + "sync.js", + "common.js" + ], + "engines": { + "node": "*" + }, + "dependencies": { + "fs.realpath": "^1.0.0", + "inflight": "^1.0.4", + "inherits": "2", + "minimatch": "^3.1.1", + "once": "^1.3.0", + "path-is-absolute": "^1.0.0" + }, + "devDependencies": { + "memfs": "^3.2.0", + "mkdirp": "0", + "rimraf": "^2.2.8", + "tap": "^15.0.6", + "tick": "0.0.6" + }, + "tap": { + "before": "test/00-setup.js", + "after": "test/zz-cleanup.js", + "jobs": 1 + }, + "scripts": { + "prepublish": "npm run benchclean", + "profclean": "rm -f v8.log profile.txt", + "test": "tap", + "test-regen": "npm run profclean && TEST_REGEN=1 node test/00-setup.js", + "bench": "bash benchmark.sh", + "prof": "bash prof.sh && cat profile.txt", + "benchclean": "node benchclean.js" + }, + "license": "ISC", + "funding": { + "url": "https://github.com/sponsors/isaacs" + } +} diff --git a/node_modules/glob/sync.js b/node_modules/glob/sync.js new file mode 100644 index 00000000..2c4f4801 --- /dev/null +++ b/node_modules/glob/sync.js @@ -0,0 +1,486 @@ +module.exports = globSync +globSync.GlobSync = GlobSync + +var rp = require('fs.realpath') +var minimatch = require('minimatch') +var Minimatch = minimatch.Minimatch +var Glob = require('./glob.js').Glob +var util = require('util') +var path = require('path') +var assert = require('assert') +var isAbsolute = require('path-is-absolute') +var common = require('./common.js') +var setopts = common.setopts +var ownProp = common.ownProp +var childrenIgnored = common.childrenIgnored +var isIgnored = common.isIgnored + +function globSync (pattern, options) { + if (typeof options === 'function' || arguments.length === 3) + throw new TypeError('callback provided to sync glob\n'+ + 'See: https://github.com/isaacs/node-glob/issues/167') + + return new GlobSync(pattern, options).found +} + +function GlobSync (pattern, options) { + if (!pattern) + throw new Error('must provide pattern') + + if (typeof options === 'function' || arguments.length === 3) + throw new TypeError('callback provided to sync glob\n'+ + 'See: https://github.com/isaacs/node-glob/issues/167') + + if (!(this instanceof GlobSync)) + return new GlobSync(pattern, options) + + setopts(this, pattern, options) + + if (this.noprocess) + return this + + var n = this.minimatch.set.length + this.matches = new Array(n) + for (var i = 0; i < n; i ++) { + this._process(this.minimatch.set[i], i, false) + } + this._finish() +} + +GlobSync.prototype._finish = function () { + assert.ok(this instanceof GlobSync) + if (this.realpath) { + var self = this + this.matches.forEach(function (matchset, index) { + var set = self.matches[index] = Object.create(null) + for (var p in matchset) { + try { + p = self._makeAbs(p) + var real = rp.realpathSync(p, self.realpathCache) + set[real] = true + } catch (er) { + if (er.syscall === 'stat') + set[self._makeAbs(p)] = true + else + throw er + } + } + }) + } + common.finish(this) +} + + +GlobSync.prototype._process = function (pattern, index, inGlobStar) { + assert.ok(this instanceof GlobSync) + + // Get the first [n] parts of pattern that are all strings. + var n = 0 + while (typeof pattern[n] === 'string') { + n ++ + } + // now n is the index of the first one that is *not* a string. 
+ + // See if there's anything else + var prefix + switch (n) { + // if not, then this is rather simple + case pattern.length: + this._processSimple(pattern.join('/'), index) + return + + case 0: + // pattern *starts* with some non-trivial item. + // going to readdir(cwd), but not include the prefix in matches. + prefix = null + break + + default: + // pattern has some string bits in the front. + // whatever it starts with, whether that's 'absolute' like /foo/bar, + // or 'relative' like '../baz' + prefix = pattern.slice(0, n).join('/') + break + } + + var remain = pattern.slice(n) + + // get the list of entries. + var read + if (prefix === null) + read = '.' + else if (isAbsolute(prefix) || + isAbsolute(pattern.map(function (p) { + return typeof p === 'string' ? p : '[*]' + }).join('/'))) { + if (!prefix || !isAbsolute(prefix)) + prefix = '/' + prefix + read = prefix + } else + read = prefix + + var abs = this._makeAbs(read) + + //if ignored, skip processing + if (childrenIgnored(this, read)) + return + + var isGlobStar = remain[0] === minimatch.GLOBSTAR + if (isGlobStar) + this._processGlobStar(prefix, read, abs, remain, index, inGlobStar) + else + this._processReaddir(prefix, read, abs, remain, index, inGlobStar) +} + + +GlobSync.prototype._processReaddir = function (prefix, read, abs, remain, index, inGlobStar) { + var entries = this._readdir(abs, inGlobStar) + + // if the abs isn't a dir, then nothing can match! + if (!entries) + return + + // It will only match dot entries if it starts with a dot, or if + // dot is set. Stuff like @(.foo|.bar) isn't allowed. + var pn = remain[0] + var negate = !!this.minimatch.negate + var rawGlob = pn._glob + var dotOk = this.dot || rawGlob.charAt(0) === '.' + + var matchedEntries = [] + for (var i = 0; i < entries.length; i++) { + var e = entries[i] + if (e.charAt(0) !== '.' || dotOk) { + var m + if (negate && !prefix) { + m = !e.match(pn) + } else { + m = e.match(pn) + } + if (m) + matchedEntries.push(e) + } + } + + var len = matchedEntries.length + // If there are no matched entries, then nothing matches. + if (len === 0) + return + + // if this is the last remaining pattern bit, then no need for + // an additional stat *unless* the user has specified mark or + // stat explicitly. We know they exist, since readdir returned + // them. + + if (remain.length === 1 && !this.mark && !this.stat) { + if (!this.matches[index]) + this.matches[index] = Object.create(null) + + for (var i = 0; i < len; i ++) { + var e = matchedEntries[i] + if (prefix) { + if (prefix.slice(-1) !== '/') + e = prefix + '/' + e + else + e = prefix + e + } + + if (e.charAt(0) === '/' && !this.nomount) { + e = path.join(this.root, e) + } + this._emitMatch(index, e) + } + // This was the last one, and no stats were needed + return + } + + // now test all matched entries as stand-ins for that part + // of the pattern. 
+ remain.shift() + for (var i = 0; i < len; i ++) { + var e = matchedEntries[i] + var newPattern + if (prefix) + newPattern = [prefix, e] + else + newPattern = [e] + this._process(newPattern.concat(remain), index, inGlobStar) + } +} + + +GlobSync.prototype._emitMatch = function (index, e) { + if (isIgnored(this, e)) + return + + var abs = this._makeAbs(e) + + if (this.mark) + e = this._mark(e) + + if (this.absolute) { + e = abs + } + + if (this.matches[index][e]) + return + + if (this.nodir) { + var c = this.cache[abs] + if (c === 'DIR' || Array.isArray(c)) + return + } + + this.matches[index][e] = true + + if (this.stat) + this._stat(e) +} + + +GlobSync.prototype._readdirInGlobStar = function (abs) { + // follow all symlinked directories forever + // just proceed as if this is a non-globstar situation + if (this.follow) + return this._readdir(abs, false) + + var entries + var lstat + var stat + try { + lstat = this.fs.lstatSync(abs) + } catch (er) { + if (er.code === 'ENOENT') { + // lstat failed, doesn't exist + return null + } + } + + var isSym = lstat && lstat.isSymbolicLink() + this.symlinks[abs] = isSym + + // If it's not a symlink or a dir, then it's definitely a regular file. + // don't bother doing a readdir in that case. + if (!isSym && lstat && !lstat.isDirectory()) + this.cache[abs] = 'FILE' + else + entries = this._readdir(abs, false) + + return entries +} + +GlobSync.prototype._readdir = function (abs, inGlobStar) { + var entries + + if (inGlobStar && !ownProp(this.symlinks, abs)) + return this._readdirInGlobStar(abs) + + if (ownProp(this.cache, abs)) { + var c = this.cache[abs] + if (!c || c === 'FILE') + return null + + if (Array.isArray(c)) + return c + } + + try { + return this._readdirEntries(abs, this.fs.readdirSync(abs)) + } catch (er) { + this._readdirError(abs, er) + return null + } +} + +GlobSync.prototype._readdirEntries = function (abs, entries) { + // if we haven't asked to stat everything, then just + // assume that everything in there exists, so we can avoid + // having to stat it a second time. + if (!this.mark && !this.stat) { + for (var i = 0; i < entries.length; i ++) { + var e = entries[i] + if (abs === '/') + e = abs + e + else + e = abs + '/' + e + this.cache[e] = true + } + } + + this.cache[abs] = entries + + // mark and cache dir-ness + return entries +} + +GlobSync.prototype._readdirError = function (f, er) { + // handle errors, and cache the information + switch (er.code) { + case 'ENOTSUP': // https://github.com/isaacs/node-glob/issues/205 + case 'ENOTDIR': // totally normal. means it *does* exist. + var abs = this._makeAbs(f) + this.cache[abs] = 'FILE' + if (abs === this.cwdAbs) { + var error = new Error(er.code + ' invalid cwd ' + this.cwd) + error.path = this.cwd + error.code = er.code + throw error + } + break + + case 'ENOENT': // not terribly unusual + case 'ELOOP': + case 'ENAMETOOLONG': + case 'UNKNOWN': + this.cache[this._makeAbs(f)] = false + break + + default: // some unusual error. Treat as failure. + this.cache[this._makeAbs(f)] = false + if (this.strict) + throw er + if (!this.silent) + console.error('glob error', er) + break + } +} + +GlobSync.prototype._processGlobStar = function (prefix, read, abs, remain, index, inGlobStar) { + + var entries = this._readdir(abs, inGlobStar) + + // no entries means not a dir, so it can never have matches + // foo.txt/** doesn't match foo.txt + if (!entries) + return + + // test without the globstar, and with every child both below + // and replacing the globstar. 
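+  // e.g. for a/**/b with child c: try a/b (empty globstar), a/c/b, and a/c/**/b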
+ var remainWithoutGlobStar = remain.slice(1) + var gspref = prefix ? [ prefix ] : [] + var noGlobStar = gspref.concat(remainWithoutGlobStar) + + // the noGlobStar pattern exits the inGlobStar state + this._process(noGlobStar, index, false) + + var len = entries.length + var isSym = this.symlinks[abs] + + // If it's a symlink, and we're in a globstar, then stop + if (isSym && inGlobStar) + return + + for (var i = 0; i < len; i++) { + var e = entries[i] + if (e.charAt(0) === '.' && !this.dot) + continue + + // these two cases enter the inGlobStar state + var instead = gspref.concat(entries[i], remainWithoutGlobStar) + this._process(instead, index, true) + + var below = gspref.concat(entries[i], remain) + this._process(below, index, true) + } +} + +GlobSync.prototype._processSimple = function (prefix, index) { + // XXX review this. Shouldn't it be doing the mounting etc + // before doing stat? kinda weird? + var exists = this._stat(prefix) + + if (!this.matches[index]) + this.matches[index] = Object.create(null) + + // If it doesn't exist, then just mark the lack of results + if (!exists) + return + + if (prefix && isAbsolute(prefix) && !this.nomount) { + var trail = /[\/\\]$/.test(prefix) + if (prefix.charAt(0) === '/') { + prefix = path.join(this.root, prefix) + } else { + prefix = path.resolve(this.root, prefix) + if (trail) + prefix += '/' + } + } + + if (process.platform === 'win32') + prefix = prefix.replace(/\\/g, '/') + + // Mark this as a match + this._emitMatch(index, prefix) +} + +// Returns either 'DIR', 'FILE', or false +GlobSync.prototype._stat = function (f) { + var abs = this._makeAbs(f) + var needDir = f.slice(-1) === '/' + + if (f.length > this.maxLength) + return false + + if (!this.stat && ownProp(this.cache, abs)) { + var c = this.cache[abs] + + if (Array.isArray(c)) + c = 'DIR' + + // It exists, but maybe not how we need it + if (!needDir || c === 'DIR') + return c + + if (needDir && c === 'FILE') + return false + + // otherwise we have to stat, because maybe c=true + // if we know it exists, but not what it is. + } + + var exists + var stat = this.statCache[abs] + if (!stat) { + var lstat + try { + lstat = this.fs.lstatSync(abs) + } catch (er) { + if (er && (er.code === 'ENOENT' || er.code === 'ENOTDIR')) { + this.statCache[abs] = false + return false + } + } + + if (lstat && lstat.isSymbolicLink()) { + try { + stat = this.fs.statSync(abs) + } catch (er) { + stat = lstat + } + } else { + stat = lstat + } + } + + this.statCache[abs] = stat + + var c = true + if (stat) + c = stat.isDirectory() ? 'DIR' : 'FILE' + + this.cache[abs] = this.cache[abs] || c + + if (needDir && c === 'FILE') + return false + + return c +} + +GlobSync.prototype._mark = function (p) { + return common.mark(this, p) +} + +GlobSync.prototype._makeAbs = function (f) { + return common.makeAbs(this, f) +} diff --git a/node_modules/inflight/LICENSE b/node_modules/inflight/LICENSE new file mode 100644 index 00000000..05eeeb88 --- /dev/null +++ b/node_modules/inflight/LICENSE @@ -0,0 +1,15 @@ +The ISC License + +Copyright (c) Isaac Z. Schlueter + +Permission to use, copy, modify, and/or distribute this software for any +purpose with or without fee is hereby granted, provided that the above +copyright notice and this permission notice appear in all copies. + +THE SOFTWARE IS PROVIDED "AS IS" AND THE AUTHOR DISCLAIMS ALL WARRANTIES +WITH REGARD TO THIS SOFTWARE INCLUDING ALL IMPLIED WARRANTIES OF +MERCHANTABILITY AND FITNESS. 
IN NO EVENT SHALL THE AUTHOR BE LIABLE FOR +ANY SPECIAL, DIRECT, INDIRECT, OR CONSEQUENTIAL DAMAGES OR ANY DAMAGES +WHATSOEVER RESULTING FROM LOSS OF USE, DATA OR PROFITS, WHETHER IN AN +ACTION OF CONTRACT, NEGLIGENCE OR OTHER TORTIOUS ACTION, ARISING OUT OF OR +IN CONNECTION WITH THE USE OR PERFORMANCE OF THIS SOFTWARE. diff --git a/node_modules/inflight/README.md b/node_modules/inflight/README.md new file mode 100644 index 00000000..6dc89291 --- /dev/null +++ b/node_modules/inflight/README.md @@ -0,0 +1,37 @@ +# inflight + +Add callbacks to requests in flight to avoid async duplication + +## USAGE + +```javascript +var inflight = require('inflight') + +// some request that does some stuff +function req(key, callback) { + // key is any random string. like a url or filename or whatever. + // + // will return either a falsey value, indicating that the + // request for this key is already in flight, or a new callback + // which when called will call all callbacks passed to inflightk + // with the same key + callback = inflight(key, callback) + + // If we got a falsey value back, then there's already a req going + if (!callback) return + + // this is where you'd fetch the url or whatever + // callback is also once()-ified, so it can safely be assigned + // to multiple events etc. First call wins. + setTimeout(function() { + callback(null, key) + }, 100) +} + +// only assigns a single setTimeout +// when it dings, all cbs get called +req('foo', cb1) +req('foo', cb2) +req('foo', cb3) +req('foo', cb4) +``` diff --git a/node_modules/inflight/inflight.js b/node_modules/inflight/inflight.js new file mode 100644 index 00000000..48202b3c --- /dev/null +++ b/node_modules/inflight/inflight.js @@ -0,0 +1,54 @@ +var wrappy = require('wrappy') +var reqs = Object.create(null) +var once = require('once') + +module.exports = wrappy(inflight) + +function inflight (key, cb) { + if (reqs[key]) { + reqs[key].push(cb) + return null + } else { + reqs[key] = [cb] + return makeres(key) + } +} + +function makeres (key) { + return once(function RES () { + var cbs = reqs[key] + var len = cbs.length + var args = slice(arguments) + + // XXX It's somewhat ambiguous whether a new callback added in this + // pass should be queued for later execution if something in the + // list of callbacks throws, or if it should just be discarded. + // However, it's such an edge case that it hardly matters, and either + // choice is likely as surprising as the other. + // As it happens, we do go ahead and schedule it for later execution. + try { + for (var i = 0; i < len; i++) { + cbs[i].apply(null, args) + } + } finally { + if (cbs.length > len) { + // added more in the interim. + // de-zalgo, just in case, but don't call again. 
+ cbs.splice(0, len) + process.nextTick(function () { + RES.apply(null, args) + }) + } else { + delete reqs[key] + } + } + }) +} + +function slice (args) { + var length = args.length + var array = [] + + for (var i = 0; i < length; i++) array[i] = args[i] + return array +} diff --git a/node_modules/inflight/package.json b/node_modules/inflight/package.json new file mode 100644 index 00000000..6084d350 --- /dev/null +++ b/node_modules/inflight/package.json @@ -0,0 +1,29 @@ +{ + "name": "inflight", + "version": "1.0.6", + "description": "Add callbacks to requests in flight to avoid async duplication", + "main": "inflight.js", + "files": [ + "inflight.js" + ], + "dependencies": { + "once": "^1.3.0", + "wrappy": "1" + }, + "devDependencies": { + "tap": "^7.1.2" + }, + "scripts": { + "test": "tap test.js --100" + }, + "repository": { + "type": "git", + "url": "https://github.com/npm/inflight.git" + }, + "author": "Isaac Z. Schlueter (http://blog.izs.me/)", + "bugs": { + "url": "https://github.com/isaacs/inflight/issues" + }, + "homepage": "https://github.com/isaacs/inflight", + "license": "ISC" +} diff --git a/node_modules/inherits/LICENSE b/node_modules/inherits/LICENSE new file mode 100644 index 00000000..dea3013d --- /dev/null +++ b/node_modules/inherits/LICENSE @@ -0,0 +1,16 @@ +The ISC License + +Copyright (c) Isaac Z. Schlueter + +Permission to use, copy, modify, and/or distribute this software for any +purpose with or without fee is hereby granted, provided that the above +copyright notice and this permission notice appear in all copies. + +THE SOFTWARE IS PROVIDED "AS IS" AND THE AUTHOR DISCLAIMS ALL WARRANTIES WITH +REGARD TO THIS SOFTWARE INCLUDING ALL IMPLIED WARRANTIES OF MERCHANTABILITY AND +FITNESS. IN NO EVENT SHALL THE AUTHOR BE LIABLE FOR ANY SPECIAL, DIRECT, +INDIRECT, OR CONSEQUENTIAL DAMAGES OR ANY DAMAGES WHATSOEVER RESULTING FROM +LOSS OF USE, DATA OR PROFITS, WHETHER IN AN ACTION OF CONTRACT, NEGLIGENCE OR +OTHER TORTIOUS ACTION, ARISING OUT OF OR IN CONNECTION WITH THE USE OR +PERFORMANCE OF THIS SOFTWARE. + diff --git a/node_modules/inherits/README.md b/node_modules/inherits/README.md new file mode 100644 index 00000000..b1c56658 --- /dev/null +++ b/node_modules/inherits/README.md @@ -0,0 +1,42 @@ +Browser-friendly inheritance fully compatible with standard node.js +[inherits](http://nodejs.org/api/util.html#util_util_inherits_constructor_superconstructor). + +This package exports standard `inherits` from node.js `util` module in +node environment, but also provides alternative browser-friendly +implementation through [browser +field](https://gist.github.com/shtylman/4339901). Alternative +implementation is a literal copy of standard one located in standalone +module to avoid requiring of `util`. It also has a shim for old +browsers with no `Object.create` support. + +While keeping you sure you are using standard `inherits` +implementation in node.js environment, it allows bundlers such as +[browserify](https://github.com/substack/node-browserify) to not +include full `util` package to your client code if all you need is +just `inherits` function. It worth, because browser shim for `util` +package is large and `inherits` is often the single function you need +from it. + +It's recommended to use this package instead of +`require('util').inherits` for any code that has chances to be used +not only in node.js but in browser too. 
+ +## usage + +```js +var inherits = require('inherits'); +// then use exactly as the standard one +``` + +## note on version ~1.0 + +Version ~1.0 had completely different motivation and is not compatible +neither with 2.0 nor with standard node.js `inherits`. + +If you are using version ~1.0 and planning to switch to ~2.0, be +careful: + +* new version uses `super_` instead of `super` for referencing + superclass +* new version overwrites current prototype while old one preserves any + existing fields on it diff --git a/node_modules/inherits/inherits.js b/node_modules/inherits/inherits.js new file mode 100644 index 00000000..f71f2d93 --- /dev/null +++ b/node_modules/inherits/inherits.js @@ -0,0 +1,9 @@ +try { + var util = require('util'); + /* istanbul ignore next */ + if (typeof util.inherits !== 'function') throw ''; + module.exports = util.inherits; +} catch (e) { + /* istanbul ignore next */ + module.exports = require('./inherits_browser.js'); +} diff --git a/node_modules/inherits/inherits_browser.js b/node_modules/inherits/inherits_browser.js new file mode 100644 index 00000000..86bbb3dc --- /dev/null +++ b/node_modules/inherits/inherits_browser.js @@ -0,0 +1,27 @@ +if (typeof Object.create === 'function') { + // implementation from standard node.js 'util' module + module.exports = function inherits(ctor, superCtor) { + if (superCtor) { + ctor.super_ = superCtor + ctor.prototype = Object.create(superCtor.prototype, { + constructor: { + value: ctor, + enumerable: false, + writable: true, + configurable: true + } + }) + } + }; +} else { + // old school shim for old browsers + module.exports = function inherits(ctor, superCtor) { + if (superCtor) { + ctor.super_ = superCtor + var TempCtor = function () {} + TempCtor.prototype = superCtor.prototype + ctor.prototype = new TempCtor() + ctor.prototype.constructor = ctor + } + } +} diff --git a/node_modules/inherits/package.json b/node_modules/inherits/package.json new file mode 100644 index 00000000..37b4366b --- /dev/null +++ b/node_modules/inherits/package.json @@ -0,0 +1,29 @@ +{ + "name": "inherits", + "description": "Browser-friendly inheritance fully compatible with standard node.js inherits()", + "version": "2.0.4", + "keywords": [ + "inheritance", + "class", + "klass", + "oop", + "object-oriented", + "inherits", + "browser", + "browserify" + ], + "main": "./inherits.js", + "browser": "./inherits_browser.js", + "repository": "git://github.com/isaacs/inherits", + "license": "ISC", + "scripts": { + "test": "tap" + }, + "devDependencies": { + "tap": "^14.2.4" + }, + "files": [ + "inherits.js", + "inherits_browser.js" + ] +} diff --git a/node_modules/minimatch/LICENSE b/node_modules/minimatch/LICENSE new file mode 100644 index 00000000..19129e31 --- /dev/null +++ b/node_modules/minimatch/LICENSE @@ -0,0 +1,15 @@ +The ISC License + +Copyright (c) Isaac Z. Schlueter and Contributors + +Permission to use, copy, modify, and/or distribute this software for any +purpose with or without fee is hereby granted, provided that the above +copyright notice and this permission notice appear in all copies. + +THE SOFTWARE IS PROVIDED "AS IS" AND THE AUTHOR DISCLAIMS ALL WARRANTIES +WITH REGARD TO THIS SOFTWARE INCLUDING ALL IMPLIED WARRANTIES OF +MERCHANTABILITY AND FITNESS. 
IN NO EVENT SHALL THE AUTHOR BE LIABLE FOR +ANY SPECIAL, DIRECT, INDIRECT, OR CONSEQUENTIAL DAMAGES OR ANY DAMAGES +WHATSOEVER RESULTING FROM LOSS OF USE, DATA OR PROFITS, WHETHER IN AN +ACTION OF CONTRACT, NEGLIGENCE OR OTHER TORTIOUS ACTION, ARISING OUT OF OR +IN CONNECTION WITH THE USE OR PERFORMANCE OF THIS SOFTWARE. diff --git a/node_modules/minimatch/README.md b/node_modules/minimatch/README.md new file mode 100644 index 00000000..33ede1d6 --- /dev/null +++ b/node_modules/minimatch/README.md @@ -0,0 +1,230 @@ +# minimatch + +A minimal matching utility. + +[![Build Status](https://travis-ci.org/isaacs/minimatch.svg?branch=master)](http://travis-ci.org/isaacs/minimatch) + + +This is the matching library used internally by npm. + +It works by converting glob expressions into JavaScript `RegExp` +objects. + +## Usage + +```javascript +var minimatch = require("minimatch") + +minimatch("bar.foo", "*.foo") // true! +minimatch("bar.foo", "*.bar") // false! +minimatch("bar.foo", "*.+(bar|foo)", { debug: true }) // true, and noisy! +``` + +## Features + +Supports these glob features: + +* Brace Expansion +* Extended glob matching +* "Globstar" `**` matching + +See: + +* `man sh` +* `man bash` +* `man 3 fnmatch` +* `man 5 gitignore` + +## Minimatch Class + +Create a minimatch object by instantiating the `minimatch.Minimatch` class. + +```javascript +var Minimatch = require("minimatch").Minimatch +var mm = new Minimatch(pattern, options) +``` + +### Properties + +* `pattern` The original pattern the minimatch object represents. +* `options` The options supplied to the constructor. +* `set` A 2-dimensional array of regexp or string expressions. + Each row in the + array corresponds to a brace-expanded pattern. Each item in the row + corresponds to a single path-part. For example, the pattern + `{a,b/c}/d` would expand to a set of patterns like: + + [ [ a, d ] + , [ b, c, d ] ] + + If a portion of the pattern doesn't have any "magic" in it + (that is, it's something like `"foo"` rather than `fo*o?`), then it + will be left as a string rather than converted to a regular + expression. + +* `regexp` Created by the `makeRe` method. A single regular expression + expressing the entire pattern. This is useful in cases where you wish + to use the pattern somewhat like `fnmatch(3)` with `FNM_PATH` enabled. +* `negate` True if the pattern is negated. +* `comment` True if the pattern is a comment. +* `empty` True if the pattern is `""`. + +### Methods + +* `makeRe` Generate the `regexp` member if necessary, and return it. + Will return `false` if the pattern is invalid. +* `match(fname)` Return true if the filename matches the pattern, or + false otherwise. +* `matchOne(fileArray, patternArray, partial)` Take a `/`-split + filename, and match it against a single row in the `regExpSet`. This + method is mainly for internal use, but is exposed so that it can be + used by a glob-walker that needs to avoid excessive filesystem calls. + +All other methods are internal, and will be called as necessary. + +### minimatch(path, pattern, options) + +Main export. Tests a path against the pattern using the options. + +```javascript +var isJS = minimatch(file, "*.js", { matchBase: true }) +``` + +### minimatch.filter(pattern, options) + +Returns a function that tests its +supplied argument, suitable for use with `Array.filter`. 
Example: + +```javascript +var javascripts = fileList.filter(minimatch.filter("*.js", {matchBase: true})) +``` + +### minimatch.match(list, pattern, options) + +Match against the list of +files, in the style of fnmatch or glob. If nothing is matched, and +options.nonull is set, then return a list containing the pattern itself. + +```javascript +var javascripts = minimatch.match(fileList, "*.js", {matchBase: true})) +``` + +### minimatch.makeRe(pattern, options) + +Make a regular expression object from the pattern. + +## Options + +All options are `false` by default. + +### debug + +Dump a ton of stuff to stderr. + +### nobrace + +Do not expand `{a,b}` and `{1..3}` brace sets. + +### noglobstar + +Disable `**` matching against multiple folder names. + +### dot + +Allow patterns to match filenames starting with a period, even if +the pattern does not explicitly have a period in that spot. + +Note that by default, `a/**/b` will **not** match `a/.d/b`, unless `dot` +is set. + +### noext + +Disable "extglob" style patterns like `+(a|b)`. + +### nocase + +Perform a case-insensitive match. + +### nonull + +When a match is not found by `minimatch.match`, return a list containing +the pattern itself if this option is set. When not set, an empty list +is returned if there are no matches. + +### matchBase + +If set, then patterns without slashes will be matched +against the basename of the path if it contains slashes. For example, +`a?b` would match the path `/xyz/123/acb`, but not `/xyz/acb/123`. + +### nocomment + +Suppress the behavior of treating `#` at the start of a pattern as a +comment. + +### nonegate + +Suppress the behavior of treating a leading `!` character as negation. + +### flipNegate + +Returns from negate expressions the same as if they were not negated. +(Ie, true on a hit, false on a miss.) + +### partial + +Compare a partial path to a pattern. As long as the parts of the path that +are present are not contradicted by the pattern, it will be treated as a +match. This is useful in applications where you're walking through a +folder structure, and don't yet have the full path, but want to ensure that +you do not walk down paths that can never be a match. + +For example, + +```js +minimatch('/a/b', '/a/*/c/d', { partial: true }) // true, might be /a/b/c/d +minimatch('/a/b', '/**/d', { partial: true }) // true, might be /a/b/.../d +minimatch('/x/y/z', '/a/**/z', { partial: true }) // false, because x !== a +``` + +### allowWindowsEscape + +Windows path separator `\` is by default converted to `/`, which +prohibits the usage of `\` as a escape character. This flag skips that +behavior and allows using the escape character. + +## Comparisons to other fnmatch/glob implementations + +While strict compliance with the existing standards is a worthwhile +goal, some discrepancies exist between minimatch and other +implementations, and are intentional. + +If the pattern starts with a `!` character, then it is negated. Set the +`nonegate` flag to suppress this behavior, and treat leading `!` +characters normally. This is perhaps relevant if you wish to start the +pattern with a negative extglob pattern like `!(a|B)`. Multiple `!` +characters at the start of a pattern will negate the pattern multiple +times. + +If a pattern starts with `#`, then it is treated as a comment, and +will not match anything. Use `\#` to match a literal `#` at the +start of a line, or set the `nocomment` flag to suppress this behavior. 
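+
+For illustration, a small sketch of how these defaults and the `nocomment` /
+`nonegate` flags interact (behavior as documented above):
+
+```javascript
+var minimatch = require("minimatch")
+
+minimatch("#foo", "#foo")                       // false: the pattern is treated as a comment
+minimatch("#foo", "#foo", { nocomment: true })  // true: "#" is matched literally
+minimatch("bar", "!foo")                        // true: negation, and "bar" is not "foo"
+minimatch("bar", "!foo", { nonegate: true })    // false: "!foo" is matched literally
+```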
+ +The double-star character `**` is supported by default, unless the +`noglobstar` flag is set. This is supported in the manner of bsdglob +and bash 4.1, where `**` only has special significance if it is the only +thing in a path part. That is, `a/**/b` will match `a/x/y/b`, but +`a/**b` will not. + +If an escaped pattern has no matches, and the `nonull` flag is set, +then minimatch.match returns the pattern as-provided, rather than +interpreting the character escapes. For example, +`minimatch.match([], "\\*a\\?")` will return `"\\*a\\?"` rather than +`"*a?"`. This is akin to setting the `nullglob` option in bash, except +that it does not resolve escaped pattern characters. + +If brace expansion is not disabled, then it is performed before any +other interpretation of the glob pattern. Thus, a pattern like +`+(a|{b),c)}`, which would not be valid in bash or zsh, is expanded +**first** into the set of `+(a|b)` and `+(a|c)`, and those patterns are +checked for validity. Since those two are valid, matching proceeds. diff --git a/node_modules/minimatch/minimatch.js b/node_modules/minimatch/minimatch.js new file mode 100644 index 00000000..fda45ade --- /dev/null +++ b/node_modules/minimatch/minimatch.js @@ -0,0 +1,947 @@ +module.exports = minimatch +minimatch.Minimatch = Minimatch + +var path = (function () { try { return require('path') } catch (e) {}}()) || { + sep: '/' +} +minimatch.sep = path.sep + +var GLOBSTAR = minimatch.GLOBSTAR = Minimatch.GLOBSTAR = {} +var expand = require('brace-expansion') + +var plTypes = { + '!': { open: '(?:(?!(?:', close: '))[^/]*?)'}, + '?': { open: '(?:', close: ')?' }, + '+': { open: '(?:', close: ')+' }, + '*': { open: '(?:', close: ')*' }, + '@': { open: '(?:', close: ')' } +} + +// any single thing other than / +// don't need to escape / when using new RegExp() +var qmark = '[^/]' + +// * => any number of characters +var star = qmark + '*?' + +// ** when dots are allowed. Anything goes, except .. and . +// not (^ or / followed by one or two dots followed by $ or /), +// followed by anything, any number of times. +var twoStarDot = '(?:(?!(?:\\\/|^)(?:\\.{1,2})($|\\\/)).)*?' + +// not a ^ or / followed by a dot, +// followed by anything, any number of times. +var twoStarNoDot = '(?:(?!(?:\\\/|^)\\.).)*?' + +// characters that need to be escaped in RegExp. +var reSpecials = charSet('().*{}+?[]^$\\!') + +// "abc" -> { a:true, b:true, c:true } +function charSet (s) { + return s.split('').reduce(function (set, c) { + set[c] = true + return set + }, {}) +} + +// normalizes slashes. 
+var slashSplit = /\/+/ + +minimatch.filter = filter +function filter (pattern, options) { + options = options || {} + return function (p, i, list) { + return minimatch(p, pattern, options) + } +} + +function ext (a, b) { + b = b || {} + var t = {} + Object.keys(a).forEach(function (k) { + t[k] = a[k] + }) + Object.keys(b).forEach(function (k) { + t[k] = b[k] + }) + return t +} + +minimatch.defaults = function (def) { + if (!def || typeof def !== 'object' || !Object.keys(def).length) { + return minimatch + } + + var orig = minimatch + + var m = function minimatch (p, pattern, options) { + return orig(p, pattern, ext(def, options)) + } + + m.Minimatch = function Minimatch (pattern, options) { + return new orig.Minimatch(pattern, ext(def, options)) + } + m.Minimatch.defaults = function defaults (options) { + return orig.defaults(ext(def, options)).Minimatch + } + + m.filter = function filter (pattern, options) { + return orig.filter(pattern, ext(def, options)) + } + + m.defaults = function defaults (options) { + return orig.defaults(ext(def, options)) + } + + m.makeRe = function makeRe (pattern, options) { + return orig.makeRe(pattern, ext(def, options)) + } + + m.braceExpand = function braceExpand (pattern, options) { + return orig.braceExpand(pattern, ext(def, options)) + } + + m.match = function (list, pattern, options) { + return orig.match(list, pattern, ext(def, options)) + } + + return m +} + +Minimatch.defaults = function (def) { + return minimatch.defaults(def).Minimatch +} + +function minimatch (p, pattern, options) { + assertValidPattern(pattern) + + if (!options) options = {} + + // shortcut: comments match nothing. + if (!options.nocomment && pattern.charAt(0) === '#') { + return false + } + + return new Minimatch(pattern, options).match(p) +} + +function Minimatch (pattern, options) { + if (!(this instanceof Minimatch)) { + return new Minimatch(pattern, options) + } + + assertValidPattern(pattern) + + if (!options) options = {} + + pattern = pattern.trim() + + // windows support: need to use /, not \ + if (!options.allowWindowsEscape && path.sep !== '/') { + pattern = pattern.split(path.sep).join('/') + } + + this.options = options + this.set = [] + this.pattern = pattern + this.regexp = null + this.negate = false + this.comment = false + this.empty = false + this.partial = !!options.partial + + // make the set of regexps etc. + this.make() +} + +Minimatch.prototype.debug = function () {} + +Minimatch.prototype.make = make +function make () { + var pattern = this.pattern + var options = this.options + + // empty patterns and comments match nothing. + if (!options.nocomment && pattern.charAt(0) === '#') { + this.comment = true + return + } + if (!pattern) { + this.empty = true + return + } + + // step 1: figure out negation, etc. + this.parseNegate() + + // step 2: expand braces + var set = this.globSet = this.braceExpand() + + if (options.debug) this.debug = function debug() { console.error.apply(console, arguments) } + + this.debug(this.pattern, set) + + // step 3: now we have a set, so turn each one into a series of path-portion + // matching patterns. 
+ // These will be regexps, except in the case of "**", which is + // set to the GLOBSTAR object for globstar behavior, + // and will not contain any / characters + set = this.globParts = set.map(function (s) { + return s.split(slashSplit) + }) + + this.debug(this.pattern, set) + + // glob --> regexps + set = set.map(function (s, si, set) { + return s.map(this.parse, this) + }, this) + + this.debug(this.pattern, set) + + // filter out everything that didn't compile properly. + set = set.filter(function (s) { + return s.indexOf(false) === -1 + }) + + this.debug(this.pattern, set) + + this.set = set +} + +Minimatch.prototype.parseNegate = parseNegate +function parseNegate () { + var pattern = this.pattern + var negate = false + var options = this.options + var negateOffset = 0 + + if (options.nonegate) return + + for (var i = 0, l = pattern.length + ; i < l && pattern.charAt(i) === '!' + ; i++) { + negate = !negate + negateOffset++ + } + + if (negateOffset) this.pattern = pattern.substr(negateOffset) + this.negate = negate +} + +// Brace expansion: +// a{b,c}d -> abd acd +// a{b,}c -> abc ac +// a{0..3}d -> a0d a1d a2d a3d +// a{b,c{d,e}f}g -> abg acdfg acefg +// a{b,c}d{e,f}g -> abdeg acdeg abdeg abdfg +// +// Invalid sets are not expanded. +// a{2..}b -> a{2..}b +// a{b}c -> a{b}c +minimatch.braceExpand = function (pattern, options) { + return braceExpand(pattern, options) +} + +Minimatch.prototype.braceExpand = braceExpand + +function braceExpand (pattern, options) { + if (!options) { + if (this instanceof Minimatch) { + options = this.options + } else { + options = {} + } + } + + pattern = typeof pattern === 'undefined' + ? this.pattern : pattern + + assertValidPattern(pattern) + + // Thanks to Yeting Li for + // improving this regexp to avoid a ReDOS vulnerability. + if (options.nobrace || !/\{(?:(?!\{).)*\}/.test(pattern)) { + // shortcut. no need to expand. + return [pattern] + } + + return expand(pattern) +} + +var MAX_PATTERN_LENGTH = 1024 * 64 +var assertValidPattern = function (pattern) { + if (typeof pattern !== 'string') { + throw new TypeError('invalid pattern') + } + + if (pattern.length > MAX_PATTERN_LENGTH) { + throw new TypeError('pattern is too long') + } +} + +// parse a component of the expanded set. +// At this point, no pattern may contain "/" in it +// so we're going to return a 2d array, where each entry is the full +// pattern, split on '/', and then turned into a regular expression. +// A regexp is made at the end which joins each array with an +// escaped /, and another full one which joins each regexp with |. +// +// Following the lead of Bash 4.1, note that "**" only has special meaning +// when it is the *only* thing in a path portion. Otherwise, any series +// of * is equivalent to a single *. Globstar behavior is enabled by +// default, and can be disabled by setting options.noglobstar. +Minimatch.prototype.parse = parse +var SUBPARSE = {} +function parse (pattern, isSub) { + assertValidPattern(pattern) + + var options = this.options + + // shortcuts + if (pattern === '**') { + if (!options.noglobstar) + return GLOBSTAR + else + pattern = '*' + } + if (pattern === '') return '' + + var re = '' + var hasMagic = !!options.nocase + var escaping = false + // ? => one single character + var patternListStack = [] + var negativeLists = [] + var stateChar + var inClass = false + var reClassStart = -1 + var classStart = -1 + // . and .. never match anything that doesn't start with ., + // even when options.dot is set. + var patternStart = pattern.charAt(0) === '.' ? 
'' // anything + // not (start or / followed by . or .. followed by / or end) + : options.dot ? '(?!(?:^|\\\/)\\.{1,2}(?:$|\\\/))' + : '(?!\\.)' + var self = this + + function clearStateChar () { + if (stateChar) { + // we had some state-tracking character + // that wasn't consumed by this pass. + switch (stateChar) { + case '*': + re += star + hasMagic = true + break + case '?': + re += qmark + hasMagic = true + break + default: + re += '\\' + stateChar + break + } + self.debug('clearStateChar %j %j', stateChar, re) + stateChar = false + } + } + + for (var i = 0, len = pattern.length, c + ; (i < len) && (c = pattern.charAt(i)) + ; i++) { + this.debug('%s\t%s %s %j', pattern, i, re, c) + + // skip over any that are escaped. + if (escaping && reSpecials[c]) { + re += '\\' + c + escaping = false + continue + } + + switch (c) { + /* istanbul ignore next */ + case '/': { + // completely not allowed, even escaped. + // Should already be path-split by now. + return false + } + + case '\\': + clearStateChar() + escaping = true + continue + + // the various stateChar values + // for the "extglob" stuff. + case '?': + case '*': + case '+': + case '@': + case '!': + this.debug('%s\t%s %s %j <-- stateChar', pattern, i, re, c) + + // all of those are literals inside a class, except that + // the glob [!a] means [^a] in regexp + if (inClass) { + this.debug(' in class') + if (c === '!' && i === classStart + 1) c = '^' + re += c + continue + } + + // if we already have a stateChar, then it means + // that there was something like ** or +? in there. + // Handle the stateChar, then proceed with this one. + self.debug('call clearStateChar %j', stateChar) + clearStateChar() + stateChar = c + // if extglob is disabled, then +(asdf|foo) isn't a thing. + // just clear the statechar *now*, rather than even diving into + // the patternList stuff. + if (options.noext) clearStateChar() + continue + + case '(': + if (inClass) { + re += '(' + continue + } + + if (!stateChar) { + re += '\\(' + continue + } + + patternListStack.push({ + type: stateChar, + start: i - 1, + reStart: re.length, + open: plTypes[stateChar].open, + close: plTypes[stateChar].close + }) + // negation is (?:(?!js)[^/]*) + re += stateChar === '!' ? '(?:(?!(?:' : '(?:' + this.debug('plType %j %j', stateChar, re) + stateChar = false + continue + + case ')': + if (inClass || !patternListStack.length) { + re += '\\)' + continue + } + + clearStateChar() + hasMagic = true + var pl = patternListStack.pop() + // negation is (?:(?!js)[^/]*) + // The others are (?:) + re += pl.close + if (pl.type === '!') { + negativeLists.push(pl) + } + pl.reEnd = re.length + continue + + case '|': + if (inClass || !patternListStack.length || escaping) { + re += '\\|' + escaping = false + continue + } + + clearStateChar() + re += '|' + continue + + // these are mostly the same in regexp and glob + case '[': + // swallow any state-tracking char before the [ + clearStateChar() + + if (inClass) { + re += '\\' + c + continue + } + + inClass = true + classStart = i + reClassStart = re.length + re += c + continue + + case ']': + // a right bracket shall lose its special + // meaning and represent itself in + // a bracket expression if it occurs + // first in the list. -- POSIX.2 2.8.3.2 + if (i === classStart + 1 || !inClass) { + re += '\\' + c + escaping = false + continue + } + + // handle the case where we left a class open. + // "[z-a]" is valid, equivalent to "\[z-a\]" + // split where the last [ was, make sure we don't have + // an invalid re. 
if so, re-walk the contents of the + // would-be class to re-translate any characters that + // were passed through as-is + // TODO: It would probably be faster to determine this + // without a try/catch and a new RegExp, but it's tricky + // to do safely. For now, this is safe and works. + var cs = pattern.substring(classStart + 1, i) + try { + RegExp('[' + cs + ']') + } catch (er) { + // not a valid class! + var sp = this.parse(cs, SUBPARSE) + re = re.substr(0, reClassStart) + '\\[' + sp[0] + '\\]' + hasMagic = hasMagic || sp[1] + inClass = false + continue + } + + // finish up the class. + hasMagic = true + inClass = false + re += c + continue + + default: + // swallow any state char that wasn't consumed + clearStateChar() + + if (escaping) { + // no need + escaping = false + } else if (reSpecials[c] + && !(c === '^' && inClass)) { + re += '\\' + } + + re += c + + } // switch + } // for + + // handle the case where we left a class open. + // "[abc" is valid, equivalent to "\[abc" + if (inClass) { + // split where the last [ was, and escape it + // this is a huge pita. We now have to re-walk + // the contents of the would-be class to re-translate + // any characters that were passed through as-is + cs = pattern.substr(classStart + 1) + sp = this.parse(cs, SUBPARSE) + re = re.substr(0, reClassStart) + '\\[' + sp[0] + hasMagic = hasMagic || sp[1] + } + + // handle the case where we had a +( thing at the *end* + // of the pattern. + // each pattern list stack adds 3 chars, and we need to go through + // and escape any | chars that were passed through as-is for the regexp. + // Go through and escape them, taking care not to double-escape any + // | chars that were already escaped. + for (pl = patternListStack.pop(); pl; pl = patternListStack.pop()) { + var tail = re.slice(pl.reStart + pl.open.length) + this.debug('setting tail', re, pl) + // maybe some even number of \, then maybe 1 \, followed by a | + tail = tail.replace(/((?:\\{2}){0,64})(\\?)\|/g, function (_, $1, $2) { + if (!$2) { + // the | isn't already escaped, so escape it. + $2 = '\\' + } + + // need to escape all those slashes *again*, without escaping the + // one that we need for escaping the | character. As it works out, + // escaping an even number of slashes can be done by simply repeating + // it exactly after itself. That's why this trick works. + // + // I am sorry that you have to see this. + return $1 + $1 + $2 + '|' + }) + + this.debug('tail=%j\n %s', tail, tail, pl, re) + var t = pl.type === '*' ? star + : pl.type === '?' ? qmark + : '\\' + pl.type + + hasMagic = true + re = re.slice(0, pl.reStart) + t + '\\(' + tail + } + + // handle trailing things that only matter at the very end. + clearStateChar() + if (escaping) { + // trailing \\ + re += '\\\\' + } + + // only need to apply the nodot start if the re starts with + // something that could conceivably capture a dot + var addPatternStart = false + switch (re.charAt(0)) { + case '[': case '.': case '(': addPatternStart = true + } + + // Hack to work around lack of negative lookbehind in JS + // A pattern like: *.!(x).!(y|z) needs to ensure that a name + // like 'a.xyz.yz' doesn't match. So, the first negative + // lookahead, has to look ALL the way ahead, to the end of + // the pattern. 
+ for (var n = negativeLists.length - 1; n > -1; n--) { + var nl = negativeLists[n] + + var nlBefore = re.slice(0, nl.reStart) + var nlFirst = re.slice(nl.reStart, nl.reEnd - 8) + var nlLast = re.slice(nl.reEnd - 8, nl.reEnd) + var nlAfter = re.slice(nl.reEnd) + + nlLast += nlAfter + + // Handle nested stuff like *(*.js|!(*.json)), where open parens + // mean that we should *not* include the ) in the bit that is considered + // "after" the negated section. + var openParensBefore = nlBefore.split('(').length - 1 + var cleanAfter = nlAfter + for (i = 0; i < openParensBefore; i++) { + cleanAfter = cleanAfter.replace(/\)[+*?]?/, '') + } + nlAfter = cleanAfter + + var dollar = '' + if (nlAfter === '' && isSub !== SUBPARSE) { + dollar = '$' + } + var newRe = nlBefore + nlFirst + nlAfter + dollar + nlLast + re = newRe + } + + // if the re is not "" at this point, then we need to make sure + // it doesn't match against an empty path part. + // Otherwise a/* will match a/, which it should not. + if (re !== '' && hasMagic) { + re = '(?=.)' + re + } + + if (addPatternStart) { + re = patternStart + re + } + + // parsing just a piece of a larger pattern. + if (isSub === SUBPARSE) { + return [re, hasMagic] + } + + // skip the regexp for non-magical patterns + // unescape anything in it, though, so that it'll be + // an exact match against a file etc. + if (!hasMagic) { + return globUnescape(pattern) + } + + var flags = options.nocase ? 'i' : '' + try { + var regExp = new RegExp('^' + re + '$', flags) + } catch (er) /* istanbul ignore next - should be impossible */ { + // If it was an invalid regular expression, then it can't match + // anything. This trick looks for a character after the end of + // the string, which is of course impossible, except in multi-line + // mode, but it's not a /m regex. + return new RegExp('$.') + } + + regExp._glob = pattern + regExp._src = re + + return regExp +} + +minimatch.makeRe = function (pattern, options) { + return new Minimatch(pattern, options || {}).makeRe() +} + +Minimatch.prototype.makeRe = makeRe +function makeRe () { + if (this.regexp || this.regexp === false) return this.regexp + + // at this point, this.set is a 2d array of partial + // pattern strings, or "**". + // + // It's better to use .match(). This function shouldn't + // be used, really, but it's pretty convenient sometimes, + // when you just want to work with a regex. + var set = this.set + + if (!set.length) { + this.regexp = false + return this.regexp + } + var options = this.options + + var twoStar = options.noglobstar ? star + : options.dot ? twoStarDot + : twoStarNoDot + var flags = options.nocase ? 'i' : '' + + var re = set.map(function (pattern) { + return pattern.map(function (p) { + return (p === GLOBSTAR) ? twoStar + : (typeof p === 'string') ? regExpEscape(p) + : p._src + }).join('\\\/') + }).join('|') + + // must match entire pattern + // ending in a * or ** will make it less strict. + re = '^(?:' + re + ')$' + + // can match anything, as long as it's not this. + if (this.negate) re = '^(?!' 
+ re + ').*$' + + try { + this.regexp = new RegExp(re, flags) + } catch (ex) /* istanbul ignore next - should be impossible */ { + this.regexp = false + } + return this.regexp +} + +minimatch.match = function (list, pattern, options) { + options = options || {} + var mm = new Minimatch(pattern, options) + list = list.filter(function (f) { + return mm.match(f) + }) + if (mm.options.nonull && !list.length) { + list.push(pattern) + } + return list +} + +Minimatch.prototype.match = function match (f, partial) { + if (typeof partial === 'undefined') partial = this.partial + this.debug('match', f, this.pattern) + // short-circuit in the case of busted things. + // comments, etc. + if (this.comment) return false + if (this.empty) return f === '' + + if (f === '/' && partial) return true + + var options = this.options + + // windows: need to use /, not \ + if (path.sep !== '/') { + f = f.split(path.sep).join('/') + } + + // treat the test path as a set of pathparts. + f = f.split(slashSplit) + this.debug(this.pattern, 'split', f) + + // just ONE of the pattern sets in this.set needs to match + // in order for it to be valid. If negating, then just one + // match means that we have failed. + // Either way, return on the first hit. + + var set = this.set + this.debug(this.pattern, 'set', set) + + // Find the basename of the path by looking for the last non-empty segment + var filename + var i + for (i = f.length - 1; i >= 0; i--) { + filename = f[i] + if (filename) break + } + + for (i = 0; i < set.length; i++) { + var pattern = set[i] + var file = f + if (options.matchBase && pattern.length === 1) { + file = [filename] + } + var hit = this.matchOne(file, pattern, partial) + if (hit) { + if (options.flipNegate) return true + return !this.negate + } + } + + // didn't get any hits. this is success if it's a negative + // pattern, failure otherwise. + if (options.flipNegate) return false + return this.negate +} + +// set partial to true to test if, for example, +// "/a/b" matches the start of "/*/b/*/d" +// Partial means, if you run out of file before you run +// out of pattern, then that's fine, as long as all +// the parts match. +Minimatch.prototype.matchOne = function (file, pattern, partial) { + var options = this.options + + this.debug('matchOne', + { 'this': this, file: file, pattern: pattern }) + + this.debug('matchOne', file.length, pattern.length) + + for (var fi = 0, + pi = 0, + fl = file.length, + pl = pattern.length + ; (fi < fl) && (pi < pl) + ; fi++, pi++) { + this.debug('matchOne loop') + var p = pattern[pi] + var f = file[fi] + + this.debug(pattern, p, f) + + // should be impossible. + // some invalid regexp stuff in the set. + /* istanbul ignore if */ + if (p === false) return false + + if (p === GLOBSTAR) { + this.debug('GLOBSTAR', [pattern, p, f]) + + // "**" + // a/**/b/**/c would match the following: + // a/b/x/y/z/c + // a/x/y/z/b/c + // a/b/x/b/x/c + // a/b/c + // To do this, take the rest of the pattern after + // the **, and see if it would match the file remainder. + // If so, return success. + // If not, the ** "swallows" a segment, and try again. + // This is recursively awful. 
+ // + // a/**/b/**/c matching a/b/x/y/z/c + // - a matches a + // - doublestar + // - matchOne(b/x/y/z/c, b/**/c) + // - b matches b + // - doublestar + // - matchOne(x/y/z/c, c) -> no + // - matchOne(y/z/c, c) -> no + // - matchOne(z/c, c) -> no + // - matchOne(c, c) yes, hit + var fr = fi + var pr = pi + 1 + if (pr === pl) { + this.debug('** at the end') + // a ** at the end will just swallow the rest. + // We have found a match. + // however, it will not swallow /.x, unless + // options.dot is set. + // . and .. are *never* matched by **, for explosively + // exponential reasons. + for (; fi < fl; fi++) { + if (file[fi] === '.' || file[fi] === '..' || + (!options.dot && file[fi].charAt(0) === '.')) return false + } + return true + } + + // ok, let's see if we can swallow whatever we can. + while (fr < fl) { + var swallowee = file[fr] + + this.debug('\nglobstar while', file, fr, pattern, pr, swallowee) + + // XXX remove this slice. Just pass the start index. + if (this.matchOne(file.slice(fr), pattern.slice(pr), partial)) { + this.debug('globstar found match!', fr, fl, swallowee) + // found a match. + return true + } else { + // can't swallow "." or ".." ever. + // can only swallow ".foo" when explicitly asked. + if (swallowee === '.' || swallowee === '..' || + (!options.dot && swallowee.charAt(0) === '.')) { + this.debug('dot detected!', file, fr, pattern, pr) + break + } + + // ** swallows a segment, and continue. + this.debug('globstar swallow a segment, and continue') + fr++ + } + } + + // no match was found. + // However, in partial mode, we can't say this is necessarily over. + // If there's more *pattern* left, then + /* istanbul ignore if */ + if (partial) { + // ran out of file + this.debug('\n>>> no match, partial?', file, fr, pattern, pr) + if (fr === fl) return true + } + return false + } + + // something other than ** + // non-magic patterns just have to match exactly + // patterns with magic have been turned into regexps. + var hit + if (typeof p === 'string') { + hit = f === p + this.debug('string match', p, f, hit) + } else { + hit = f.match(p) + this.debug('pattern match', p, f, hit) + } + + if (!hit) return false + } + + // Note: ending in / means that we'll get a final "" + // at the end of the pattern. This can only match a + // corresponding "" at the end of the file. + // If the file ends in /, then it can only match a + // a pattern that ends in /, unless the pattern just + // doesn't have any more for it. But, a/b/ should *not* + // match "a/b/*", even though "" matches against the + // [^/]*? pattern, except in partial mode, where it might + // simply not be reached yet. + // However, a/b/ should still satisfy a/* + + // now either we fell off the end of the pattern, or we're done. + if (fi === fl && pi === pl) { + // ran out of pattern and filename at the same time. + // an exact hit! + return true + } else if (fi === fl) { + // ran out of file, but still had pattern left. + // this is ok if we're doing the match as part of + // a glob fs traversal. + return partial + } else /* istanbul ignore else */ if (pi === pl) { + // ran out of pattern, still have file left. + // this is only acceptable if we're on the very last + // empty segment of a file with a trailing slash. + // a/* should match a/b/ + return (fi === fl - 1) && (file[fi] === '') + } + + // should be unreachable. 
+ /* istanbul ignore next */ + throw new Error('wtf?') +} + +// replace stuff like \* with * +function globUnescape (s) { + return s.replace(/\\(.)/g, '$1') +} + +function regExpEscape (s) { + return s.replace(/[-[\]{}()*+?.,\\^$|#\s]/g, '\\$&') +} diff --git a/node_modules/minimatch/package.json b/node_modules/minimatch/package.json new file mode 100644 index 00000000..566efdfe --- /dev/null +++ b/node_modules/minimatch/package.json @@ -0,0 +1,33 @@ +{ + "author": "Isaac Z. Schlueter (http://blog.izs.me)", + "name": "minimatch", + "description": "a glob matcher in javascript", + "version": "3.1.2", + "publishConfig": { + "tag": "v3-legacy" + }, + "repository": { + "type": "git", + "url": "git://github.com/isaacs/minimatch.git" + }, + "main": "minimatch.js", + "scripts": { + "test": "tap", + "preversion": "npm test", + "postversion": "npm publish", + "postpublish": "git push origin --all; git push origin --tags" + }, + "engines": { + "node": "*" + }, + "dependencies": { + "brace-expansion": "^1.1.7" + }, + "devDependencies": { + "tap": "^15.1.6" + }, + "license": "ISC", + "files": [ + "minimatch.js" + ] +} diff --git a/node_modules/once/LICENSE b/node_modules/once/LICENSE new file mode 100644 index 00000000..19129e31 --- /dev/null +++ b/node_modules/once/LICENSE @@ -0,0 +1,15 @@ +The ISC License + +Copyright (c) Isaac Z. Schlueter and Contributors + +Permission to use, copy, modify, and/or distribute this software for any +purpose with or without fee is hereby granted, provided that the above +copyright notice and this permission notice appear in all copies. + +THE SOFTWARE IS PROVIDED "AS IS" AND THE AUTHOR DISCLAIMS ALL WARRANTIES +WITH REGARD TO THIS SOFTWARE INCLUDING ALL IMPLIED WARRANTIES OF +MERCHANTABILITY AND FITNESS. IN NO EVENT SHALL THE AUTHOR BE LIABLE FOR +ANY SPECIAL, DIRECT, INDIRECT, OR CONSEQUENTIAL DAMAGES OR ANY DAMAGES +WHATSOEVER RESULTING FROM LOSS OF USE, DATA OR PROFITS, WHETHER IN AN +ACTION OF CONTRACT, NEGLIGENCE OR OTHER TORTIOUS ACTION, ARISING OUT OF OR +IN CONNECTION WITH THE USE OR PERFORMANCE OF THIS SOFTWARE. diff --git a/node_modules/once/README.md b/node_modules/once/README.md new file mode 100644 index 00000000..1f1ffca9 --- /dev/null +++ b/node_modules/once/README.md @@ -0,0 +1,79 @@ +# once + +Only call a function once. + +## usage + +```javascript +var once = require('once') + +function load (file, cb) { + cb = once(cb) + loader.load('file') + loader.once('load', cb) + loader.once('error', cb) +} +``` + +Or add to the Function.prototype in a responsible way: + +```javascript +// only has to be done once +require('once').proto() + +function load (file, cb) { + cb = cb.once() + loader.load('file') + loader.once('load', cb) + loader.once('error', cb) +} +``` + +Ironically, the prototype feature makes this module twice as +complicated as necessary. + +To check whether you function has been called, use `fn.called`. Once the +function is called for the first time the return value of the original +function is saved in `fn.value` and subsequent calls will continue to +return this value. + +```javascript +var once = require('once') + +function load (cb) { + cb = once(cb) + var stream = createStream() + stream.once('data', cb) + stream.once('end', function () { + if (!cb.called) cb(new Error('not found')) + }) +} +``` + +## `once.strict(func)` + +Throw an error if the function is called twice. + +Some functions are expected to be called only once. Using `once` for them would +potentially hide logical errors. 
+ +In the example below, the `greet` function has to call the callback only once: + +```javascript +function greet (name, cb) { + // return is missing from the if statement + // when no name is passed, the callback is called twice + if (!name) cb('Hello anonymous') + cb('Hello ' + name) +} + +function log (msg) { + console.log(msg) +} + +// this will print 'Hello anonymous' but the logical error will be missed +greet(null, once(msg)) + +// once.strict will print 'Hello anonymous' and throw an error when the callback will be called the second time +greet(null, once.strict(msg)) +``` diff --git a/node_modules/once/once.js b/node_modules/once/once.js new file mode 100644 index 00000000..23540673 --- /dev/null +++ b/node_modules/once/once.js @@ -0,0 +1,42 @@ +var wrappy = require('wrappy') +module.exports = wrappy(once) +module.exports.strict = wrappy(onceStrict) + +once.proto = once(function () { + Object.defineProperty(Function.prototype, 'once', { + value: function () { + return once(this) + }, + configurable: true + }) + + Object.defineProperty(Function.prototype, 'onceStrict', { + value: function () { + return onceStrict(this) + }, + configurable: true + }) +}) + +function once (fn) { + var f = function () { + if (f.called) return f.value + f.called = true + return f.value = fn.apply(this, arguments) + } + f.called = false + return f +} + +function onceStrict (fn) { + var f = function () { + if (f.called) + throw new Error(f.onceError) + f.called = true + return f.value = fn.apply(this, arguments) + } + var name = fn.name || 'Function wrapped with `once`' + f.onceError = name + " shouldn't be called more than once" + f.called = false + return f +} diff --git a/node_modules/once/package.json b/node_modules/once/package.json new file mode 100644 index 00000000..16815b2f --- /dev/null +++ b/node_modules/once/package.json @@ -0,0 +1,33 @@ +{ + "name": "once", + "version": "1.4.0", + "description": "Run a function exactly one time", + "main": "once.js", + "directories": { + "test": "test" + }, + "dependencies": { + "wrappy": "1" + }, + "devDependencies": { + "tap": "^7.0.1" + }, + "scripts": { + "test": "tap test/*.js" + }, + "files": [ + "once.js" + ], + "repository": { + "type": "git", + "url": "git://github.com/isaacs/once" + }, + "keywords": [ + "once", + "function", + "one", + "single" + ], + "author": "Isaac Z. Schlueter (http://blog.izs.me/)", + "license": "ISC" +} diff --git a/node_modules/path-is-absolute/index.js b/node_modules/path-is-absolute/index.js new file mode 100644 index 00000000..22aa6c35 --- /dev/null +++ b/node_modules/path-is-absolute/index.js @@ -0,0 +1,20 @@ +'use strict'; + +function posix(path) { + return path.charAt(0) === '/'; +} + +function win32(path) { + // https://github.com/nodejs/node/blob/b3fcc245fb25539909ef1d5eaa01dbf92e168633/lib/path.js#L56 + var splitDeviceRe = /^([a-zA-Z]:|[\\\/]{2}[^\\\/]+[\\\/]+[^\\\/]+)?([\\\/])?([\s\S]*?)$/; + var result = splitDeviceRe.exec(path); + var device = result[1] || ''; + var isUnc = Boolean(device && device.charAt(1) !== ':'); + + // UNC paths are always absolute + return Boolean(result[2] || isUnc); +} + +module.exports = process.platform === 'win32' ? 
win32 : posix; +module.exports.posix = posix; +module.exports.win32 = win32; diff --git a/node_modules/path-is-absolute/license b/node_modules/path-is-absolute/license new file mode 100644 index 00000000..654d0bfe --- /dev/null +++ b/node_modules/path-is-absolute/license @@ -0,0 +1,21 @@ +The MIT License (MIT) + +Copyright (c) Sindre Sorhus (sindresorhus.com) + +Permission is hereby granted, free of charge, to any person obtaining a copy +of this software and associated documentation files (the "Software"), to deal +in the Software without restriction, including without limitation the rights +to use, copy, modify, merge, publish, distribute, sublicense, and/or sell +copies of the Software, and to permit persons to whom the Software is +furnished to do so, subject to the following conditions: + +The above copyright notice and this permission notice shall be included in +all copies or substantial portions of the Software. + +THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR +IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, +FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE +AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER +LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, +OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN +THE SOFTWARE. diff --git a/node_modules/path-is-absolute/package.json b/node_modules/path-is-absolute/package.json new file mode 100644 index 00000000..91196d5e --- /dev/null +++ b/node_modules/path-is-absolute/package.json @@ -0,0 +1,43 @@ +{ + "name": "path-is-absolute", + "version": "1.0.1", + "description": "Node.js 0.12 path.isAbsolute() ponyfill", + "license": "MIT", + "repository": "sindresorhus/path-is-absolute", + "author": { + "name": "Sindre Sorhus", + "email": "sindresorhus@gmail.com", + "url": "sindresorhus.com" + }, + "engines": { + "node": ">=0.10.0" + }, + "scripts": { + "test": "xo && node test.js" + }, + "files": [ + "index.js" + ], + "keywords": [ + "path", + "paths", + "file", + "dir", + "absolute", + "isabsolute", + "is-absolute", + "built-in", + "util", + "utils", + "core", + "ponyfill", + "polyfill", + "shim", + "is", + "detect", + "check" + ], + "devDependencies": { + "xo": "^0.16.0" + } +} diff --git a/node_modules/path-is-absolute/readme.md b/node_modules/path-is-absolute/readme.md new file mode 100644 index 00000000..8dbdf5fc --- /dev/null +++ b/node_modules/path-is-absolute/readme.md @@ -0,0 +1,59 @@ +# path-is-absolute [![Build Status](https://travis-ci.org/sindresorhus/path-is-absolute.svg?branch=master)](https://travis-ci.org/sindresorhus/path-is-absolute) + +> Node.js 0.12 [`path.isAbsolute()`](http://nodejs.org/api/path.html#path_path_isabsolute_path) [ponyfill](https://ponyfill.com) + + +## Install + +``` +$ npm install --save path-is-absolute +``` + + +## Usage + +```js +const pathIsAbsolute = require('path-is-absolute'); + +// Running on Linux +pathIsAbsolute('/home/foo'); +//=> true +pathIsAbsolute('C:/Users/foo'); +//=> false + +// Running on Windows +pathIsAbsolute('C:/Users/foo'); +//=> true +pathIsAbsolute('/home/foo'); +//=> false + +// Running on any OS +pathIsAbsolute.posix('/home/foo'); +//=> true +pathIsAbsolute.posix('C:/Users/foo'); +//=> false +pathIsAbsolute.win32('C:/Users/foo'); +//=> true +pathIsAbsolute.win32('/home/foo'); +//=> false +``` + + +## API + +See the [`path.isAbsolute()` docs](http://nodejs.org/api/path.html#path_path_isabsolute_path). 
+ +### pathIsAbsolute(path) + +### pathIsAbsolute.posix(path) + +POSIX specific version. + +### pathIsAbsolute.win32(path) + +Windows specific version. + + +## License + +MIT © [Sindre Sorhus](https://sindresorhus.com) diff --git a/node_modules/rimraf/CHANGELOG.md b/node_modules/rimraf/CHANGELOG.md new file mode 100644 index 00000000..f116f141 --- /dev/null +++ b/node_modules/rimraf/CHANGELOG.md @@ -0,0 +1,65 @@ +# v3.0 + +- Add `--preserve-root` option to executable (default true) +- Drop support for Node.js below version 6 + +# v2.7 + +- Make `glob` an optional dependency + +# 2.6 + +- Retry on EBUSY on non-windows platforms as well +- Make `rimraf.sync` 10000% more reliable on Windows + +# 2.5 + +- Handle Windows EPERM when lstat-ing read-only dirs +- Add glob option to pass options to glob + +# 2.4 + +- Add EPERM to delay/retry loop +- Add `disableGlob` option + +# 2.3 + +- Make maxBusyTries and emfileWait configurable +- Handle weird SunOS unlink-dir issue +- Glob the CLI arg for better Windows support + +# 2.2 + +- Handle ENOENT properly on Windows +- Allow overriding fs methods +- Treat EPERM as indicative of non-empty dir +- Remove optional graceful-fs dep +- Consistently return null error instead of undefined on success +- win32: Treat ENOTEMPTY the same as EBUSY +- Add `rimraf` binary + +# 2.1 + +- Fix SunOS error code for a non-empty directory +- Try rmdir before readdir +- Treat EISDIR like EPERM +- Remove chmod +- Remove lstat polyfill, node 0.7 is not supported + +# 2.0 + +- Fix myGid call to check process.getgid +- Simplify the EBUSY backoff logic. +- Use fs.lstat in node >= 0.7.9 +- Remove gently option +- remove fiber implementation +- Delete files that are marked read-only + +# 1.0 + +- Allow ENOENT in sync method +- Throw when no callback is provided +- Make opts.gently an absolute path +- use 'stat' if 'lstat' is not available +- Consistent error naming, and rethrow non-ENOENT stat errors +- add fiber implementation diff --git a/node_modules/rimraf/LICENSE b/node_modules/rimraf/LICENSE new file mode 100644 index 00000000..19129e31 --- /dev/null +++ b/node_modules/rimraf/LICENSE @@ -0,0 +1,15 @@ +The ISC License + +Copyright (c) Isaac Z. Schlueter and Contributors + +Permission to use, copy, modify, and/or distribute this software for any +purpose with or without fee is hereby granted, provided that the above +copyright notice and this permission notice appear in all copies. + +THE SOFTWARE IS PROVIDED "AS IS" AND THE AUTHOR DISCLAIMS ALL WARRANTIES +WITH REGARD TO THIS SOFTWARE INCLUDING ALL IMPLIED WARRANTIES OF +MERCHANTABILITY AND FITNESS. IN NO EVENT SHALL THE AUTHOR BE LIABLE FOR +ANY SPECIAL, DIRECT, INDIRECT, OR CONSEQUENTIAL DAMAGES OR ANY DAMAGES +WHATSOEVER RESULTING FROM LOSS OF USE, DATA OR PROFITS, WHETHER IN AN +ACTION OF CONTRACT, NEGLIGENCE OR OTHER TORTIOUS ACTION, ARISING OUT OF OR +IN CONNECTION WITH THE USE OR PERFORMANCE OF THIS SOFTWARE. diff --git a/node_modules/rimraf/README.md b/node_modules/rimraf/README.md new file mode 100644 index 00000000..423b8cf8 --- /dev/null +++ b/node_modules/rimraf/README.md @@ -0,0 +1,101 @@ +[![Build Status](https://travis-ci.org/isaacs/rimraf.svg?branch=master)](https://travis-ci.org/isaacs/rimraf) [![Dependency Status](https://david-dm.org/isaacs/rimraf.svg)](https://david-dm.org/isaacs/rimraf) [![devDependency Status](https://david-dm.org/isaacs/rimraf/dev-status.svg)](https://david-dm.org/isaacs/rimraf#info=devDependencies) + +The [UNIX command](http://en.wikipedia.org/wiki/Rm_(Unix)) `rm -rf` for node. 
+ +Install with `npm install rimraf`, or just drop rimraf.js somewhere. + +## API + +`rimraf(f, [opts], callback)` + +The first parameter will be interpreted as a globbing pattern for files. If you +want to disable globbing you can do so with `opts.disableGlob` (defaults to +`false`). This might be handy, for instance, if you have filenames that contain +globbing wildcard characters. + +The callback will be called with an error if there is one. Certain +errors are handled for you: + +* Windows: `EBUSY` and `ENOTEMPTY` - rimraf will back off a maximum of + `opts.maxBusyTries` times before giving up, adding 100ms of wait + between each attempt. The default `maxBusyTries` is 3. +* `ENOENT` - If the file doesn't exist, rimraf will return + successfully, since your desired outcome is already the case. +* `EMFILE` - Since `readdir` requires opening a file descriptor, it's + possible to hit `EMFILE` if too many file descriptors are in use. + In the sync case, there's nothing to be done for this. But in the + async case, rimraf will gradually back off with timeouts up to + `opts.emfileWait` ms, which defaults to 1000. + +## options + +* unlink, chmod, stat, lstat, rmdir, readdir, + unlinkSync, chmodSync, statSync, lstatSync, rmdirSync, readdirSync + + In order to use a custom file system library, you can override + specific fs functions on the options object. + + If any of these functions are present on the options object, then + the supplied function will be used instead of the default fs + method. + + Sync methods are only relevant for `rimraf.sync()`, of course. + + For example: + + ```javascript + var myCustomFS = require('some-custom-fs') + + rimraf('some-thing', myCustomFS, callback) + ``` + +* maxBusyTries + + If an `EBUSY`, `ENOTEMPTY`, or `EPERM` error code is encountered + on Windows systems, then rimraf will retry with a linear backoff + wait of 100ms longer on each try. The default maxBusyTries is 3. + + Only relevant for async usage. + +* emfileWait + + If an `EMFILE` error is encountered, then rimraf will retry + repeatedly with a linear backoff of 1ms longer on each try, until + the timeout counter hits this max. The default limit is 1000. + + If you repeatedly encounter `EMFILE` errors, then consider using + [graceful-fs](http://npm.im/graceful-fs) in your program. + + Only relevant for async usage. + +* glob + + Set to `false` to disable [glob](http://npm.im/glob) pattern + matching. + + Set to an object to pass options to the glob module. The default + glob options are `{ nosort: true, silent: true }`. + + Glob version 6 is used in this module. + + Relevant for both sync and async usage. + +* disableGlob + + Set to any non-falsey value to disable globbing entirely. + (Equivalent to setting `glob: false`.) + +## rimraf.sync + +It can remove stuff synchronously, too. But that's not so good. Use +the async API. It's better. + +## CLI + +If installed with `npm install rimraf -g` it can be used as a global +command `rimraf [ ...]` which is useful for cross platform support. + +## mkdirp + +If you need to create a directory recursively, check out +[mkdirp](https://github.com/substack/node-mkdirp). 
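+
+Pulling the API and options above together, a rough usage sketch (the paths are placeholders, not part of any real project layout):
+
+```javascript
+const rimraf = require('rimraf')
+
+// the first argument is treated as a glob pattern by default
+rimraf('build/**/*.tmp', { glob: { nosort: true, silent: true } }, er => {
+  if (er) throw er
+})
+
+// a literal path with globbing disabled and a larger Windows retry budget
+rimraf('some[weird]name', { disableGlob: true, maxBusyTries: 5 }, er => {
+  if (er) throw er
+})
+
+// synchronous variant (the async API is preferred)
+rimraf.sync('build/cache', { glob: false })
+```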
diff --git a/node_modules/rimraf/bin.js b/node_modules/rimraf/bin.js new file mode 100755 index 00000000..023814cc --- /dev/null +++ b/node_modules/rimraf/bin.js @@ -0,0 +1,68 @@ +#!/usr/bin/env node + +const rimraf = require('./') + +const path = require('path') + +const isRoot = arg => /^(\/|[a-zA-Z]:\\)$/.test(path.resolve(arg)) +const filterOutRoot = arg => { + const ok = preserveRoot === false || !isRoot(arg) + if (!ok) { + console.error(`refusing to remove ${arg}`) + console.error('Set --no-preserve-root to allow this') + } + return ok +} + +let help = false +let dashdash = false +let noglob = false +let preserveRoot = true +const args = process.argv.slice(2).filter(arg => { + if (dashdash) + return !!arg + else if (arg === '--') + dashdash = true + else if (arg === '--no-glob' || arg === '-G') + noglob = true + else if (arg === '--glob' || arg === '-g') + noglob = false + else if (arg.match(/^(-+|\/)(h(elp)?|\?)$/)) + help = true + else if (arg === '--preserve-root') + preserveRoot = true + else if (arg === '--no-preserve-root') + preserveRoot = false + else + return !!arg +}).filter(arg => !preserveRoot || filterOutRoot(arg)) + +const go = n => { + if (n >= args.length) + return + const options = noglob ? { glob: false } : {} + rimraf(args[n], options, er => { + if (er) + throw er + go(n+1) + }) +} + +if (help || args.length === 0) { + // If they didn't ask for help, then this is not a "success" + const log = help ? console.log : console.error + log('Usage: rimraf [ ...]') + log('') + log(' Deletes all files and folders at "path" recursively.') + log('') + log('Options:') + log('') + log(' -h, --help Display this usage info') + log(' -G, --no-glob Do not expand glob patterns in arguments') + log(' -g, --glob Expand glob patterns in arguments (default)') + log(' --preserve-root Do not remove \'/\' (default)') + log(' --no-preserve-root Do not treat \'/\' specially') + log(' -- Stop parsing flags') + process.exit(help ? 0 : 1) +} else + go(0) diff --git a/node_modules/rimraf/package.json b/node_modules/rimraf/package.json new file mode 100644 index 00000000..1bf8d5e3 --- /dev/null +++ b/node_modules/rimraf/package.json @@ -0,0 +1,32 @@ +{ + "name": "rimraf", + "version": "3.0.2", + "main": "rimraf.js", + "description": "A deep deletion module for node (like `rm -rf`)", + "author": "Isaac Z. Schlueter (http://blog.izs.me/)", + "license": "ISC", + "repository": "git://github.com/isaacs/rimraf.git", + "scripts": { + "preversion": "npm test", + "postversion": "npm publish", + "postpublish": "git push origin --follow-tags", + "test": "tap test/*.js" + }, + "bin": "./bin.js", + "dependencies": { + "glob": "^7.1.3" + }, + "files": [ + "LICENSE", + "README.md", + "bin.js", + "rimraf.js" + ], + "devDependencies": { + "mkdirp": "^0.5.1", + "tap": "^12.1.1" + }, + "funding": { + "url": "https://github.com/sponsors/isaacs" + } +} diff --git a/node_modules/rimraf/rimraf.js b/node_modules/rimraf/rimraf.js new file mode 100644 index 00000000..34da4171 --- /dev/null +++ b/node_modules/rimraf/rimraf.js @@ -0,0 +1,360 @@ +const assert = require("assert") +const path = require("path") +const fs = require("fs") +let glob = undefined +try { + glob = require("glob") +} catch (_err) { + // treat glob as optional. 
+} + +const defaultGlobOpts = { + nosort: true, + silent: true +} + +// for EMFILE handling +let timeout = 0 + +const isWindows = (process.platform === "win32") + +const defaults = options => { + const methods = [ + 'unlink', + 'chmod', + 'stat', + 'lstat', + 'rmdir', + 'readdir' + ] + methods.forEach(m => { + options[m] = options[m] || fs[m] + m = m + 'Sync' + options[m] = options[m] || fs[m] + }) + + options.maxBusyTries = options.maxBusyTries || 3 + options.emfileWait = options.emfileWait || 1000 + if (options.glob === false) { + options.disableGlob = true + } + if (options.disableGlob !== true && glob === undefined) { + throw Error('glob dependency not found, set `options.disableGlob = true` if intentional') + } + options.disableGlob = options.disableGlob || false + options.glob = options.glob || defaultGlobOpts +} + +const rimraf = (p, options, cb) => { + if (typeof options === 'function') { + cb = options + options = {} + } + + assert(p, 'rimraf: missing path') + assert.equal(typeof p, 'string', 'rimraf: path should be a string') + assert.equal(typeof cb, 'function', 'rimraf: callback function required') + assert(options, 'rimraf: invalid options argument provided') + assert.equal(typeof options, 'object', 'rimraf: options should be object') + + defaults(options) + + let busyTries = 0 + let errState = null + let n = 0 + + const next = (er) => { + errState = errState || er + if (--n === 0) + cb(errState) + } + + const afterGlob = (er, results) => { + if (er) + return cb(er) + + n = results.length + if (n === 0) + return cb() + + results.forEach(p => { + const CB = (er) => { + if (er) { + if ((er.code === "EBUSY" || er.code === "ENOTEMPTY" || er.code === "EPERM") && + busyTries < options.maxBusyTries) { + busyTries ++ + // try again, with the same exact callback as this one. + return setTimeout(() => rimraf_(p, options, CB), busyTries * 100) + } + + // this one won't happen if graceful-fs is used. + if (er.code === "EMFILE" && timeout < options.emfileWait) { + return setTimeout(() => rimraf_(p, options, CB), timeout ++) + } + + // already gone + if (er.code === "ENOENT") er = null + } + + timeout = 0 + next(er) + } + rimraf_(p, options, CB) + }) + } + + if (options.disableGlob || !glob.hasMagic(p)) + return afterGlob(null, [p]) + + options.lstat(p, (er, stat) => { + if (!er) + return afterGlob(null, [p]) + + glob(p, options.glob, afterGlob) + }) + +} + +// Two possible strategies. +// 1. Assume it's a file. unlink it, then do the dir stuff on EPERM or EISDIR +// 2. Assume it's a directory. readdir, then do the file stuff on ENOTDIR +// +// Both result in an extra syscall when you guess wrong. However, there +// are likely far more normal files in the world than directories. This +// is based on the assumption that a the average number of files per +// directory is >= 1. +// +// If anyone ever complains about this, then I guess the strategy could +// be made configurable somehow. But until then, YAGNI. +const rimraf_ = (p, options, cb) => { + assert(p) + assert(options) + assert(typeof cb === 'function') + + // sunos lets the root user unlink directories, which is... weird. + // so we have to lstat here and make sure it's not a dir. + options.lstat(p, (er, st) => { + if (er && er.code === "ENOENT") + return cb(null) + + // Windows can EPERM on stat. Life is suffering. 
+ if (er && er.code === "EPERM" && isWindows) + fixWinEPERM(p, options, er, cb) + + if (st && st.isDirectory()) + return rmdir(p, options, er, cb) + + options.unlink(p, er => { + if (er) { + if (er.code === "ENOENT") + return cb(null) + if (er.code === "EPERM") + return (isWindows) + ? fixWinEPERM(p, options, er, cb) + : rmdir(p, options, er, cb) + if (er.code === "EISDIR") + return rmdir(p, options, er, cb) + } + return cb(er) + }) + }) +} + +const fixWinEPERM = (p, options, er, cb) => { + assert(p) + assert(options) + assert(typeof cb === 'function') + + options.chmod(p, 0o666, er2 => { + if (er2) + cb(er2.code === "ENOENT" ? null : er) + else + options.stat(p, (er3, stats) => { + if (er3) + cb(er3.code === "ENOENT" ? null : er) + else if (stats.isDirectory()) + rmdir(p, options, er, cb) + else + options.unlink(p, cb) + }) + }) +} + +const fixWinEPERMSync = (p, options, er) => { + assert(p) + assert(options) + + try { + options.chmodSync(p, 0o666) + } catch (er2) { + if (er2.code === "ENOENT") + return + else + throw er + } + + let stats + try { + stats = options.statSync(p) + } catch (er3) { + if (er3.code === "ENOENT") + return + else + throw er + } + + if (stats.isDirectory()) + rmdirSync(p, options, er) + else + options.unlinkSync(p) +} + +const rmdir = (p, options, originalEr, cb) => { + assert(p) + assert(options) + assert(typeof cb === 'function') + + // try to rmdir first, and only readdir on ENOTEMPTY or EEXIST (SunOS) + // if we guessed wrong, and it's not a directory, then + // raise the original error. + options.rmdir(p, er => { + if (er && (er.code === "ENOTEMPTY" || er.code === "EEXIST" || er.code === "EPERM")) + rmkids(p, options, cb) + else if (er && er.code === "ENOTDIR") + cb(originalEr) + else + cb(er) + }) +} + +const rmkids = (p, options, cb) => { + assert(p) + assert(options) + assert(typeof cb === 'function') + + options.readdir(p, (er, files) => { + if (er) + return cb(er) + let n = files.length + if (n === 0) + return options.rmdir(p, cb) + let errState + files.forEach(f => { + rimraf(path.join(p, f), options, er => { + if (errState) + return + if (er) + return cb(errState = er) + if (--n === 0) + options.rmdir(p, cb) + }) + }) + }) +} + +// this looks simpler, and is strictly *faster*, but will +// tie up the JavaScript thread and fail on excessively +// deep directory trees. +const rimrafSync = (p, options) => { + options = options || {} + defaults(options) + + assert(p, 'rimraf: missing path') + assert.equal(typeof p, 'string', 'rimraf: path should be a string') + assert(options, 'rimraf: missing options') + assert.equal(typeof options, 'object', 'rimraf: options should be object') + + let results + + if (options.disableGlob || !glob.hasMagic(p)) { + results = [p] + } else { + try { + options.lstatSync(p) + results = [p] + } catch (er) { + results = glob.sync(p, options.glob) + } + } + + if (!results.length) + return + + for (let i = 0; i < results.length; i++) { + const p = results[i] + + let st + try { + st = options.lstatSync(p) + } catch (er) { + if (er.code === "ENOENT") + return + + // Windows can EPERM on stat. Life is suffering. + if (er.code === "EPERM" && isWindows) + fixWinEPERMSync(p, options, er) + } + + try { + // sunos lets the root user unlink directories, which is... weird. + if (st && st.isDirectory()) + rmdirSync(p, options, null) + else + options.unlinkSync(p) + } catch (er) { + if (er.code === "ENOENT") + return + if (er.code === "EPERM") + return isWindows ? 
fixWinEPERMSync(p, options, er) : rmdirSync(p, options, er) + if (er.code !== "EISDIR") + throw er + + rmdirSync(p, options, er) + } + } +} + +const rmdirSync = (p, options, originalEr) => { + assert(p) + assert(options) + + try { + options.rmdirSync(p) + } catch (er) { + if (er.code === "ENOENT") + return + if (er.code === "ENOTDIR") + throw originalEr + if (er.code === "ENOTEMPTY" || er.code === "EEXIST" || er.code === "EPERM") + rmkidsSync(p, options) + } +} + +const rmkidsSync = (p, options) => { + assert(p) + assert(options) + options.readdirSync(p).forEach(f => rimrafSync(path.join(p, f), options)) + + // We only end up here once we got ENOTEMPTY at least once, and + // at this point, we are guaranteed to have removed all the kids. + // So, we know that it won't be ENOENT or ENOTDIR or anything else. + // try really hard to delete stuff on windows, because it has a + // PROFOUNDLY annoying habit of not closing handles promptly when + // files are deleted, resulting in spurious ENOTEMPTY errors. + const retries = isWindows ? 100 : 1 + let i = 0 + do { + let threw = true + try { + const ret = options.rmdirSync(p, options) + threw = false + return ret + } finally { + if (++i < retries && threw) + continue + } + } while (true) +} + +module.exports = rimraf +rimraf.sync = rimrafSync diff --git a/node_modules/tmp-promise/.circleci/config.yml b/node_modules/tmp-promise/.circleci/config.yml new file mode 100644 index 00000000..918b1208 --- /dev/null +++ b/node_modules/tmp-promise/.circleci/config.yml @@ -0,0 +1,53 @@ +version: 2 + +common_steps: &common_steps + steps: + - checkout + + - run: + name: Install dependencies + command: npm install + + - run: + name: Run tests + command: npm run mocha + + - run: + name: Check Typescript types + command: npm run check-types + when: always + +jobs: + node-8: + docker: + - image: circleci/node:8 + + <<: *common_steps + + node-10: + docker: + - image: circleci/node:10 + + <<: *common_steps + + node-11: + docker: + - image: circleci/node:11 + + <<: *common_steps + + node-12: + docker: + - image: circleci/node:12 + + <<: *common_steps + +workflows: + version: 2 + + on-commit: + jobs: + - node-8 + - node-10 + - node-11 + - node-12 diff --git a/node_modules/tmp-promise/README.md b/node_modules/tmp-promise/README.md new file mode 100644 index 00000000..49f667f3 --- /dev/null +++ b/node_modules/tmp-promise/README.md @@ -0,0 +1,316 @@ +# tmp-promise + +[![CircleCI](https://circleci.com/gh/benjamingr/tmp-promise.svg?style=svg)](https://circleci.com/gh/benjamingr/tmp-promise) +[![npm version](https://badge.fury.io/js/tmp-promise.svg)](https://badge.fury.io/js/tmp-promise) + +A simple utility for creating temporary files or directories. + +The [tmp](https://github.com/raszi/node-tmp) package with promises support. If you want to use `tmp` with `async/await` then this helper might be for you. + +This documentation is mostly copied from that package's - but with promise usage instead of callback usage adapted. + +## Installation + + npm i tmp-promise + +**Note:** Node.js 8+ is supported - older versions of Node.js are not supported by the Node.js foundation. If you need to use an older version of Node.js install tmp-promise@1.10 + + npm i tmp-promise@1.1.0 + +## About + +This adds promises support to a [widely used library][2]. This package is used to create temporary files and directories in a [Node.js][1] environment. + + +tmp-promise offers both an asynchronous and a synchronous API. For all API calls, all +the parameters are optional. 
+ +Internally, tmp uses crypto for determining random file names, or, when using templates, a six letter random identifier. And just in case that you do not have that much entropy left on your system, tmp will fall back to pseudo random numbers. + +You can set whether you want to remove the temporary file on process exit or not, and the destination directory can also be set. + +tmp-promise also uses promise [disposers](http://stackoverflow.com/questions/28915677/what-is-the-promise-disposer-pattern) to provide a nice way to perform cleanup when you're done working with the files. + +## Usage (API Reference) + +### Asynchronous file creation + +Simple temporary file creation, the file will be closed and unlinked on process exit. + +With Node.js 10 and es - modules: + +```js +import { file } from 'tmp-promise' + +(async () => { + const {fd, path, cleanup} = await file(); + // work with file here in fd + cleanup(); +})(); +``` + +Or the older way: + +```javascript +var tmp = require('tmp-promise'); + +tmp.file().then(o => { + console.log("File: ", o.path); + console.log("Filedescriptor: ", o.fd); + + // If we don't need the file anymore we could manually call cleanup + // But that is not necessary if we didn't pass the keep option because the library + // will clean after itself. + o.cleanup(); +}); +``` + +Simple temporary file creation with a [disposer](http://stackoverflow.com/questions/28915677/what-is-the-promise-disposer-pattern): + +With Node.js 10 and es - modules: + +```js +import { withFile } from 'tmp-promise' + +withFile(async ({path, fd}) => { + // when this function returns or throws - release the file + await doSomethingWithFile(db); +}); +``` + +Or the older way: + +```js +tmp.withFile(o => { + console.log("File: ", o.path); + console.log("Filedescriptor: ", o.fd); + // the file remains opens until the below promise resolves + return somePromiseReturningFn(); +}).then(v => { + // file is closed here automatically, v is the value of somePromiseReturningFn +}); +``` + + +### Synchronous file creation + +A synchronous version of the above. + +```javascript +var tmp = require('tmp-promise'); + +var tmpobj = tmp.fileSync(); +console.log("File: ", tmpobj.name); +console.log("Filedescriptor: ", tmpobj.fd); + +// If we don't need the file anymore we could manually call the removeCallback +// But that is not necessary if we didn't pass the keep option because the library +// will clean after itself. +tmpobj.removeCallback(); +``` + +Note that this might throw an exception if either the maximum limit of retries +for creating a temporary name fails, or, in case that you do not have the permission +to write to the directory where the temporary file should be created in. + +### Asynchronous directory creation + +Simple temporary directory creation, it will be removed on process exit. + +If the directory still contains items on process exit, then it won't be removed. + +```javascript +var tmp = require('tmp-promise'); + +tmp.dir().then(o => { + console.log("Dir: ", o.path); + + // Manual cleanup + o.cleanup(); +}); +``` + +If you want to cleanup the directory even when there are entries in it, then +you can pass the `unsafeCleanup` option when creating it. 
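+
+A minimal sketch of that option (otherwise identical to the example above):
+
+```javascript
+var tmp = require('tmp-promise');
+
+// the directory is removed on cleanup even if files were created inside it
+tmp.dir({ unsafeCleanup: true }).then(o => {
+  console.log("Dir: ", o.path);
+
+  // Manual cleanup
+  o.cleanup();
+});
+```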
+ +You can also use a [disposer](http://stackoverflow.com/questions/28915677/what-is-the-promise-disposer-pattern) here which takes care of cleanup automatically: + +```javascript +var tmp = require('tmp-promise'); + +tmp.withDir(o => { + console.log("Dir: ", o.path); + + // automatic cleanup when the below promise resolves + return somePromiseReturningFn(); +}).then(v => { + // the directory has been cleaned here +}); +``` + +### Synchronous directory creation + +A synchronous version of the above. + +```javascript +var tmp = require('tmp-promise'); + +var tmpobj = tmp.dirSync(); +console.log("Dir: ", tmpobj.name); +// Manual cleanup +tmpobj.removeCallback(); +``` + +Note that this might throw an exception if either the maximum limit of retries +for creating a temporary name fails, or, in case that you do not have the permission +to write to the directory where the temporary directory should be created in. + +### Asynchronous filename generation + +It is possible with this library to generate a unique filename in the specified +directory. + +```javascript +var tmp = require('tmp-promise'); + +tmp.tmpName().then(path => { + console.log("Created temporary filename: ", path); +}); +``` + +### Synchronous filename generation + +A synchronous version of the above. + +```javascript +var tmp = require('tmp-promise'); + +var name = tmp.tmpNameSync(); +console.log("Created temporary filename: ", name); +``` + +## Advanced usage + +### Asynchronous file creation + +Creates a file with mode `0644`, prefix will be `prefix-` and postfix will be `.txt`. + +```javascript +var tmp = require('tmp-promise'); + +tmp.file({ mode: 0644, prefix: 'prefix-', postfix: '.txt' }).then(o => { + console.log("File: ", o.path); + console.log("Filedescriptor: ", o.fd); +}); +``` + +### Synchronous file creation + +A synchronous version of the above. + +```javascript +var tmp = require('tmp-promise'); + +var tmpobj = tmp.fileSync({ mode: 0644, prefix: 'prefix-', postfix: '.txt' }); +console.log("File: ", tmpobj.name); +console.log("Filedescriptor: ", tmpobj.fd); +``` + +### Asynchronous directory creation + +Creates a directory with mode `0755`, prefix will be `myTmpDir_`. + +```javascript +var tmp = require('tmp-promise'); + +tmp.dir({ mode: 0750, prefix: 'myTmpDir_' }).then(o => { + console.log("Dir: ", o.path); +}); +``` + +### Synchronous directory creation + +Again, a synchronous version of the above. + +```javascript +var tmp = require('tmp-promise'); + +var tmpobj = tmp.dirSync({ mode: 0750, prefix: 'myTmpDir_' }); +console.log("Dir: ", tmpobj.name); +``` + + +### mkstemp like, asynchronously + +Creates a new temporary directory with mode `0700` and filename like `/tmp/tmp-nk2J1u`. + +```javascript +var tmp = require('tmp-promise'); +tmp.dir({ template: '/tmp/tmp-XXXXXX' }).then(console.log); +``` + + +### mkstemp like, synchronously + +This will behave similarly to the asynchronous version. + +```javascript +var tmp = require('tmp-promise'); + +var tmpobj = tmp.dirSync({ template: '/tmp/tmp-XXXXXX' }); +console.log("Dir: ", tmpobj.name); +``` + +### Asynchronous filename generation + +The `tmpName()` function accepts the `prefix`, `postfix`, `dir`, etc. parameters also: + +```javascript +var tmp = require('tmp-promise'); + +tmp.tmpName({ template: '/tmp/tmp-XXXXXX' }).then(path => + console.log("Created temporary filename: ", path); +); +``` + +### Synchronous filename generation + +The `tmpNameSync()` function works similarly to `tmpName()`. 
+ +```javascript +var tmp = require('tmp-promise'); +var tmpname = tmp.tmpNameSync({ template: '/tmp/tmp-XXXXXX' }); +console.log("Created temporary filename: ", tmpname); +``` + + +## Graceful cleanup + +One may want to cleanup the temporary files even when an uncaught exception +occurs. To enforce this, you can call the `setGracefulCleanup()` method: + +```javascript +var tmp = require('tmp'); + +tmp.setGracefulCleanup(); +``` + +## Options + +All options are optional :) + + * `mode`: the file mode to create with, it fallbacks to `0600` on file creation and `0700` on directory creation + * `prefix`: the optional prefix, fallbacks to `tmp-` if not provided + * `postfix`: the optional postfix, fallbacks to `.tmp` on file creation + * `template`: [`mkstemp`][3] like filename template, no default + * `dir`: the optional temporary directory, fallbacks to system default (guesses from environment) + * `tries`: how many times should the function try to get a unique filename before giving up, default `3` + * `keep`: signals that the temporary file or directory should not be deleted on exit, default is `false`, means delete + * Please keep in mind that it is recommended in this case to call the provided `cleanupCallback` function manually. + * `unsafeCleanup`: recursively removes the created temporary directory, even when it's not empty. default is `false` + + + +[1]: http://nodejs.org/ +[2]: https://www.npmjs.com/browse/depended/tmp +[3]: http://www.kernel.org/doc/man-pages/online/pages/man3/mkstemp.3.html diff --git a/node_modules/tmp-promise/example-usage.js b/node_modules/tmp-promise/example-usage.js new file mode 100644 index 00000000..1ea9ca58 --- /dev/null +++ b/node_modules/tmp-promise/example-usage.js @@ -0,0 +1,9 @@ +var tmp = require("./index.js"); +var Promise = require("bluebird"); // just for delay, this works with native promises +// disposer +tmp.withFile((path) => { + console.log("Created at path", path); + return Promise.delay(1000); +}).then(() => { + console.log("File automatically disposed"); +}); \ No newline at end of file diff --git a/node_modules/tmp-promise/index.d.ts b/node_modules/tmp-promise/index.d.ts new file mode 100644 index 00000000..055da8a0 --- /dev/null +++ b/node_modules/tmp-promise/index.d.ts @@ -0,0 +1,27 @@ +import { fileSync, dirSync, tmpNameSync, setGracefulCleanup } from "tmp"; +import { FileOptions, DirOptions, TmpNameOptions } from "tmp"; + +export interface DirectoryResult { + path: string; + cleanup: () => Promise; +} + +export interface FileResult extends DirectoryResult { + fd: number; +} + +export function file(options?: FileOptions): Promise; +export function withFile( + fn: (result: FileResult) => Promise, + options?: FileOptions +): Promise; + +export function dir(options?: DirOptions): Promise; +export function withDir( + fn: (results: DirectoryResult) => Promise, + options?: DirOptions +): Promise; + +export function tmpName(options?: TmpNameOptions): Promise; + +export { fileSync, dirSync, tmpNameSync, setGracefulCleanup }; diff --git a/node_modules/tmp-promise/index.js b/node_modules/tmp-promise/index.js new file mode 100644 index 00000000..0a39d0fa --- /dev/null +++ b/node_modules/tmp-promise/index.js @@ -0,0 +1,50 @@ +'use strict'; + +const { promisify } = require("util"); +const tmp = require("tmp"); + +// file +module.exports.fileSync = tmp.fileSync; +const fileWithOptions = promisify((options, cb) => + tmp.file(options, (err, path, fd, cleanup) => + err ? 
cb(err) : cb(undefined, { path, fd, cleanup: promisify(cleanup) }) + ) +); +module.exports.file = async (options) => fileWithOptions(options); + +module.exports.withFile = async function withFile(fn, options) { + const { path, fd, cleanup } = await module.exports.file(options); + try { + return await fn({ path, fd }); + } finally { + await cleanup(); + } +}; + + +// directory +module.exports.dirSync = tmp.dirSync; +const dirWithOptions = promisify((options, cb) => + tmp.dir(options, (err, path, cleanup) => + err ? cb(err) : cb(undefined, { path, cleanup: promisify(cleanup) }) + ) +); +module.exports.dir = async (options) => dirWithOptions(options); + +module.exports.withDir = async function withDir(fn, options) { + const { path, cleanup } = await module.exports.dir(options); + try { + return await fn({ path }); + } finally { + await cleanup(); + } +}; + + +// name generation +module.exports.tmpNameSync = tmp.tmpNameSync; +module.exports.tmpName = promisify(tmp.tmpName); + +module.exports.tmpdir = tmp.tmpdir; + +module.exports.setGracefulCleanup = tmp.setGracefulCleanup; diff --git a/node_modules/tmp-promise/index.test-d.ts b/node_modules/tmp-promise/index.test-d.ts new file mode 100644 index 00000000..375f0ec0 --- /dev/null +++ b/node_modules/tmp-promise/index.test-d.ts @@ -0,0 +1,31 @@ +import { file, withFile, dir, withDir, tmpName } from "."; + +async function fileExample() { + const { path, fd, cleanup } = await file({ discardDescriptor: true }); + await cleanup(); + + await withFile( + async ({ path, fd, cleanup }) => { + console.log(fd); + await cleanup(); + }, + { discardDescriptor: true } + ); +} + +async function dirExample() { + const { path, cleanup } = await dir({ unsafeCleanup: true }); + await cleanup(); + + await withDir( + async ({ path, cleanup }) => { + console.log(path); + await cleanup(); + }, + { unsafeCleanup: true } + ); +} + +async function tmpNameExample() { + const name = await tmpName({ tries: 3 }); +} diff --git a/node_modules/tmp-promise/package.json b/node_modules/tmp-promise/package.json new file mode 100644 index 00000000..029a1f97 --- /dev/null +++ b/node_modules/tmp-promise/package.json @@ -0,0 +1,34 @@ +{ + "name": "tmp-promise", + "version": "3.0.3", + "description": "The tmp package with promises support and disposers.", + "main": "index.js", + "types": "index.d.ts", + "scripts": { + "mocha": "mocha", + "check-types": "tsd", + "test": "npm run mocha && npm run check-types", + "publish": "node publish" + }, + "keywords": [ + "tmp", + "promise", + "tempfile", + "mkdtemp", + "mktemp" + ], + "author": "Benjamin Gruenbaum and Collaborators.", + "license": "MIT", + "repository": { + "type": "git", + "url": "git://github.com/benjamingr/tmp-promise.git" + }, + "dependencies": { + "tmp": "^0.2.0" + }, + "devDependencies": { + "@types/tmp": "^0.2.0", + "mocha": "7.1.2", + "tsd": "0.11.0" + } +} diff --git a/node_modules/tmp-promise/publish.js b/node_modules/tmp-promise/publish.js new file mode 100644 index 00000000..21feefa5 --- /dev/null +++ b/node_modules/tmp-promise/publish.js @@ -0,0 +1,8 @@ +'use strict'; + +const { execSync } = require('child_process'); +const { version } = require('./package'); + +execSync('npm run test'); +execSync(`git tag v${version}`); +execSync('npm publish'); \ No newline at end of file diff --git a/node_modules/tmp-promise/test.js b/node_modules/tmp-promise/test.js new file mode 100644 index 00000000..eb956658 --- /dev/null +++ b/node_modules/tmp-promise/test.js @@ -0,0 +1,153 @@ +'use strict'; + +const fs = require('fs') +const 
assert = require('assert') +const { extname } = require('path') + +const tmp = require('.') + +async function checkFileResult(result) { + assert.deepEqual(Object.keys(result).sort(), ['cleanup', 'fd', 'path']) + + const { path, fd, cleanup } = result + assert.ok(typeof path === 'string') + assert.ok(typeof fd === 'number') + assert.ok(typeof cleanup === 'function') + + // Check that the path is a fille. + assert.ok(fs.statSync(path).isFile()) + + // Check that the fd is correct and points to the file. + const message = 'hello there!' + fs.writeSync(fd, message) + fs.fdatasyncSync(fd) + assert.equal(fs.readFileSync(path), message) + + // Check that the cleanup works. + await cleanup() + assert.throws(() => fs.statSync(path)) +} + +describe('file()', function() +{ + context('when called without options', function() + { + it('creates the file, returns the expected result, and the cleanup function works', async function() + { + const result = await tmp.file() + await checkFileResult(result) + }) + }) + + context('when called with options', function() + { + it('creates the file, returns the expected result, and the cleanup function works', async function() + { + const prefix = 'myTmpDir_' + const result = await tmp.file({ prefix }) + await checkFileResult(result) + assert.ok(result.path.includes(prefix)) + }) + }) + + it('propagates errors', async function() { + try { + await tmp.file({ dir: 'nonexistent-path' }); + throw Error('Expected to throw'); + } catch (e) { + assert.ok(e.message.startsWith('ENOENT: no such file or directory')); + } + }); +}) + +async function checkDirResult(result) { + assert.deepEqual(Object.keys(result).sort(), ['cleanup', 'path']) + + const { path, cleanup } = result + assert.ok(typeof path === 'string') + assert.ok(typeof cleanup === 'function') + + assert.ok(fs.statSync(path).isDirectory()) + + await cleanup() + assert.throws(() => fs.statSync(path)) +} + +describe('dir()', function() +{ + context('when called without options', function() + { + it('creates the directory, returns the expected result, and the cleanup function works', async function() + { + const result = await tmp.dir() + await checkDirResult(result) + }) + }) + + context('when called with options', function() + { + it('creates the directory, returns the expected result, and the cleanup function works', async function() + { + const prefix = 'myTmpDir_' + const result = await tmp.dir({ prefix }) + await checkDirResult(result) + assert.ok(result.path.includes(prefix)) + }) + }) + + it('propagates errors', async function() { + try { + await tmp.dir({ dir: 'nonexistent-path' }); + throw Error('Expected to throw'); + } catch (e) { + assert.ok(e.message.startsWith('ENOENT: no such file or directory')); + } + }); +}) + +describe('withFile()', function() +{ + it("file doesn't exist after going out of scope", function() + { + var filepath + + return tmp.withFile(function(o) + { + filepath = o.path + + fs.accessSync(filepath) + assert.strictEqual(extname(filepath), '.txt') + }, {discardDescriptor: true, postfix: '.txt'}) + .then(function() + { + assert.throws(function() + { + fs.accessSync(filepath) + }, filepath + ' still exists') + }) + }) +}) + + +describe('withDir()', function() +{ + it("dir doesn't exist after going out of scope", function() + { + var filepath + + return tmp.withDir(function(o) + { + filepath = o.path + + fs.accessSync(filepath) + assert.strictEqual(extname(filepath), '.dir') + }, {postfix: '.dir'}) + .then(function() + { + assert.throws(function() + { + fs.accessSync(filepath) + }, 
filepath + ' still exists') + }) + }) +}) diff --git a/node_modules/tmp/CHANGELOG.md b/node_modules/tmp/CHANGELOG.md new file mode 100644 index 00000000..0aa54882 --- /dev/null +++ b/node_modules/tmp/CHANGELOG.md @@ -0,0 +1,288 @@ + + +## v0.2.1 (2020-04-28) + +#### :rocket: Enhancement +* [#252](https://github.com/raszi/node-tmp/pull/252) Closes [#250](https://github.com/raszi/node-tmp/issues/250): introduce tmpdir option for overriding the system tmp dir ([@silkentrance](https://github.com/silkentrance)) + +#### :house: Internal +* [#253](https://github.com/raszi/node-tmp/pull/253) Closes [#191](https://github.com/raszi/node-tmp/issues/191): generate changelog from pull requests using lerna-changelog ([@silkentrance](https://github.com/silkentrance)) + +#### Committers: 1 +- Carsten Klein ([@silkentrance](https://github.com/silkentrance)) + + +## v0.2.0 (2020-04-25) + +#### :rocket: Enhancement +* [#234](https://github.com/raszi/node-tmp/pull/234) feat: stabilize tmp for v0.2.0 release ([@silkentrance](https://github.com/silkentrance)) + +#### :bug: Bug Fix +* [#231](https://github.com/raszi/node-tmp/pull/231) Closes [#230](https://github.com/raszi/node-tmp/issues/230): regression after fix for #197 ([@silkentrance](https://github.com/silkentrance)) +* [#220](https://github.com/raszi/node-tmp/pull/220) Closes [#197](https://github.com/raszi/node-tmp/issues/197): return sync callback when using the sync interface, otherwise return the async callback ([@silkentrance](https://github.com/silkentrance)) +* [#193](https://github.com/raszi/node-tmp/pull/193) Closes [#192](https://github.com/raszi/node-tmp/issues/192): tmp must not exit the process on its own ([@silkentrance](https://github.com/silkentrance)) + +#### :memo: Documentation +* [#221](https://github.com/raszi/node-tmp/pull/221) Gh 206 document name option ([@silkentrance](https://github.com/silkentrance)) + +#### :house: Internal +* [#226](https://github.com/raszi/node-tmp/pull/226) Closes [#212](https://github.com/raszi/node-tmp/issues/212): enable direct name option test ([@silkentrance](https://github.com/silkentrance)) +* [#225](https://github.com/raszi/node-tmp/pull/225) Closes [#211](https://github.com/raszi/node-tmp/issues/211): existing tests must clean up after themselves ([@silkentrance](https://github.com/silkentrance)) +* [#224](https://github.com/raszi/node-tmp/pull/224) Closes [#217](https://github.com/raszi/node-tmp/issues/217): name tests must use tmpName ([@silkentrance](https://github.com/silkentrance)) +* [#223](https://github.com/raszi/node-tmp/pull/223) Closes [#214](https://github.com/raszi/node-tmp/issues/214): refactor tests and lib ([@silkentrance](https://github.com/silkentrance)) +* [#198](https://github.com/raszi/node-tmp/pull/198) Update dependencies to latest versions ([@matsev](https://github.com/matsev)) + +#### Committers: 2 +- Carsten Klein ([@silkentrance](https://github.com/silkentrance)) +- Mattias Severson ([@matsev](https://github.com/matsev)) + + +## v0.1.0 (2019-03-20) + +#### :rocket: Enhancement +* [#177](https://github.com/raszi/node-tmp/pull/177) fix: fail early if there is no tmp dir specified ([@silkentrance](https://github.com/silkentrance)) +* [#159](https://github.com/raszi/node-tmp/pull/159) Closes [#121](https://github.com/raszi/node-tmp/issues/121) ([@silkentrance](https://github.com/silkentrance)) +* [#161](https://github.com/raszi/node-tmp/pull/161) Closes [#155](https://github.com/raszi/node-tmp/issues/155) ([@silkentrance](https://github.com/silkentrance)) +* 
[#166](https://github.com/raszi/node-tmp/pull/166) fix: avoid relying on Node’s internals ([@addaleax](https://github.com/addaleax)) +* [#144](https://github.com/raszi/node-tmp/pull/144) prepend opts.dir || tmpDir to template if no path is given ([@silkentrance](https://github.com/silkentrance)) + +#### :bug: Bug Fix +* [#183](https://github.com/raszi/node-tmp/pull/183) Closes [#182](https://github.com/raszi/node-tmp/issues/182) fileSync takes empty string postfix option ([@gutte](https://github.com/gutte)) +* [#130](https://github.com/raszi/node-tmp/pull/130) Closes [#129](https://github.com/raszi/node-tmp/issues/129) install process listeners safely ([@silkentrance](https://github.com/silkentrance)) + +#### :memo: Documentation +* [#188](https://github.com/raszi/node-tmp/pull/188) HOTCloses [#187](https://github.com/raszi/node-tmp/issues/187): restore behaviour for #182 ([@silkentrance](https://github.com/silkentrance)) +* [#180](https://github.com/raszi/node-tmp/pull/180) fix gh-179: template no longer accepts arbitrary paths ([@silkentrance](https://github.com/silkentrance)) +* [#175](https://github.com/raszi/node-tmp/pull/175) docs: add `unsafeCleanup` option to jsdoc ([@kerimdzhanov](https://github.com/kerimdzhanov)) +* [#151](https://github.com/raszi/node-tmp/pull/151) docs: fix link to tmp-promise ([@silkentrance](https://github.com/silkentrance)) + +#### :house: Internal +* [#184](https://github.com/raszi/node-tmp/pull/184) test: add missing tests for #182 ([@silkentrance](https://github.com/silkentrance)) +* [#171](https://github.com/raszi/node-tmp/pull/171) chore: drop old NodeJS support ([@poppinlp](https://github.com/poppinlp)) +* [#170](https://github.com/raszi/node-tmp/pull/170) chore: update dependencies ([@raszi](https://github.com/raszi)) +* [#165](https://github.com/raszi/node-tmp/pull/165) test: add missing tests ([@raszi](https://github.com/raszi)) +* [#163](https://github.com/raszi/node-tmp/pull/163) chore: add lint npm task ([@raszi](https://github.com/raszi)) +* [#107](https://github.com/raszi/node-tmp/pull/107) chore: add coverage report ([@raszi](https://github.com/raszi)) +* [#141](https://github.com/raszi/node-tmp/pull/141) test: refactor tests for mocha ([@silkentrance](https://github.com/silkentrance)) +* [#154](https://github.com/raszi/node-tmp/pull/154) chore: change Travis configuration ([@raszi](https://github.com/raszi)) +* [#152](https://github.com/raszi/node-tmp/pull/152) fix: drop Node v0.6.0 ([@raszi](https://github.com/raszi)) + +#### Committers: 6 +- Anna Henningsen ([@addaleax](https://github.com/addaleax)) +- Carsten Klein ([@silkentrance](https://github.com/silkentrance)) +- Dan Kerimdzhanov ([@kerimdzhanov](https://github.com/kerimdzhanov)) +- Gustav Klingstedt ([@gutte](https://github.com/gutte)) +- KARASZI István ([@raszi](https://github.com/raszi)) +- PoppinL ([@poppinlp](https://github.com/poppinlp)) + + +## v0.0.33 (2017-08-12) + +#### :rocket: Enhancement +* [#147](https://github.com/raszi/node-tmp/pull/147) fix: with name option try at most once to get a unique tmp name ([@silkentrance](https://github.com/silkentrance)) + +#### :bug: Bug Fix +* [#149](https://github.com/raszi/node-tmp/pull/149) fix(fileSync): must honor detachDescriptor and discardDescriptor options ([@silkentrance](https://github.com/silkentrance)) +* [#119](https://github.com/raszi/node-tmp/pull/119) Closes [#115](https://github.com/raszi/node-tmp/issues/115) ([@silkentrance](https://github.com/silkentrance)) + +#### :memo: Documentation +* 
[#128](https://github.com/raszi/node-tmp/pull/128) Closes [#127](https://github.com/raszi/node-tmp/issues/127) add reference to tmp-promise ([@silkentrance](https://github.com/silkentrance)) + +#### :house: Internal +* [#135](https://github.com/raszi/node-tmp/pull/135) Closes [#133](https://github.com/raszi/node-tmp/issues/133), #134 ([@silkentrance](https://github.com/silkentrance)) +* [#123](https://github.com/raszi/node-tmp/pull/123) docs: update tmp.js MIT license header to 2017 ([@madnight](https://github.com/madnight)) +* [#122](https://github.com/raszi/node-tmp/pull/122) chore: add issue template ([@silkentrance](https://github.com/silkentrance)) + +#### Committers: 2 +- Carsten Klein ([@silkentrance](https://github.com/silkentrance)) +- Fabian Beuke ([@madnight](https://github.com/madnight)) + + +## v0.0.32 (2017-03-24) + +#### :memo: Documentation +* [#106](https://github.com/raszi/node-tmp/pull/106) doc: add proper JSDoc documentation ([@raszi](https://github.com/raszi)) + +#### :house: Internal +* [#111](https://github.com/raszi/node-tmp/pull/111) test: add Windows tests ([@binki](https://github.com/binki)) +* [#110](https://github.com/raszi/node-tmp/pull/110) chore: add AppVeyor ([@binki](https://github.com/binki)) +* [#105](https://github.com/raszi/node-tmp/pull/105) chore: use const where possible ([@raszi](https://github.com/raszi)) +* [#104](https://github.com/raszi/node-tmp/pull/104) style: fix various style issues ([@raszi](https://github.com/raszi)) + +#### Committers: 2 +- KARASZI István ([@raszi](https://github.com/raszi)) +- Nathan Phillip Brink ([@binki](https://github.com/binki)) + + +## v0.0.31 (2016-11-21) + +#### :rocket: Enhancement +* [#99](https://github.com/raszi/node-tmp/pull/99) feat: add next callback functionality ([@silkentrance](https://github.com/silkentrance)) +* [#94](https://github.com/raszi/node-tmp/pull/94) feat: add options to control descriptor management ([@pabigot](https://github.com/pabigot)) + +#### :house: Internal +* [#101](https://github.com/raszi/node-tmp/pull/101) fix: Include files in the package.json ([@raszi](https://github.com/raszi)) + +#### Committers: 3 +- Carsten Klein ([@silkentrance](https://github.com/silkentrance)) +- KARASZI István ([@raszi](https://github.com/raszi)) +- Peter A. 
Bigot ([@pabigot](https://github.com/pabigot)) + + +## v0.0.30 (2016-11-01) + +#### :bug: Bug Fix +* [#96](https://github.com/raszi/node-tmp/pull/96) fix: constants for Node 6 ([@jnj16180340](https://github.com/jnj16180340)) +* [#98](https://github.com/raszi/node-tmp/pull/98) fix: garbage collector ([@Ari-H](https://github.com/Ari-H)) + +#### Committers: 2 +- Nate Johnson ([@jnj16180340](https://github.com/jnj16180340)) +- [@Ari-H](https://github.com/Ari-H) + + +## v0.0.29 (2016-09-18) + +#### :rocket: Enhancement +* [#87](https://github.com/raszi/node-tmp/pull/87) fix: replace calls to deprecated fs API functions ([@OlliV](https://github.com/OlliV)) + +#### :bug: Bug Fix +* [#70](https://github.com/raszi/node-tmp/pull/70) fix: prune `_removeObjects` correctly ([@joliss](https://github.com/joliss)) +* [#71](https://github.com/raszi/node-tmp/pull/71) Fix typo ([@gcampax](https://github.com/gcampax)) + +#### :memo: Documentation +* [#77](https://github.com/raszi/node-tmp/pull/77) docs: change mkstemps to mkstemp ([@thefourtheye](https://github.com/thefourtheye)) + +#### :house: Internal +* [#92](https://github.com/raszi/node-tmp/pull/92) chore: add Travis CI support for Node 6 ([@amilajack](https://github.com/amilajack)) +* [#79](https://github.com/raszi/node-tmp/pull/79) fix: remove unneeded require statement ([@whmountains](https://github.com/whmountains)) + +#### Committers: 6 +- Amila Welihinda ([@amilajack](https://github.com/amilajack)) +- Caleb Whiting ([@whmountains](https://github.com/whmountains)) +- Giovanni Campagna ([@gcampax](https://github.com/gcampax)) +- Jo Liss ([@joliss](https://github.com/joliss)) +- Olli Vanhoja ([@OlliV](https://github.com/OlliV)) +- Sakthipriyan Vairamani ([@thefourtheye](https://github.com/thefourtheye)) + + +## v0.0.28 (2015-09-27) + +#### :bug: Bug Fix +* [#63](https://github.com/raszi/node-tmp/pull/63) fix: delete for _rmdirRecursiveSync ([@voltrevo](https://github.com/voltrevo)) + +#### :memo: Documentation +* [#64](https://github.com/raszi/node-tmp/pull/64) docs: fix typo in the README ([@JTKnox91](https://github.com/JTKnox91)) + +#### :house: Internal +* [#67](https://github.com/raszi/node-tmp/pull/67) test: add node v4.0 v4.1 to travis config ([@raszi](https://github.com/raszi)) +* [#66](https://github.com/raszi/node-tmp/pull/66) chore(deps): update deps ([@raszi](https://github.com/raszi)) + +#### Committers: 3 +- Andrew Morris ([@voltrevo](https://github.com/voltrevo)) +- John T. 
Knox ([@JTKnox91](https://github.com/JTKnox91)) +- KARASZI István ([@raszi](https://github.com/raszi)) + + +## v0.0.27 (2015-08-15) + +#### :bug: Bug Fix +* [#60](https://github.com/raszi/node-tmp/pull/60) fix: unlinking when the file has been already removed ([@silkentrance](https://github.com/silkentrance)) + +#### :memo: Documentation +* [#55](https://github.com/raszi/node-tmp/pull/55) docs(README): update README ([@raszi](https://github.com/raszi)) + +#### :house: Internal +* [#56](https://github.com/raszi/node-tmp/pull/56) style(jshint): fix JSHint error ([@raszi](https://github.com/raszi)) +* [#53](https://github.com/raszi/node-tmp/pull/53) chore: update license attribute ([@pdehaan](https://github.com/pdehaan)) + +#### Committers: 3 +- Carsten Klein ([@silkentrance](https://github.com/silkentrance)) +- KARASZI István ([@raszi](https://github.com/raszi)) +- Peter deHaan ([@pdehaan](https://github.com/pdehaan)) + + +## v0.0.26 (2015-05-12) + +#### :rocket: Enhancement +* [#40](https://github.com/raszi/node-tmp/pull/40) Fix for #39 ([@silkentrance](https://github.com/silkentrance)) +* [#42](https://github.com/raszi/node-tmp/pull/42) Fix for #17 ([@silkentrance](https://github.com/silkentrance)) +* [#41](https://github.com/raszi/node-tmp/pull/41) Fix for #37 ([@silkentrance](https://github.com/silkentrance)) +* [#32](https://github.com/raszi/node-tmp/pull/32) add ability to customize file/dir names ([@shime](https://github.com/shime)) +* [#29](https://github.com/raszi/node-tmp/pull/29) tmp.file have responsibility to close file, not only unlink file ([@vhain](https://github.com/vhain)) + +#### :bug: Bug Fix +* [#51](https://github.com/raszi/node-tmp/pull/51) fix(windows): fix tempDir on windows ([@raszi](https://github.com/raszi)) +* [#49](https://github.com/raszi/node-tmp/pull/49) remove object from _removeObjects if cleanup fn is called Closes [#48](https://github.com/raszi/node-tmp/issues/48) ([@bmeck](https://github.com/bmeck)) + +#### :memo: Documentation +* [#45](https://github.com/raszi/node-tmp/pull/45) Fix for #44 ([@silkentrance](https://github.com/silkentrance)) + +#### :house: Internal +* [#34](https://github.com/raszi/node-tmp/pull/34) Create LICENSE ([@ScottWeinstein](https://github.com/ScottWeinstein)) + +#### Committers: 6 +- Bradley Farias ([@bmeck](https://github.com/bmeck)) +- Carsten Klein ([@silkentrance](https://github.com/silkentrance)) +- Hrvoje Šimić ([@shime](https://github.com/shime)) +- Juwan Yoo ([@vhain](https://github.com/vhain)) +- KARASZI István ([@raszi](https://github.com/raszi)) +- Scott Weinstein ([@ScottWeinstein](https://github.com/ScottWeinstein)) + + +## v0.0.24 (2014-07-11) + +#### :rocket: Enhancement +* [#25](https://github.com/raszi/node-tmp/pull/25) Added removeCallback passing ([@foxel](https://github.com/foxel)) + +#### Committers: 1 +- Andrey Kupreychik ([@foxel](https://github.com/foxel)) + + +## v0.0.23 (2013-12-03) + +#### :rocket: Enhancement +* [#21](https://github.com/raszi/node-tmp/pull/21) If we are not on node 0.8, don't register an uncaughtException handler ([@wibblymat](https://github.com/wibblymat)) + +#### Committers: 1 +- Mat Scales ([@wibblymat](https://github.com/wibblymat)) + + +## v0.0.22 (2013-11-29) + +#### :rocket: Enhancement +* [#19](https://github.com/raszi/node-tmp/pull/19) Rethrow only on node v0.8. 
([@mcollina](https://github.com/mcollina)) + +#### Committers: 1 +- Matteo Collina ([@mcollina](https://github.com/mcollina)) + + +## v0.0.21 (2013-08-07) + +#### :bug: Bug Fix +* [#16](https://github.com/raszi/node-tmp/pull/16) Fix bug where we delete contents of symlinks ([@lightsofapollo](https://github.com/lightsofapollo)) + +#### Committers: 1 +- James Lal ([@lightsofapollo](https://github.com/lightsofapollo)) + + +## v0.0.17 (2013-04-09) + +#### :rocket: Enhancement +* [#9](https://github.com/raszi/node-tmp/pull/9) add recursive remove option ([@oscar-broman](https://github.com/oscar-broman)) + +#### Committers: 1 +- [@oscar-broman](https://github.com/oscar-broman) + + +## v0.0.14 (2012-08-26) + +#### :rocket: Enhancement +* [#5](https://github.com/raszi/node-tmp/pull/5) Export _getTmpName for temporary file name creation ([@joscha](https://github.com/joscha)) + +#### Committers: 1 +- Joscha Feth ([@joscha](https://github.com/joscha)) + + +## Previous Releases < v0.0.14 + +- no information available diff --git a/node_modules/tmp/LICENSE b/node_modules/tmp/LICENSE new file mode 100644 index 00000000..72418bd9 --- /dev/null +++ b/node_modules/tmp/LICENSE @@ -0,0 +1,21 @@ +The MIT License (MIT) + +Copyright (c) 2014 KARASZI István + +Permission is hereby granted, free of charge, to any person obtaining a copy +of this software and associated documentation files (the "Software"), to deal +in the Software without restriction, including without limitation the rights +to use, copy, modify, merge, publish, distribute, sublicense, and/or sell +copies of the Software, and to permit persons to whom the Software is +furnished to do so, subject to the following conditions: + +The above copyright notice and this permission notice shall be included in all +copies or substantial portions of the Software. + +THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR +IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, +FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE +AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER +LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, +OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE +SOFTWARE. diff --git a/node_modules/tmp/README.md b/node_modules/tmp/README.md new file mode 100644 index 00000000..bb20fb7b --- /dev/null +++ b/node_modules/tmp/README.md @@ -0,0 +1,365 @@ +# Tmp + +A simple temporary file and directory creator for [node.js.][1] + +[![Build Status](https://travis-ci.org/raszi/node-tmp.svg?branch=master)](https://travis-ci.org/raszi/node-tmp) +[![Dependencies](https://david-dm.org/raszi/node-tmp.svg)](https://david-dm.org/raszi/node-tmp) +[![npm version](https://badge.fury.io/js/tmp.svg)](https://badge.fury.io/js/tmp) +[![API documented](https://img.shields.io/badge/API-documented-brightgreen.svg)](https://raszi.github.io/node-tmp/) +[![Known Vulnerabilities](https://snyk.io/test/npm/tmp/badge.svg)](https://snyk.io/test/npm/tmp) + +## About + +This is a [widely used library][2] to create temporary files and directories +in a [node.js][1] environment. + +Tmp offers both an asynchronous and a synchronous API. For all API calls, all +the parameters are optional. There also exists a promisified version of the +API, see [tmp-promise][5]. + +Tmp uses crypto for determining random file names, or, when using templates, +a six letter random identifier. 
And just in case that you do not have that much +entropy left on your system, Tmp will fall back to pseudo random numbers. + +You can set whether you want to remove the temporary file on process exit or +not. + +If you do not want to store your temporary directories and files in the +standard OS temporary directory, then you are free to override that as well. + +## An Important Note on Compatibility + +See the [CHANGELOG](./CHANGELOG.md) for more information. + +### Version 0.1.0 + +Since version 0.1.0, all support for node versions < 0.10.0 has been dropped. + +Most importantly, any support for earlier versions of node-tmp was also dropped. + +If you still require node versions < 0.10.0, then you must limit your node-tmp +dependency to versions below 0.1.0. + +### Version 0.0.33 + +Since version 0.0.33, all support for node versions < 0.8 has been dropped. + +If you still require node version 0.8, then you must limit your node-tmp +dependency to version 0.0.33. + +For node versions < 0.8 you must limit your node-tmp dependency to +versions < 0.0.33. + +## How to install + +```bash +npm install tmp +``` + +## Usage + +Please also check [API docs][4]. + +### Asynchronous file creation + +Simple temporary file creation, the file will be closed and unlinked on process exit. + +```javascript +const tmp = require('tmp'); + +tmp.file(function _tempFileCreated(err, path, fd, cleanupCallback) { + if (err) throw err; + + console.log('File: ', path); + console.log('Filedescriptor: ', fd); + + // If we don't need the file anymore we could manually call the cleanupCallback + // But that is not necessary if we didn't pass the keep option because the library + // will clean after itself. + cleanupCallback(); +}); +``` + +### Synchronous file creation + +A synchronous version of the above. + +```javascript +const tmp = require('tmp'); + +const tmpobj = tmp.fileSync(); +console.log('File: ', tmpobj.name); +console.log('Filedescriptor: ', tmpobj.fd); + +// If we don't need the file anymore we could manually call the removeCallback +// But that is not necessary if we didn't pass the keep option because the library +// will clean after itself. +tmpobj.removeCallback(); +``` + +Note that this might throw an exception if either the maximum limit of retries +for creating a temporary name fails, or, in case that you do not have the permission +to write to the directory where the temporary file should be created in. + +### Asynchronous directory creation + +Simple temporary directory creation, it will be removed on process exit. + +If the directory still contains items on process exit, then it won't be removed. + +```javascript +const tmp = require('tmp'); + +tmp.dir(function _tempDirCreated(err, path, cleanupCallback) { + if (err) throw err; + + console.log('Dir: ', path); + + // Manual cleanup + cleanupCallback(); +}); +``` + +If you want to cleanup the directory even when there are entries in it, then +you can pass the `unsafeCleanup` option when creating it. + +### Synchronous directory creation + +A synchronous version of the above. + +```javascript +const tmp = require('tmp'); + +const tmpobj = tmp.dirSync(); +console.log('Dir: ', tmpobj.name); +// Manual cleanup +tmpobj.removeCallback(); +``` + +Note that this might throw an exception if either the maximum limit of retries +for creating a temporary name fails, or, in case that you do not have the permission +to write to the directory where the temporary directory should be created in. 
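+
+For example, a minimal sketch of guarding against that:
+
+```javascript
+const tmp = require('tmp');
+
+try {
+  const tmpobj = tmp.dirSync();
+  console.log('Dir: ', tmpobj.name);
+  // Manual cleanup
+  tmpobj.removeCallback();
+} catch (err) {
+  // thrown when no unique name could be generated within the allowed tries,
+  // or when the target directory is not writable
+  console.error('Could not create temporary directory: ', err);
+}
+```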
+ +### Asynchronous filename generation + +It is possible with this library to generate a unique filename in the specified +directory. + +```javascript +const tmp = require('tmp'); + +tmp.tmpName(function _tempNameGenerated(err, path) { + if (err) throw err; + + console.log('Created temporary filename: ', path); +}); +``` + +### Synchronous filename generation + +A synchronous version of the above. + +```javascript +const tmp = require('tmp'); + +const name = tmp.tmpNameSync(); +console.log('Created temporary filename: ', name); +``` + +## Advanced usage + +### Asynchronous file creation + +Creates a file with mode `0644`, prefix will be `prefix-` and postfix will be `.txt`. + +```javascript +const tmp = require('tmp'); + +tmp.file({ mode: 0o644, prefix: 'prefix-', postfix: '.txt' }, function _tempFileCreated(err, path, fd) { + if (err) throw err; + + console.log('File: ', path); + console.log('Filedescriptor: ', fd); +}); +``` + +### Synchronous file creation + +A synchronous version of the above. + +```javascript +const tmp = require('tmp'); + +const tmpobj = tmp.fileSync({ mode: 0o644, prefix: 'prefix-', postfix: '.txt' }); +console.log('File: ', tmpobj.name); +console.log('Filedescriptor: ', tmpobj.fd); +``` + +### Controlling the Descriptor + +As a side effect of creating a unique file `tmp` gets a file descriptor that is +returned to the user as the `fd` parameter. The descriptor may be used by the +application and is closed when the `removeCallback` is invoked. + +In some use cases the application does not need the descriptor, needs to close it +without removing the file, or needs to remove the file without closing the +descriptor. Two options control how the descriptor is managed: + +* `discardDescriptor` - if `true` causes `tmp` to close the descriptor after the file + is created. In this case the `fd` parameter is undefined. +* `detachDescriptor` - if `true` causes `tmp` to return the descriptor in the `fd` + parameter, but it is the application's responsibility to close it when it is no + longer needed. + +```javascript +const tmp = require('tmp'); + +tmp.file({ discardDescriptor: true }, function _tempFileCreated(err, path, fd, cleanupCallback) { + if (err) throw err; + // fd will be undefined, allowing application to use fs.createReadStream(path) + // without holding an unused descriptor open. +}); +``` + +```javascript +const tmp = require('tmp'); + +tmp.file({ detachDescriptor: true }, function _tempFileCreated(err, path, fd, cleanupCallback) { + if (err) throw err; + + cleanupCallback(); + // Application can store data through fd here; the space used will automatically + // be reclaimed by the operating system when the descriptor is closed or program + // terminates. +}); +``` + +### Asynchronous directory creation + +Creates a directory with mode `0755`, prefix will be `myTmpDir_`. + +```javascript +const tmp = require('tmp'); + +tmp.dir({ mode: 0o750, prefix: 'myTmpDir_' }, function _tempDirCreated(err, path) { + if (err) throw err; + + console.log('Dir: ', path); +}); +``` + +### Synchronous directory creation + +Again, a synchronous version of the above. + +```javascript +const tmp = require('tmp'); + +const tmpobj = tmp.dirSync({ mode: 0750, prefix: 'myTmpDir_' }); +console.log('Dir: ', tmpobj.name); +``` + +### mkstemp like, asynchronously + +Creates a new temporary directory with mode `0700` and filename like `/tmp/tmp-nk2J1u`. + +IMPORTANT NOTE: template no longer accepts a path. 
Use the dir option instead if you +require tmp to create your temporary filesystem object in a different place than the +default `tmp.tmpdir`. + +```javascript +const tmp = require('tmp'); + +tmp.dir({ template: 'tmp-XXXXXX' }, function _tempDirCreated(err, path) { + if (err) throw err; + + console.log('Dir: ', path); +}); +``` + +### mkstemp like, synchronously + +This will behave similarly to the asynchronous version. + +```javascript +const tmp = require('tmp'); + +const tmpobj = tmp.dirSync({ template: 'tmp-XXXXXX' }); +console.log('Dir: ', tmpobj.name); +``` + +### Asynchronous filename generation + +Using `tmpName()` you can create temporary file names asynchronously. +The function accepts all standard options, e.g. `prefix`, `postfix`, `dir`, and so on. + +You can also leave out the options altogether and just call the function with a callback as first parameter. + +```javascript +const tmp = require('tmp'); + +const options = {}; + +tmp.tmpName(options, function _tempNameGenerated(err, path) { + if (err) throw err; + + console.log('Created temporary filename: ', path); +}); +``` + +### Synchronous filename generation + +The `tmpNameSync()` function works similarly to `tmpName()`. +Again, you can leave out the options altogether and just invoke the function without any parameters. + +```javascript +const tmp = require('tmp'); +const options = {}; +const tmpname = tmp.tmpNameSync(options); +console.log('Created temporary filename: ', tmpname); +``` + +## Graceful cleanup + +If graceful cleanup is set, tmp will remove all controlled temporary objects on process exit, otherwise the +temporary objects will remain in place, waiting to be cleaned up on system restart or otherwise scheduled temporary +object removal. + +To enforce this, you can call the `setGracefulCleanup()` method: + +```javascript +const tmp = require('tmp'); + +tmp.setGracefulCleanup(); +``` + +## Options + +All options are optional :) + + * `name`: a fixed name that overrides random name generation, the name must be relative and must not contain path segments + * `mode`: the file mode to create with, falls back to `0o600` on file creation and `0o700` on directory creation + * `prefix`: the optional prefix, defaults to `tmp` + * `postfix`: the optional postfix + * `template`: [`mkstemp`][3] like filename template, no default, can be either an absolute or a relative path that resolves + to a relative path of the system's default temporary directory, must include `XXXXXX` once for random name generation, e.g. + 'foo/bar/XXXXXX'. Absolute paths are also fine as long as they are relative to os.tmpdir(). + Any directories along the so specified path must exist, otherwise a ENOENT error will be thrown upon access, + as tmp will not check the availability of the path, nor will it establish the requested path for you. + * `dir`: the optional temporary directory that must be relative to the system's default temporary directory. + absolute paths are fine as long as they point to a location under the system's default temporary directory. + Any directories along the so specified path must exist, otherwise a ENOENT error will be thrown upon access, + as tmp will not check the availability of the path, nor will it establish the requested path for you. 
+ * `tmpdir`: allows you to override the system's root tmp directory + * `tries`: how many times should the function try to get a unique filename before giving up, default `3` + * `keep`: signals that the temporary file or directory should not be deleted on exit, default is `false` + * In order to clean up, you will have to call the provided `cleanupCallback` function manually. + * `unsafeCleanup`: recursively removes the created temporary directory, even when it's not empty. default is `false` + * `detachDescriptor`: detaches the file descriptor, caller is responsible for closing the file, tmp will no longer try closing the file during garbage collection + * `discardDescriptor`: discards the file descriptor (closes file, fd is -1), tmp will no longer try closing the file during garbage collection + +[1]: http://nodejs.org/ +[2]: https://www.npmjs.com/browse/depended/tmp +[3]: http://www.kernel.org/doc/man-pages/online/pages/man3/mkstemp.3.html +[4]: https://raszi.github.io/node-tmp/ +[5]: https://github.com/benjamingr/tmp-promise diff --git a/node_modules/tmp/lib/tmp.js b/node_modules/tmp/lib/tmp.js new file mode 100644 index 00000000..b41c29d4 --- /dev/null +++ b/node_modules/tmp/lib/tmp.js @@ -0,0 +1,780 @@ +/*! + * Tmp + * + * Copyright (c) 2011-2017 KARASZI Istvan + * + * MIT Licensed + */ + +/* + * Module dependencies. + */ +const fs = require('fs'); +const os = require('os'); +const path = require('path'); +const crypto = require('crypto'); +const _c = { fs: fs.constants, os: os.constants }; +const rimraf = require('rimraf'); + +/* + * The working inner variables. + */ +const + // the random characters to choose from + RANDOM_CHARS = '0123456789ABCDEFGHIJKLMNOPQRSTUVWXYZabcdefghijklmnopqrstuvwxyz', + + TEMPLATE_PATTERN = /XXXXXX/, + + DEFAULT_TRIES = 3, + + CREATE_FLAGS = (_c.O_CREAT || _c.fs.O_CREAT) | (_c.O_EXCL || _c.fs.O_EXCL) | (_c.O_RDWR || _c.fs.O_RDWR), + + // constants are off on the windows platform and will not match the actual errno codes + IS_WIN32 = os.platform() === 'win32', + EBADF = _c.EBADF || _c.os.errno.EBADF, + ENOENT = _c.ENOENT || _c.os.errno.ENOENT, + + DIR_MODE = 0o700 /* 448 */, + FILE_MODE = 0o600 /* 384 */, + + EXIT = 'exit', + + // this will hold the objects need to be removed on exit + _removeObjects = [], + + // API change in fs.rmdirSync leads to error when passing in a second parameter, e.g. the callback + FN_RMDIR_SYNC = fs.rmdirSync.bind(fs), + FN_RIMRAF_SYNC = rimraf.sync; + +let + _gracefulCleanup = false; + +/** + * Gets a temporary file name. + * + * @param {(Options|tmpNameCallback)} options options or callback + * @param {?tmpNameCallback} callback the callback function + */ +function tmpName(options, callback) { + const + args = _parseArguments(options, callback), + opts = args[0], + cb = args[1]; + + try { + _assertAndSanitizeOptions(opts); + } catch (err) { + return cb(err); + } + + let tries = opts.tries; + (function _getUniqueName() { + try { + const name = _generateTmpName(opts); + + // check whether the path exists then retry if needed + fs.stat(name, function (err) { + /* istanbul ignore else */ + if (!err) { + /* istanbul ignore else */ + if (tries-- > 0) return _getUniqueName(); + + return cb(new Error('Could not get a unique tmp filename, max tries reached ' + name)); + } + + cb(null, name); + }); + } catch (err) { + cb(err); + } + }()); +} + +/** + * Synchronous version of tmpName. 
+ * + * @param {Object} options + * @returns {string} the generated random name + * @throws {Error} if the options are invalid or could not generate a filename + */ +function tmpNameSync(options) { + const + args = _parseArguments(options), + opts = args[0]; + + _assertAndSanitizeOptions(opts); + + let tries = opts.tries; + do { + const name = _generateTmpName(opts); + try { + fs.statSync(name); + } catch (e) { + return name; + } + } while (tries-- > 0); + + throw new Error('Could not get a unique tmp filename, max tries reached'); +} + +/** + * Creates and opens a temporary file. + * + * @param {(Options|null|undefined|fileCallback)} options the config options or the callback function or null or undefined + * @param {?fileCallback} callback + */ +function file(options, callback) { + const + args = _parseArguments(options, callback), + opts = args[0], + cb = args[1]; + + // gets a temporary filename + tmpName(opts, function _tmpNameCreated(err, name) { + /* istanbul ignore else */ + if (err) return cb(err); + + // create and open the file + fs.open(name, CREATE_FLAGS, opts.mode || FILE_MODE, function _fileCreated(err, fd) { + /* istanbu ignore else */ + if (err) return cb(err); + + if (opts.discardDescriptor) { + return fs.close(fd, function _discardCallback(possibleErr) { + // the chance of getting an error on close here is rather low and might occur in the most edgiest cases only + return cb(possibleErr, name, undefined, _prepareTmpFileRemoveCallback(name, -1, opts, false)); + }); + } else { + // detachDescriptor passes the descriptor whereas discardDescriptor closes it, either way, we no longer care + // about the descriptor + const discardOrDetachDescriptor = opts.discardDescriptor || opts.detachDescriptor; + cb(null, name, fd, _prepareTmpFileRemoveCallback(name, discardOrDetachDescriptor ? -1 : fd, opts, false)); + } + }); + }); +} + +/** + * Synchronous version of file. + * + * @param {Options} options + * @returns {FileSyncObject} object consists of name, fd and removeCallback + * @throws {Error} if cannot create a file + */ +function fileSync(options) { + const + args = _parseArguments(options), + opts = args[0]; + + const discardOrDetachDescriptor = opts.discardDescriptor || opts.detachDescriptor; + const name = tmpNameSync(opts); + var fd = fs.openSync(name, CREATE_FLAGS, opts.mode || FILE_MODE); + /* istanbul ignore else */ + if (opts.discardDescriptor) { + fs.closeSync(fd); + fd = undefined; + } + + return { + name: name, + fd: fd, + removeCallback: _prepareTmpFileRemoveCallback(name, discardOrDetachDescriptor ? -1 : fd, opts, true) + }; +} + +/** + * Creates a temporary directory. + * + * @param {(Options|dirCallback)} options the options or the callback function + * @param {?dirCallback} callback + */ +function dir(options, callback) { + const + args = _parseArguments(options, callback), + opts = args[0], + cb = args[1]; + + // gets a temporary filename + tmpName(opts, function _tmpNameCreated(err, name) { + /* istanbul ignore else */ + if (err) return cb(err); + + // create the directory + fs.mkdir(name, opts.mode || DIR_MODE, function _dirCreated(err) { + /* istanbul ignore else */ + if (err) return cb(err); + + cb(null, name, _prepareTmpDirRemoveCallback(name, opts, false)); + }); + }); +} + +/** + * Synchronous version of dir. 
+ * + * @param {Options} options + * @returns {DirSyncObject} object consists of name and removeCallback + * @throws {Error} if it cannot create a directory + */ +function dirSync(options) { + const + args = _parseArguments(options), + opts = args[0]; + + const name = tmpNameSync(opts); + fs.mkdirSync(name, opts.mode || DIR_MODE); + + return { + name: name, + removeCallback: _prepareTmpDirRemoveCallback(name, opts, true) + }; +} + +/** + * Removes files asynchronously. + * + * @param {Object} fdPath + * @param {Function} next + * @private + */ +function _removeFileAsync(fdPath, next) { + const _handler = function (err) { + if (err && !_isENOENT(err)) { + // reraise any unanticipated error + return next(err); + } + next(); + }; + + if (0 <= fdPath[0]) + fs.close(fdPath[0], function () { + fs.unlink(fdPath[1], _handler); + }); + else fs.unlink(fdPath[1], _handler); +} + +/** + * Removes files synchronously. + * + * @param {Object} fdPath + * @private + */ +function _removeFileSync(fdPath) { + let rethrownException = null; + try { + if (0 <= fdPath[0]) fs.closeSync(fdPath[0]); + } catch (e) { + // reraise any unanticipated error + if (!_isEBADF(e) && !_isENOENT(e)) throw e; + } finally { + try { + fs.unlinkSync(fdPath[1]); + } + catch (e) { + // reraise any unanticipated error + if (!_isENOENT(e)) rethrownException = e; + } + } + if (rethrownException !== null) { + throw rethrownException; + } +} + +/** + * Prepares the callback for removal of the temporary file. + * + * Returns either a sync callback or a async callback depending on whether + * fileSync or file was called, which is expressed by the sync parameter. + * + * @param {string} name the path of the file + * @param {number} fd file descriptor + * @param {Object} opts + * @param {boolean} sync + * @returns {fileCallback | fileCallbackSync} + * @private + */ +function _prepareTmpFileRemoveCallback(name, fd, opts, sync) { + const removeCallbackSync = _prepareRemoveCallback(_removeFileSync, [fd, name], sync); + const removeCallback = _prepareRemoveCallback(_removeFileAsync, [fd, name], sync, removeCallbackSync); + + if (!opts.keep) _removeObjects.unshift(removeCallbackSync); + + return sync ? removeCallbackSync : removeCallback; +} + +/** + * Prepares the callback for removal of the temporary directory. + * + * Returns either a sync callback or a async callback depending on whether + * tmpFileSync or tmpFile was called, which is expressed by the sync parameter. + * + * @param {string} name + * @param {Object} opts + * @param {boolean} sync + * @returns {Function} the callback + * @private + */ +function _prepareTmpDirRemoveCallback(name, opts, sync) { + const removeFunction = opts.unsafeCleanup ? rimraf : fs.rmdir.bind(fs); + const removeFunctionSync = opts.unsafeCleanup ? FN_RIMRAF_SYNC : FN_RMDIR_SYNC; + const removeCallbackSync = _prepareRemoveCallback(removeFunctionSync, name, sync); + const removeCallback = _prepareRemoveCallback(removeFunction, name, sync, removeCallbackSync); + if (!opts.keep) _removeObjects.unshift(removeCallbackSync); + + return sync ? removeCallbackSync : removeCallback; +} + +/** + * Creates a guarded function wrapping the removeFunction call. + * + * The cleanup callback is save to be called multiple times. + * Subsequent invocations will be ignored. 
+ * + * @param {Function} removeFunction + * @param {string} fileOrDirName + * @param {boolean} sync + * @param {cleanupCallbackSync?} cleanupCallbackSync + * @returns {cleanupCallback | cleanupCallbackSync} + * @private + */ +function _prepareRemoveCallback(removeFunction, fileOrDirName, sync, cleanupCallbackSync) { + let called = false; + + // if sync is true, the next parameter will be ignored + return function _cleanupCallback(next) { + + /* istanbul ignore else */ + if (!called) { + // remove cleanupCallback from cache + const toRemove = cleanupCallbackSync || _cleanupCallback; + const index = _removeObjects.indexOf(toRemove); + /* istanbul ignore else */ + if (index >= 0) _removeObjects.splice(index, 1); + + called = true; + if (sync || removeFunction === FN_RMDIR_SYNC || removeFunction === FN_RIMRAF_SYNC) { + return removeFunction(fileOrDirName); + } else { + return removeFunction(fileOrDirName, next || function() {}); + } + } + }; +} + +/** + * The garbage collector. + * + * @private + */ +function _garbageCollector() { + /* istanbul ignore else */ + if (!_gracefulCleanup) return; + + // the function being called removes itself from _removeObjects, + // loop until _removeObjects is empty + while (_removeObjects.length) { + try { + _removeObjects[0](); + } catch (e) { + // already removed? + } + } +} + +/** + * Random name generator based on crypto. + * Adapted from http://blog.tompawlak.org/how-to-generate-random-values-nodejs-javascript + * + * @param {number} howMany + * @returns {string} the generated random name + * @private + */ +function _randomChars(howMany) { + let + value = [], + rnd = null; + + // make sure that we do not fail because we ran out of entropy + try { + rnd = crypto.randomBytes(howMany); + } catch (e) { + rnd = crypto.pseudoRandomBytes(howMany); + } + + for (var i = 0; i < howMany; i++) { + value.push(RANDOM_CHARS[rnd[i] % RANDOM_CHARS.length]); + } + + return value.join(''); +} + +/** + * Helper which determines whether a string s is blank, that is undefined, or empty or null. + * + * @private + * @param {string} s + * @returns {Boolean} true whether the string s is blank, false otherwise + */ +function _isBlank(s) { + return s === null || _isUndefined(s) || !s.trim(); +} + +/** + * Checks whether the `obj` parameter is defined or not. + * + * @param {Object} obj + * @returns {boolean} true if the object is undefined + * @private + */ +function _isUndefined(obj) { + return typeof obj === 'undefined'; +} + +/** + * Parses the function arguments. + * + * This function helps to have optional arguments. + * + * @param {(Options|null|undefined|Function)} options + * @param {?Function} callback + * @returns {Array} parsed arguments + * @private + */ +function _parseArguments(options, callback) { + /* istanbul ignore else */ + if (typeof options === 'function') { + return [{}, options]; + } + + /* istanbul ignore else */ + if (_isUndefined(options)) { + return [{}, callback]; + } + + // copy options so we do not leak the changes we make internally + const actualOptions = {}; + for (const key of Object.getOwnPropertyNames(options)) { + actualOptions[key] = options[key]; + } + + return [actualOptions, callback]; +} + +/** + * Generates a new temporary name. 
+ * + * @param {Object} opts + * @returns {string} the new random name according to opts + * @private + */ +function _generateTmpName(opts) { + + const tmpDir = opts.tmpdir; + + /* istanbul ignore else */ + if (!_isUndefined(opts.name)) + return path.join(tmpDir, opts.dir, opts.name); + + /* istanbul ignore else */ + if (!_isUndefined(opts.template)) + return path.join(tmpDir, opts.dir, opts.template).replace(TEMPLATE_PATTERN, _randomChars(6)); + + // prefix and postfix + const name = [ + opts.prefix ? opts.prefix : 'tmp', + '-', + process.pid, + '-', + _randomChars(12), + opts.postfix ? '-' + opts.postfix : '' + ].join(''); + + return path.join(tmpDir, opts.dir, name); +} + +/** + * Asserts whether the specified options are valid, also sanitizes options and provides sane defaults for missing + * options. + * + * @param {Options} options + * @private + */ +function _assertAndSanitizeOptions(options) { + + options.tmpdir = _getTmpDir(options); + + const tmpDir = options.tmpdir; + + /* istanbul ignore else */ + if (!_isUndefined(options.name)) + _assertIsRelative(options.name, 'name', tmpDir); + /* istanbul ignore else */ + if (!_isUndefined(options.dir)) + _assertIsRelative(options.dir, 'dir', tmpDir); + /* istanbul ignore else */ + if (!_isUndefined(options.template)) { + _assertIsRelative(options.template, 'template', tmpDir); + if (!options.template.match(TEMPLATE_PATTERN)) + throw new Error(`Invalid template, found "${options.template}".`); + } + /* istanbul ignore else */ + if (!_isUndefined(options.tries) && isNaN(options.tries) || options.tries < 0) + throw new Error(`Invalid tries, found "${options.tries}".`); + + // if a name was specified we will try once + options.tries = _isUndefined(options.name) ? options.tries || DEFAULT_TRIES : 1; + options.keep = !!options.keep; + options.detachDescriptor = !!options.detachDescriptor; + options.discardDescriptor = !!options.discardDescriptor; + options.unsafeCleanup = !!options.unsafeCleanup; + + // sanitize dir, also keep (multiple) blanks if the user, purportedly sane, requests us to + options.dir = _isUndefined(options.dir) ? '' : path.relative(tmpDir, _resolvePath(options.dir, tmpDir)); + options.template = _isUndefined(options.template) ? undefined : path.relative(tmpDir, _resolvePath(options.template, tmpDir)); + // sanitize further if template is relative to options.dir + options.template = _isBlank(options.template) ? undefined : path.relative(options.dir, options.template); + + // for completeness' sake only, also keep (multiple) blanks if the user, purportedly sane, requests us to + options.name = _isUndefined(options.name) ? undefined : _sanitizeName(options.name); + options.prefix = _isUndefined(options.prefix) ? '' : options.prefix; + options.postfix = _isUndefined(options.postfix) ? '' : options.postfix; +} + +/** + * Resolve the specified path name in respect to tmpDir. + * + * The specified name might include relative path components, e.g. ../ + * so we need to resolve in order to be sure that is is located inside tmpDir + * + * @param name + * @param tmpDir + * @returns {string} + * @private + */ +function _resolvePath(name, tmpDir) { + const sanitizedName = _sanitizeName(name); + if (sanitizedName.startsWith(tmpDir)) { + return path.resolve(sanitizedName); + } else { + return path.resolve(path.join(tmpDir, sanitizedName)); + } +} + +/** + * Sanitize the specified path name by removing all quote characters. 
+ * + * @param name + * @returns {string} + * @private + */ +function _sanitizeName(name) { + if (_isBlank(name)) { + return name; + } + return name.replace(/["']/g, ''); +} + +/** + * Asserts whether specified name is relative to the specified tmpDir. + * + * @param {string} name + * @param {string} option + * @param {string} tmpDir + * @throws {Error} + * @private + */ +function _assertIsRelative(name, option, tmpDir) { + if (option === 'name') { + // assert that name is not absolute and does not contain a path + if (path.isAbsolute(name)) + throw new Error(`${option} option must not contain an absolute path, found "${name}".`); + // must not fail on valid . or .. or similar such constructs + let basename = path.basename(name); + if (basename === '..' || basename === '.' || basename !== name) + throw new Error(`${option} option must not contain a path, found "${name}".`); + } + else { // if (option === 'dir' || option === 'template') { + // assert that dir or template are relative to tmpDir + if (path.isAbsolute(name) && !name.startsWith(tmpDir)) { + throw new Error(`${option} option must be relative to "${tmpDir}", found "${name}".`); + } + let resolvedPath = _resolvePath(name, tmpDir); + if (!resolvedPath.startsWith(tmpDir)) + throw new Error(`${option} option must be relative to "${tmpDir}", found "${resolvedPath}".`); + } +} + +/** + * Helper for testing against EBADF to compensate changes made to Node 7.x under Windows. + * + * @private + */ +function _isEBADF(error) { + return _isExpectedError(error, -EBADF, 'EBADF'); +} + +/** + * Helper for testing against ENOENT to compensate changes made to Node 7.x under Windows. + * + * @private + */ +function _isENOENT(error) { + return _isExpectedError(error, -ENOENT, 'ENOENT'); +} + +/** + * Helper to determine whether the expected error code matches the actual code and errno, + * which will differ between the supported node versions. + * + * - Node >= 7.0: + * error.code {string} + * error.errno {number} any numerical value will be negated + * + * CAVEAT + * + * On windows, the errno for EBADF is -4083 but os.constants.errno.EBADF is different and we must assume that ENOENT + * is no different here. + * + * @param {SystemError} error + * @param {number} errno + * @param {string} code + * @private + */ +function _isExpectedError(error, errno, code) { + return IS_WIN32 ? error.code === code : error.code === code && error.errno === errno; +} + +/** + * Sets the graceful cleanup. + * + * If graceful cleanup is set, tmp will remove all controlled temporary objects on process exit, otherwise the + * temporary objects will remain in place, waiting to be cleaned up on system restart or otherwise scheduled temporary + * object removals. + */ +function setGracefulCleanup() { + _gracefulCleanup = true; +} + +/** + * Returns the currently configured tmp dir from os.tmpdir(). + * + * @private + * @param {?Options} options + * @returns {string} the currently configured tmp dir + */ +function _getTmpDir(options) { + return path.resolve(_sanitizeName(options && options.tmpdir || os.tmpdir())); +} + +// Install process exit listener +process.addListener(EXIT, _garbageCollector); + +/** + * Configuration options. 
+ * + * @typedef {Object} Options + * @property {?boolean} keep the temporary object (file or dir) will not be garbage collected + * @property {?number} tries the number of tries before giving up on name generation + * @property {?number} mode the access mode, defaults are 0o700 for directories and 0o600 for files + * @property {?string} template the "mkstemp" like filename template + * @property {?string} name fixed name relative to tmpdir or the specified dir option + * @property {?string} dir tmp directory relative to the root tmp directory in use + * @property {?string} prefix prefix for the generated name + * @property {?string} postfix postfix for the generated name + * @property {?string} tmpdir the root tmp directory which overrides the os tmpdir + * @property {?boolean} unsafeCleanup recursively removes the created temporary directory, even when it's not empty + * @property {?boolean} detachDescriptor detaches the file descriptor, caller is responsible for closing the file, tmp will no longer try closing the file during garbage collection + * @property {?boolean} discardDescriptor discards the file descriptor (closes file, fd is -1), tmp will no longer try closing the file during garbage collection + */ + +/** + * @typedef {Object} FileSyncObject + * @property {string} name the name of the file + * @property {string} fd the file descriptor or -1 if the fd has been discarded + * @property {fileCallback} removeCallback the callback function to remove the file + */ + +/** + * @typedef {Object} DirSyncObject + * @property {string} name the name of the directory + * @property {fileCallback} removeCallback the callback function to remove the directory + */ + +/** + * @callback tmpNameCallback + * @param {?Error} err the error object if anything goes wrong + * @param {string} name the temporary file name + */ + +/** + * @callback fileCallback + * @param {?Error} err the error object if anything goes wrong + * @param {string} name the temporary file name + * @param {number} fd the file descriptor or -1 if the fd has been discarded + * @param {cleanupCallback} fn the cleanup callback function + */ + +/** + * @callback fileCallbackSync + * @param {?Error} err the error object if anything goes wrong + * @param {string} name the temporary file name + * @param {number} fd the file descriptor or -1 if the fd has been discarded + * @param {cleanupCallbackSync} fn the cleanup callback function + */ + +/** + * @callback dirCallback + * @param {?Error} err the error object if anything goes wrong + * @param {string} name the temporary file name + * @param {cleanupCallback} fn the cleanup callback function + */ + +/** + * @callback dirCallbackSync + * @param {?Error} err the error object if anything goes wrong + * @param {string} name the temporary file name + * @param {cleanupCallbackSync} fn the cleanup callback function + */ + +/** + * Removes the created temporary file or directory. + * + * @callback cleanupCallback + * @param {simpleCallback} [next] function to call whenever the tmp object needs to be removed + */ + +/** + * Removes the created temporary file or directory. + * + * @callback cleanupCallbackSync + */ + +/** + * Callback function for function composition.
+ * @see {@link https://github.com/raszi/node-tmp/issues/57|raszi/node-tmp#57} + * + * @callback simpleCallback + */ + +// exporting all the needed methods + +// evaluate _getTmpDir() lazily, mainly for simplifying testing but it also will +// allow users to reconfigure the temporary directory +Object.defineProperty(module.exports, 'tmpdir', { + enumerable: true, + configurable: false, + get: function () { + return _getTmpDir(); + } +}); + +module.exports.dir = dir; +module.exports.dirSync = dirSync; + +module.exports.file = file; +module.exports.fileSync = fileSync; + +module.exports.tmpName = tmpName; +module.exports.tmpNameSync = tmpNameSync; + +module.exports.setGracefulCleanup = setGracefulCleanup; diff --git a/node_modules/tmp/package.json b/node_modules/tmp/package.json new file mode 100644 index 00000000..d98a9108 --- /dev/null +++ b/node_modules/tmp/package.json @@ -0,0 +1,58 @@ +{ + "name": "tmp", + "version": "0.2.1", + "description": "Temporary file and directory creator", + "author": "KARASZI István (http://raszi.hu/)", + "contributors": [ + "Carsten Klein (https://github.com/silkentrance)" + ], + "keywords": [ + "temporary", + "tmp", + "temp", + "tempdir", + "tempfile", + "tmpdir", + "tmpfile" + ], + "license": "MIT", + "repository": "https://github.com/raszi/node-tmp.git", + "homepage": "http://github.com/raszi/node-tmp", + "bugs": { + "url": "http://github.com/raszi/node-tmp/issues" + }, + "engines": { + "node": ">=8.17.0" + }, + "dependencies": { + "rimraf": "^3.0.0" + }, + "devDependencies": { + "eslint": "^6.3.0", + "eslint-plugin-mocha": "^6.1.1", + "istanbul": "^0.4.5", + "lerna-changelog": "^1.0.1", + "mocha": "^6.2.0" + }, + "main": "lib/tmp.js", + "files": [ + "lib/" + ], + "changelog": { + "labels": { + "breaking": ":boom: Breaking Change", + "enhancement": ":rocket: Enhancement", + "bug": ":bug: Bug Fix", + "documentation": ":memo: Documentation", + "internal": ":house: Internal" + }, + "cacheDir": ".changelog" + }, + "scripts": { + "changelog": "lerna-changelog", + "lint": "eslint lib --env mocha test", + "clean": "rm -Rf ./coverage", + "test": "npm run clean && istanbul cover ./node_modules/mocha/bin/_mocha --report none --print none --dir ./coverage/json -u exports -R test/*-test.js && istanbul report --root ./coverage/json html && istanbul report text-summary", + "doc": "jsdoc -c .jsdoc.json" + } +} diff --git a/node_modules/tunnel/.idea/encodings.xml b/node_modules/tunnel/.idea/encodings.xml new file mode 100644 index 00000000..97626ba4 --- /dev/null +++ b/node_modules/tunnel/.idea/encodings.xml @@ -0,0 +1,6 @@ + + + + + + \ No newline at end of file diff --git a/node_modules/tunnel/.idea/modules.xml b/node_modules/tunnel/.idea/modules.xml new file mode 100644 index 00000000..27bf8882 --- /dev/null +++ b/node_modules/tunnel/.idea/modules.xml @@ -0,0 +1,8 @@ + + + + + + + + \ No newline at end of file diff --git a/node_modules/tunnel/.idea/node-tunnel.iml b/node_modules/tunnel/.idea/node-tunnel.iml new file mode 100644 index 00000000..24643cc3 --- /dev/null +++ b/node_modules/tunnel/.idea/node-tunnel.iml @@ -0,0 +1,12 @@ + + + + + + + + + + + + \ No newline at end of file diff --git a/node_modules/tunnel/.idea/vcs.xml b/node_modules/tunnel/.idea/vcs.xml new file mode 100644 index 00000000..94a25f7f --- /dev/null +++ b/node_modules/tunnel/.idea/vcs.xml @@ -0,0 +1,6 @@ + + + + + + \ No newline at end of file diff --git a/node_modules/tunnel/.idea/workspace.xml b/node_modules/tunnel/.idea/workspace.xml new file mode 100644 index 00000000..1a318c8f --- /dev/null 
+++ b/node_modules/tunnel/.idea/workspace.xml @@ -0,0 +1,797 @@ [797 added lines of IntelliJ IDEA workspace.xml omitted: the XML markup was stripped during extraction and nothing meaningful remains]
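The vendored `tmp@0.2.1` module diffed above exports `file`/`fileSync`, `dir`/`dirSync`, `tmpName`/`tmpNameSync`, `setGracefulCleanup`, and a lazily evaluated `tmpdir` getter. As a minimal sketch for reviewers, the snippet below shows how that API is typically consumed, based only on the exports and the `Options`/callback typedefs visible in this diff; the option values, file prefix, and log messages are illustrative and not part of this change.

```js
// Minimal usage sketch of the vendored tmp@0.2.1 API (illustrative only, not part of the diff).
const tmp = require('tmp');

// Ask tmp to remove all still-tracked temporary objects on process exit.
tmp.setGracefulCleanup();

// Asynchronous temporary file; the options map onto the Options typedef above.
tmp.file({ prefix: 'matrix-outputs', postfix: '.json', tries: 3 }, (err, name, fd, cleanupCallback) => {
  if (err) throw err;
  console.log('temporary file:', name, 'fd:', fd);
  cleanupCallback(); // remove the file as soon as it is no longer needed
});

// Synchronous temporary directory; unsafeCleanup lets removeCallback delete it even when non-empty.
const dir = tmp.dirSync({ template: 'matrix-XXXXXX', unsafeCleanup: true });
console.log('temporary directory:', dir.name);
dir.removeCallback();
```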