diff --git a/.github/workflows/check-dist.yml b/.github/workflows/check-dist.yml index c9522913..2e8dd8c3 100644 --- a/.github/workflows/check-dist.yml +++ b/.github/workflows/check-dist.yml @@ -1,4 +1,4 @@ -name: Check dist/ +name: Check dist on: push: diff --git a/.github/workflows/e2e-cache.yml b/.github/workflows/e2e-cache.yml index fab3dcd7..ba3af16b 100644 --- a/.github/workflows/e2e-cache.yml +++ b/.github/workflows/e2e-cache.yml @@ -134,3 +134,112 @@ jobs: - name: Verify node and yarn run: __tests__/verify-node.sh "${{ matrix.node-version }}" shell: bash + + yarn-subprojects: + name: Test yarn subprojects + strategy: + matrix: + node-version: [12, 14, 16] + runs-on: ubuntu-latest + + steps: + - uses: actions/checkout@v3 + + - name: prepare sub-projects + run: __tests__/prepare-yarn-subprojects.sh yarn1 + + # expect + # - no errors + # - log + # ##[debug]Cache Paths: + # ##[debug]["sub2/.yarn/cache","sub3/.yarn/cache","../../../.cache/yarn/v6"] + - name: Setup Node + uses: ./ + with: + node-version: ${{ matrix.node-version }} + cache: 'yarn' + cache-dependency-path: | + **/*.lock + yarn.lock + + yarn-subprojects-berry-local: + name: Test yarn subprojects all locally managed + strategy: + matrix: + node-version: [12, 14, 16] + runs-on: ubuntu-latest + + steps: + - uses: actions/checkout@v3 + + - name: prepare sub-projects + run: __tests__/prepare-yarn-subprojects.sh keepcache keepcache + + # expect + # - no errors + # - log + # ##[info]All dependencies are managed locally by yarn3, the previous cache can be used + # ##[debug]["node-cache-Linux-yarn-401024703386272f1a950c9f014cbb1bb79a7a5b6e1fb00e8b90d06734af41ee","node-cache-Linux-yarn"] + - name: Setup Node + uses: ./ + with: + node-version: ${{ matrix.node-version }} + cache: 'yarn' + cache-dependency-path: | + sub2/*.lock + sub3/*.lock + + yarn-subprojects-berry-global: + name: Test yarn subprojects some locally managed + strategy: + matrix: + node-version: [12, 14, 16] + runs-on: ubuntu-latest + + steps: + - uses: actions/checkout@v3 + + - name: prepare sub-projects + run: __tests__/prepare-yarn-subprojects.sh global + + # expect + # - no errors + # - log must + # ##[debug]"/home/runner/work/setup-node-test/setup-node-test/sub2" dependencies are managed by yarn 3 locally + # ##[debug]"/home/runner/work/setup-node-test/setup-node-test/sub3" dependencies are not managed by yarn 3 locally + - name: Setup Node + uses: ./ + with: + node-version: ${{ matrix.node-version }} + cache: 'yarn' + cache-dependency-path: | + sub2/*.lock + sub3/*.lock + + yarn-subprojects-berry-git: + name: Test yarn subprojects managed by git + strategy: + matrix: + node-version: [12, 14, 16] + runs-on: ubuntu-latest + + steps: + - uses: actions/checkout@v3 + + - name: prepare sub-projects + run: /bin/bash __tests__/prepare-yarn-subprojects.sh keepcache + + # expect + # - no errors + # - log + # [debug]"/home/runner/work/setup-node-test/setup-node-test/sub2" has .yarn/cache - dependencies are kept in the repository + # [debug]"/home/runner/work/setup-node-test/setup-node-test/sub3" has .yarn/cache - dependencies are kept in the repository + # [debug]["node-cache-Linux-yarn-401024703386272f1a950c9f014cbb1bb79a7a5b6e1fb00e8b90d06734af41ee"] + - name: Setup Node + uses: ./ + with: + node-version: ${{ matrix.node-version }} + cache: 'yarn' + cache-dependency-path: | + sub2/*.lock + sub3/*.lock diff --git a/.github/workflows/versions.yml b/.github/workflows/versions.yml index 082550ca..2d72d409 100644 --- a/.github/workflows/versions.yml +++ 
b/.github/workflows/versions.yml @@ -158,7 +158,8 @@ jobs: fail-fast: false matrix: os: [ubuntu-latest, windows-latest, macos-latest] - node-version-file: [.nvmrc, .tool-versions, package.json] + node-version-file: + [.nvmrc, .tool-versions, .tool-versions-node, package.json] steps: - uses: actions/checkout@v3 - name: Remove volta from package.json diff --git a/.licenses/npm/@actions/cache.dep.yml b/.licenses/npm/@actions/cache.dep.yml index d9a80f6b..0ef5c3f6 100644 --- a/.licenses/npm/@actions/cache.dep.yml +++ b/.licenses/npm/@actions/cache.dep.yml @@ -1,6 +1,6 @@ --- name: "@actions/cache" -version: 3.0.4 +version: 3.2.1 type: npm summary: Actions cache lib homepage: https://github.com/actions/toolkit/tree/main/packages/cache diff --git a/.licenses/npm/@azure/abort-controller.dep.yml b/.licenses/npm/@azure/abort-controller.dep.yml index f303d5c3..b19b8f7a 100644 --- a/.licenses/npm/@azure/abort-controller.dep.yml +++ b/.licenses/npm/@azure/abort-controller.dep.yml @@ -1,9 +1,9 @@ --- name: "@azure/abort-controller" -version: 1.0.4 +version: 1.1.0 type: npm summary: Microsoft Azure SDK for JavaScript - Aborter -homepage: https://github.com/Azure/azure-sdk-for-js/tree/master/sdk/core/abort-controller/README.md +homepage: https://github.com/Azure/azure-sdk-for-js/tree/main/sdk/core/abort-controller/README.md license: mit licenses: - sources: LICENSE diff --git a/.licenses/npm/@azure/ms-rest-js.dep.yml b/.licenses/npm/@azure/ms-rest-js.dep.yml index 869e765a..762fcdb1 100644 --- a/.licenses/npm/@azure/ms-rest-js.dep.yml +++ b/.licenses/npm/@azure/ms-rest-js.dep.yml @@ -1,6 +1,6 @@ --- name: "@azure/ms-rest-js" -version: 2.6.6 +version: 2.7.0 type: npm summary: Isomorphic client Runtime for Typescript/node.js/browser javascript client libraries generated using AutoRest diff --git a/.licenses/npm/ip-regex.dep.yml b/.licenses/npm/ip-regex.dep.yml deleted file mode 100644 index 95d4b6b5..00000000 --- a/.licenses/npm/ip-regex.dep.yml +++ /dev/null @@ -1,34 +0,0 @@ ---- -name: ip-regex -version: 2.1.0 -type: npm -summary: Regular expression for matching IP addresses (IPv4 & IPv6) -homepage: https://github.com/sindresorhus/ip-regex#readme -license: mit -licenses: -- sources: license - text: | - The MIT License (MIT) - - Copyright (c) Sindre Sorhus (sindresorhus.com) - - Permission is hereby granted, free of charge, to any person obtaining a copy - of this software and associated documentation files (the "Software"), to deal - in the Software without restriction, including without limitation the rights - to use, copy, modify, merge, publish, distribute, sublicense, and/or sell - copies of the Software, and to permit persons to whom the Software is - furnished to do so, subject to the following conditions: - - The above copyright notice and this permission notice shall be included in - all copies or substantial portions of the Software. - - THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR - IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, - FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE - AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER - LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, - OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN - THE SOFTWARE. 
-- sources: readme.md - text: MIT © [Sindre Sorhus](https://sindresorhus.com) -notices: [] diff --git a/.licenses/npm/psl.dep.yml b/.licenses/npm/psl.dep.yml deleted file mode 100644 index 385e9aac..00000000 --- a/.licenses/npm/psl.dep.yml +++ /dev/null @@ -1,43 +0,0 @@ ---- -name: psl -version: 1.8.0 -type: npm -summary: Domain name parser based on the Public Suffix List -homepage: https://github.com/lupomontero/psl#readme -license: mit -licenses: -- sources: LICENSE - text: | - The MIT License (MIT) - - Copyright (c) 2017 Lupo Montero lupomontero@gmail.com - - Permission is hereby granted, free of charge, to any person obtaining a copy of this software and associated documentation files (the "Software"), to deal in the Software without restriction, including without limitation the rights to use, copy, modify, merge, publish, distribute, sublicense, and/or sell copies of the Software, and to permit persons to whom the Software is furnished to do so, subject to the following conditions: - - The above copyright notice and this permission notice shall be included in all copies or substantial portions of the Software. - - THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE SOFTWARE. -- sources: README.md - text: |- - The MIT License (MIT) - - Copyright (c) 2017 Lupo Montero - - Permission is hereby granted, free of charge, to any person obtaining a copy - of this software and associated documentation files (the "Software"), to deal - in the Software without restriction, including without limitation the rights - to use, copy, modify, merge, publish, distribute, sublicense, and/or sell - copies of the Software, and to permit persons to whom the Software is - furnished to do so, subject to the following conditions: - - The above copyright notice and this permission notice shall be included in - all copies or substantial portions of the Software. - - THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR - IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, - FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE - AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER - LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, - OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN - THE SOFTWARE. -notices: [] diff --git a/.licenses/npm/punycode.dep.yml b/.licenses/npm/punycode.dep.yml deleted file mode 100644 index 4a9547e6..00000000 --- a/.licenses/npm/punycode.dep.yml +++ /dev/null @@ -1,34 +0,0 @@ ---- -name: punycode -version: 2.1.1 -type: npm -summary: A robust Punycode converter that fully complies to RFC 3492 and RFC 5891, - and works on nearly all JavaScript platforms. 
-homepage: https://mths.be/punycode -license: mit -licenses: -- sources: LICENSE-MIT.txt - text: | - Copyright Mathias Bynens - - Permission is hereby granted, free of charge, to any person obtaining - a copy of this software and associated documentation files (the - "Software"), to deal in the Software without restriction, including - without limitation the rights to use, copy, modify, merge, publish, - distribute, sublicense, and/or sell copies of the Software, and to - permit persons to whom the Software is furnished to do so, subject to - the following conditions: - - The above copyright notice and this permission notice shall be - included in all copies or substantial portions of the Software. - - THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, - EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF - MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND - NONINFRINGEMENT. IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT HOLDERS BE - LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY, WHETHER IN AN ACTION - OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, OUT OF OR IN CONNECTION - WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE SOFTWARE. -- sources: README.md - text: Punycode.js is available under the [MIT](https://mths.be/mit) license. -notices: [] diff --git a/.licenses/npm/semver-6.1.2.dep.yml b/.licenses/npm/semver-6.3.1.dep.yml similarity index 93% rename from .licenses/npm/semver-6.1.2.dep.yml rename to .licenses/npm/semver-6.3.1.dep.yml index 761a3f37..248cb030 100644 --- a/.licenses/npm/semver-6.1.2.dep.yml +++ b/.licenses/npm/semver-6.3.1.dep.yml @@ -1,9 +1,9 @@ --- name: semver -version: 6.1.2 +version: 6.3.1 type: npm summary: The semantic version parser used by npm. -homepage: https://github.com/npm/node-semver#readme +homepage: license: isc licenses: - sources: LICENSE diff --git a/.licenses/npm/tough-cookie.dep.yml b/.licenses/npm/tough-cookie.dep.yml deleted file mode 100644 index 1496c109..00000000 --- a/.licenses/npm/tough-cookie.dep.yml +++ /dev/null @@ -1,23 +0,0 @@ ---- -name: tough-cookie -version: 3.0.1 -type: npm -summary: RFC6265 Cookies and Cookie Jar for node.js -homepage: https://github.com/salesforce/tough-cookie -license: bsd-3-clause -licenses: -- sources: LICENSE - text: | - Copyright (c) 2015, Salesforce.com, Inc. - All rights reserved. - - Redistribution and use in source and binary forms, with or without modification, are permitted provided that the following conditions are met: - - 1. Redistributions of source code must retain the above copyright notice, this list of conditions and the following disclaimer. - - 2. Redistributions in binary form must reproduce the above copyright notice, this list of conditions and the following disclaimer in the documentation and/or other materials provided with the distribution. - - 3. Neither the name of Salesforce.com nor the names of its contributors may be used to endorse or promote products derived from this software without specific prior written permission. - - THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE ARE DISCLAIMED. 
IN NO EVENT SHALL THE COPYRIGHT HOLDER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE. -notices: [] diff --git a/README.md b/README.md index 362bd89d..8c15b445 100644 --- a/README.md +++ b/README.md @@ -99,7 +99,7 @@ For information regarding locally cached versions of Node.js on GitHub hosted ru ### Supported version syntax -The `node-version` input supports the Semantic Versioning Specification, for more detailed examples please refer to the [documentation](https://github.com/npm/node-semver). +The `node-version` input supports the Semantic Versioning Specification, for more detailed examples please refer to [the semver package documentation](https://github.com/npm/node-semver). Examples: diff --git a/__tests__/authutil.test.ts b/__tests__/authutil.test.ts index 10468080..0676a850 100644 --- a/__tests__/authutil.test.ts +++ b/__tests__/authutil.test.ts @@ -1,9 +1,10 @@ import os from 'os'; -import * as fs from 'fs'; +import fs from 'fs'; import * as path from 'path'; import * as core from '@actions/core'; import * as io from '@actions/io'; import * as auth from '../src/authutil'; +import * as cacheUtils from '../src/cache-utils'; let rcFile: string; diff --git a/__tests__/cache-restore.test.ts b/__tests__/cache-restore.test.ts index df9170ad..90153a40 100644 --- a/__tests__/cache-restore.test.ts +++ b/__tests__/cache-restore.test.ts @@ -32,13 +32,13 @@ describe('cache-restore', () => { function findCacheFolder(command: string) { switch (command) { - case utils.supportedPackageManagers.npm.getCacheFolderCommand: + case 'npm config get cache': return npmCachePath; - case utils.supportedPackageManagers.pnpm.getCacheFolderCommand: + case 'pnpm store path --silent': return pnpmCachePath; - case utils.supportedPackageManagers.yarn1.getCacheFolderCommand: + case 'yarn cache dir': return yarn1CachePath; - case utils.supportedPackageManagers.yarn2.getCacheFolderCommand: + case 'yarn config get cacheFolder': return yarn2CachePath; default: return 'packge/not/found'; @@ -108,7 +108,7 @@ describe('cache-restore', () => { it.each([['npm7'], ['npm6'], ['pnpm6'], ['yarn1'], ['yarn2'], ['random']])( 'Throw an error because %s is not supported', async packageManager => { - await expect(restoreCache(packageManager)).rejects.toThrow( + await expect(restoreCache(packageManager, '')).rejects.toThrow( `Caching for '${packageManager}' is not supported` ); } @@ -132,7 +132,7 @@ describe('cache-restore', () => { } }); - await restoreCache(packageManager); + await restoreCache(packageManager, ''); expect(hashFilesSpy).toHaveBeenCalled(); expect(infoSpy).toHaveBeenCalledWith( `Cache restored from key: node-cache-${platform}-${packageManager}-${fileHash}` @@ -163,7 +163,7 @@ describe('cache-restore', () => { }); restoreCacheSpy.mockImplementationOnce(() => undefined); - await restoreCache(packageManager); + await restoreCache(packageManager, ''); expect(hashFilesSpy).toHaveBeenCalled(); expect(infoSpy).toHaveBeenCalledWith( `${packageManager} cache is not found` diff --git a/__tests__/cache-save.test.ts b/__tests__/cache-save.test.ts index f96cde5a..922566d6 100644 --- 
a/__tests__/cache-save.test.ts +++ b/__tests__/cache-save.test.ts @@ -107,18 +107,20 @@ describe('run', () => { describe('Validate unchanged cache is not saved', () => { it('should not save cache for yarn1', async () => { inputs['cache'] = 'yarn'; - getStateSpy.mockImplementation(() => yarnFileHash); - getCommandOutputSpy - .mockImplementationOnce(() => '1.2.3') - .mockImplementationOnce(() => `${commonPath}/yarn1`); + getStateSpy.mockImplementation(key => + key === State.CachePrimaryKey || key === State.CacheMatchedKey + ? yarnFileHash + : key === State.CachePaths + ? '["/foo/bar"]' + : 'not expected' + ); await run(); expect(getInputSpy).toHaveBeenCalled(); - expect(getStateSpy).toHaveBeenCalledTimes(2); - expect(getCommandOutputSpy).toHaveBeenCalledTimes(2); - expect(debugSpy).toHaveBeenCalledWith(`yarn path is ${commonPath}/yarn1`); - expect(debugSpy).toHaveBeenCalledWith('Consumed yarn version is 1.2.3'); + expect(getStateSpy).toHaveBeenCalledTimes(3); + expect(getCommandOutputSpy).toHaveBeenCalledTimes(0); + expect(debugSpy).toHaveBeenCalledTimes(0); expect(infoSpy).toHaveBeenCalledWith( `Cache hit occurred on the primary key ${yarnFileHash}, not saving cache.` ); @@ -127,18 +129,20 @@ describe('run', () => { it('should not save cache for yarn2', async () => { inputs['cache'] = 'yarn'; - getStateSpy.mockImplementation(() => yarnFileHash); - getCommandOutputSpy - .mockImplementationOnce(() => '2.2.3') - .mockImplementationOnce(() => `${commonPath}/yarn2`); + getStateSpy.mockImplementation(key => + key === State.CachePrimaryKey || key === State.CacheMatchedKey + ? yarnFileHash + : key === State.CachePaths + ? '["/foo/bar"]' + : 'not expected' + ); await run(); expect(getInputSpy).toHaveBeenCalled(); - expect(getStateSpy).toHaveBeenCalledTimes(2); - expect(getCommandOutputSpy).toHaveBeenCalledTimes(2); - expect(debugSpy).toHaveBeenCalledWith(`yarn path is ${commonPath}/yarn2`); - expect(debugSpy).toHaveBeenCalledWith('Consumed yarn version is 2.2.3'); + expect(getStateSpy).toHaveBeenCalledTimes(3); + expect(getCommandOutputSpy).toHaveBeenCalledTimes(0); + expect(debugSpy).toHaveBeenCalledTimes(0); expect(infoSpy).toHaveBeenCalledWith( `Cache hit occurred on the primary key ${yarnFileHash}, not saving cache.` ); @@ -147,35 +151,40 @@ describe('run', () => { it('should not save cache for npm', async () => { inputs['cache'] = 'npm'; - getStateSpy.mockImplementation(() => npmFileHash); + getStateSpy.mockImplementation(key => + key === State.CachePrimaryKey || key === State.CacheMatchedKey + ? yarnFileHash + : key === State.CachePaths + ? 
'["/foo/bar"]' + : 'not expected' + ); getCommandOutputSpy.mockImplementationOnce(() => `${commonPath}/npm`); await run(); expect(getInputSpy).toHaveBeenCalled(); - expect(getStateSpy).toHaveBeenCalledTimes(2); - expect(getCommandOutputSpy).toHaveBeenCalledTimes(1); - expect(debugSpy).toHaveBeenCalledWith(`npm path is ${commonPath}/npm`); - expect(infoSpy).toHaveBeenCalledWith( - `Cache hit occurred on the primary key ${npmFileHash}, not saving cache.` - ); + expect(getStateSpy).toHaveBeenCalledTimes(3); + expect(getCommandOutputSpy).toHaveBeenCalledTimes(0); + expect(debugSpy).toHaveBeenCalledTimes(0); expect(setFailedSpy).not.toHaveBeenCalled(); }); it('should not save cache for pnpm', async () => { inputs['cache'] = 'pnpm'; - getStateSpy.mockImplementation(() => pnpmFileHash); - getCommandOutputSpy.mockImplementationOnce(() => `${commonPath}/pnpm`); + getStateSpy.mockImplementation(key => + key === State.CachePrimaryKey || key === State.CacheMatchedKey + ? yarnFileHash + : key === State.CachePaths + ? '["/foo/bar"]' + : 'not expected' + ); await run(); expect(getInputSpy).toHaveBeenCalled(); - expect(getStateSpy).toHaveBeenCalledTimes(2); - expect(getCommandOutputSpy).toHaveBeenCalledTimes(1); - expect(debugSpy).toHaveBeenCalledWith(`pnpm path is ${commonPath}/pnpm`); - expect(infoSpy).toHaveBeenCalledWith( - `Cache hit occurred on the primary key ${pnpmFileHash}, not saving cache.` - ); + expect(getStateSpy).toHaveBeenCalledTimes(3); + expect(getCommandOutputSpy).toHaveBeenCalledTimes(0); + expect(debugSpy).toHaveBeenCalledTimes(0); expect(setFailedSpy).not.toHaveBeenCalled(); }); }); @@ -183,24 +192,22 @@ describe('run', () => { describe('action saves the cache', () => { it('saves cache from yarn 1', async () => { inputs['cache'] = 'yarn'; - getStateSpy.mockImplementation((name: string) => { - if (name === State.CacheMatchedKey) { - return yarnFileHash; - } else { - return npmFileHash; - } - }); - getCommandOutputSpy - .mockImplementationOnce(() => '1.2.3') - .mockImplementationOnce(() => `${commonPath}/yarn1`); + getStateSpy.mockImplementation((key: string) => + key === State.CacheMatchedKey + ? yarnFileHash + : key === State.CachePrimaryKey + ? npmFileHash + : key === State.CachePaths + ? '["/foo/bar"]' + : 'not expected' + ); await run(); expect(getInputSpy).toHaveBeenCalled(); - expect(getStateSpy).toHaveBeenCalledTimes(2); - expect(getCommandOutputSpy).toHaveBeenCalledTimes(2); - expect(debugSpy).toHaveBeenCalledWith(`yarn path is ${commonPath}/yarn1`); - expect(debugSpy).toHaveBeenCalledWith('Consumed yarn version is 1.2.3'); + expect(getStateSpy).toHaveBeenCalledTimes(3); + expect(getCommandOutputSpy).toHaveBeenCalledTimes(0); + expect(debugSpy).toHaveBeenCalledTimes(0); expect(infoSpy).not.toHaveBeenCalledWith( `Cache hit occurred on the primary key ${yarnFileHash}, not saving cache.` ); @@ -213,24 +220,22 @@ describe('run', () => { it('saves cache from yarn 2', async () => { inputs['cache'] = 'yarn'; - getStateSpy.mockImplementation((name: string) => { - if (name === State.CacheMatchedKey) { - return yarnFileHash; - } else { - return npmFileHash; - } - }); - getCommandOutputSpy - .mockImplementationOnce(() => '2.2.3') - .mockImplementationOnce(() => `${commonPath}/yarn2`); + getStateSpy.mockImplementation((key: string) => + key === State.CacheMatchedKey + ? yarnFileHash + : key === State.CachePrimaryKey + ? npmFileHash + : key === State.CachePaths + ? 
'["/foo/bar"]' + : 'not expected' + ); await run(); expect(getInputSpy).toHaveBeenCalled(); - expect(getStateSpy).toHaveBeenCalledTimes(2); - expect(getCommandOutputSpy).toHaveBeenCalledTimes(2); - expect(debugSpy).toHaveBeenCalledWith(`yarn path is ${commonPath}/yarn2`); - expect(debugSpy).toHaveBeenCalledWith('Consumed yarn version is 2.2.3'); + expect(getStateSpy).toHaveBeenCalledTimes(3); + expect(getCommandOutputSpy).toHaveBeenCalledTimes(0); + expect(debugSpy).toHaveBeenCalledTimes(0); expect(infoSpy).not.toHaveBeenCalledWith( `Cache hit occurred on the primary key ${yarnFileHash}, not saving cache.` ); @@ -243,21 +248,22 @@ describe('run', () => { it('saves cache from npm', async () => { inputs['cache'] = 'npm'; - getStateSpy.mockImplementation((name: string) => { - if (name === State.CacheMatchedKey) { - return npmFileHash; - } else { - return yarnFileHash; - } - }); - getCommandOutputSpy.mockImplementationOnce(() => `${commonPath}/npm`); + getStateSpy.mockImplementation((key: string) => + key === State.CacheMatchedKey + ? npmFileHash + : key === State.CachePrimaryKey + ? yarnFileHash + : key === State.CachePaths + ? '["/foo/bar"]' + : 'not expected' + ); await run(); expect(getInputSpy).toHaveBeenCalled(); - expect(getStateSpy).toHaveBeenCalledTimes(2); - expect(getCommandOutputSpy).toHaveBeenCalledTimes(1); - expect(debugSpy).toHaveBeenCalledWith(`npm path is ${commonPath}/npm`); + expect(getStateSpy).toHaveBeenCalledTimes(3); + expect(getCommandOutputSpy).toHaveBeenCalledTimes(0); + expect(debugSpy).toHaveBeenCalledTimes(0); expect(infoSpy).not.toHaveBeenCalledWith( `Cache hit occurred on the primary key ${npmFileHash}, not saving cache.` ); @@ -270,21 +276,22 @@ describe('run', () => { it('saves cache from pnpm', async () => { inputs['cache'] = 'pnpm'; - getStateSpy.mockImplementation((name: string) => { - if (name === State.CacheMatchedKey) { - return pnpmFileHash; - } else { - return npmFileHash; - } - }); - getCommandOutputSpy.mockImplementationOnce(() => `${commonPath}/pnpm`); + getStateSpy.mockImplementation((key: string) => + key === State.CacheMatchedKey + ? pnpmFileHash + : key === State.CachePrimaryKey + ? npmFileHash + : key === State.CachePaths + ? '["/foo/bar"]' + : 'not expected' + ); await run(); expect(getInputSpy).toHaveBeenCalled(); - expect(getStateSpy).toHaveBeenCalledTimes(2); - expect(getCommandOutputSpy).toHaveBeenCalledTimes(1); - expect(debugSpy).toHaveBeenCalledWith(`pnpm path is ${commonPath}/pnpm`); + expect(getStateSpy).toHaveBeenCalledTimes(3); + expect(getCommandOutputSpy).toHaveBeenCalledTimes(0); + expect(debugSpy).toHaveBeenCalledTimes(0); expect(infoSpy).not.toHaveBeenCalledWith( `Cache hit occurred on the primary key ${pnpmFileHash}, not saving cache.` ); @@ -297,14 +304,15 @@ describe('run', () => { it('save with -1 cacheId , should not fail workflow', async () => { inputs['cache'] = 'npm'; - getStateSpy.mockImplementation((name: string) => { - if (name === State.CacheMatchedKey) { - return npmFileHash; - } else { - return yarnFileHash; - } - }); - getCommandOutputSpy.mockImplementationOnce(() => `${commonPath}/npm`); + getStateSpy.mockImplementation((key: string) => + key === State.CacheMatchedKey + ? npmFileHash + : key === State.CachePrimaryKey + ? yarnFileHash + : key === State.CachePaths + ? 
'["/foo/bar"]' + : 'not expected' + ); saveCacheSpy.mockImplementation(() => { return -1; }); @@ -312,9 +320,9 @@ describe('run', () => { await run(); expect(getInputSpy).toHaveBeenCalled(); - expect(getStateSpy).toHaveBeenCalledTimes(2); - expect(getCommandOutputSpy).toHaveBeenCalledTimes(1); - expect(debugSpy).toHaveBeenCalledWith(`npm path is ${commonPath}/npm`); + expect(getStateSpy).toHaveBeenCalledTimes(3); + expect(getCommandOutputSpy).toHaveBeenCalledTimes(0); + expect(debugSpy).toHaveBeenCalledTimes(0); expect(infoSpy).not.toHaveBeenCalledWith( `Cache hit occurred on the primary key ${npmFileHash}, not saving cache.` ); @@ -327,14 +335,15 @@ describe('run', () => { it('saves with error from toolkit, should fail workflow', async () => { inputs['cache'] = 'npm'; - getStateSpy.mockImplementation((name: string) => { - if (name === State.CacheMatchedKey) { - return npmFileHash; - } else { - return yarnFileHash; - } - }); - getCommandOutputSpy.mockImplementationOnce(() => `${commonPath}/npm`); + getStateSpy.mockImplementation((key: string) => + key === State.CacheMatchedKey + ? npmFileHash + : key === State.CachePrimaryKey + ? yarnFileHash + : key === State.CachePaths + ? '["/foo/bar"]' + : 'not expected' + ); saveCacheSpy.mockImplementation(() => { throw new cache.ValidationError('Validation failed'); }); @@ -342,9 +351,9 @@ describe('run', () => { await run(); expect(getInputSpy).toHaveBeenCalled(); - expect(getStateSpy).toHaveBeenCalledTimes(2); - expect(getCommandOutputSpy).toHaveBeenCalledTimes(1); - expect(debugSpy).toHaveBeenCalledWith(`npm path is ${commonPath}/npm`); + expect(getStateSpy).toHaveBeenCalledTimes(3); + expect(getCommandOutputSpy).toHaveBeenCalledTimes(0); + expect(debugSpy).toHaveBeenCalledTimes(0); expect(infoSpy).not.toHaveBeenCalledWith( `Cache hit occurred on the primary key ${npmFileHash}, not saving cache.` ); diff --git a/__tests__/cache-utils.test.ts b/__tests__/cache-utils.test.ts index 9e8d653d..a8c881e5 100644 --- a/__tests__/cache-utils.test.ts +++ b/__tests__/cache-utils.test.ts @@ -2,7 +2,18 @@ import * as core from '@actions/core'; import * as cache from '@actions/cache'; import path from 'path'; import * as utils from '../src/cache-utils'; -import {PackageManagerInfo, isCacheFeatureAvailable} from '../src/cache-utils'; +import { + PackageManagerInfo, + isCacheFeatureAvailable, + supportedPackageManagers, + getCommandOutput, + resetProjectDirectoriesMemoized +} from '../src/cache-utils'; +import fs from 'fs'; +import * as cacheUtils from '../src/cache-utils'; +import * as glob from '@actions/glob'; +import {Globber} from '@actions/glob'; +import {MockGlobber} from './mock/glob-mock'; describe('cache-utils', () => { const versionYarn1 = '1.2.3'; @@ -12,8 +23,10 @@ describe('cache-utils', () => { let isFeatureAvailable: jest.SpyInstance; let info: jest.SpyInstance; let warningSpy: jest.SpyInstance; + let fsRealPathSyncSpy: jest.SpyInstance; beforeEach(() => { + console.log('::stop-commands::stoptoken'); process.env['GITHUB_WORKSPACE'] = path.join(__dirname, 'data'); debugSpy = jest.spyOn(core, 'debug'); debugSpy.mockImplementation(msg => {}); @@ -24,13 +37,29 @@ describe('cache-utils', () => { isFeatureAvailable = jest.spyOn(cache, 'isFeatureAvailable'); getCommandOutputSpy = jest.spyOn(utils, 'getCommandOutput'); + + fsRealPathSyncSpy = jest.spyOn(fs, 'realpathSync'); + fsRealPathSyncSpy.mockImplementation(dirName => { + return dirName; + }); }); + afterEach(() => { + jest.resetAllMocks(); + jest.clearAllMocks(); + //jest.restoreAllMocks(); + }); + + 
afterAll(async () => { + console.log('::stoptoken::'); + jest.restoreAllMocks(); + }, 100000); + + describe('getPackageManagerInfo', () => { it.each<[string, PackageManagerInfo | null]>([ ['npm', utils.supportedPackageManagers.npm], ['pnpm', utils.supportedPackageManagers.pnpm], - ['yarn', utils.supportedPackageManagers.yarn1], + ['yarn', utils.supportedPackageManagers.yarn], ['yarn1', null], ['yarn2', null], ['npm7', null] @@ -72,4 +101,263 @@ describe('cache-utils', () => { jest.resetAllMocks(); jest.clearAllMocks(); }); + + describe('getCacheDirectoriesPaths', () => { + let existsSpy: jest.SpyInstance; + let lstatSpy: jest.SpyInstance; + let globCreateSpy: jest.SpyInstance; + + beforeEach(() => { + existsSpy = jest.spyOn(fs, 'existsSync'); + existsSpy.mockImplementation(() => true); + + lstatSpy = jest.spyOn(fs, 'lstatSync'); + lstatSpy.mockImplementation(arg => ({ + isDirectory: () => true + })); + + globCreateSpy = jest.spyOn(glob, 'create'); + + globCreateSpy.mockImplementation( + (pattern: string): Promise<Globber> => + MockGlobber.create(['/foo', '/bar']) + ); + + resetProjectDirectoriesMemoized(); + }); + + afterEach(() => { + existsSpy.mockRestore(); + lstatSpy.mockRestore(); + globCreateSpy.mockRestore(); + }); + + it.each([ + [supportedPackageManagers.npm, ''], + [supportedPackageManagers.npm, '/dir/file.lock'], + [supportedPackageManagers.npm, '/**/file.lock'], + [supportedPackageManagers.pnpm, ''], + [supportedPackageManagers.pnpm, '/dir/file.lock'], + [supportedPackageManagers.pnpm, '/**/file.lock'] + ])( + 'getCacheDirectoriesPaths should return one dir for non yarn', + async (packageManagerInfo, cacheDependency) => { + getCommandOutputSpy.mockImplementation(() => 'foo'); + + const dirs = await cacheUtils.getCacheDirectories( + packageManagerInfo, + cacheDependency + ); + expect(dirs).toEqual(['foo']); + // to do not call for a version + // call once for get cache folder + expect(getCommandOutputSpy).toHaveBeenCalledTimes(1); + } + ); + + it('getCacheDirectoriesPaths should return one dir for yarn without cacheDependency', async () => { + getCommandOutputSpy.mockImplementation(() => 'foo'); + + const dirs = await cacheUtils.getCacheDirectories( + supportedPackageManagers.yarn, + '' + ); + expect(dirs).toEqual(['foo']); + }); + + it.each([ + [supportedPackageManagers.npm, ''], + [supportedPackageManagers.npm, '/dir/file.lock'], + [supportedPackageManagers.npm, '/**/file.lock'], + [supportedPackageManagers.pnpm, ''], + [supportedPackageManagers.pnpm, '/dir/file.lock'], + [supportedPackageManagers.pnpm, '/**/file.lock'], + [supportedPackageManagers.yarn, ''], + [supportedPackageManagers.yarn, '/dir/file.lock'], + [supportedPackageManagers.yarn, '/**/file.lock'] + ])( + 'getCacheDirectoriesPaths should throw for getCommandOutput returning empty', + async (packageManagerInfo, cacheDependency) => { + getCommandOutputSpy.mockImplementation((command: string) => + // return empty string to indicate getCacheFolderPath failed + // --version still works + command.includes('version') ? '1.'
: '' + ); + + await expect( + cacheUtils.getCacheDirectories(packageManagerInfo, cacheDependency) + ).rejects.toThrow(); //'Could not get cache folder path for /dir'); + } + ); + + it.each([ + [supportedPackageManagers.yarn, '/dir/file.lock'], + [supportedPackageManagers.yarn, '/**/file.lock'] + ])( + 'getCacheDirectoriesPaths should nothrow in case of having not directories', + async (packageManagerInfo, cacheDependency) => { + lstatSpy.mockImplementation(arg => ({ + isDirectory: () => false + })); + + await cacheUtils.getCacheDirectories( + packageManagerInfo, + cacheDependency + ); + expect(warningSpy).toHaveBeenCalledTimes(1); + expect(warningSpy).toHaveBeenCalledWith( + `No existing directories found containing cache-dependency-path="${cacheDependency}"` + ); + } + ); + + it.each(['1.1.1', '2.2.2'])( + 'getCacheDirectoriesPaths yarn v%s should return one dir without cacheDependency', + async version => { + getCommandOutputSpy.mockImplementationOnce(() => version); + getCommandOutputSpy.mockImplementationOnce(() => `foo${version}`); + + const dirs = await cacheUtils.getCacheDirectories( + supportedPackageManagers.yarn, + '' + ); + expect(dirs).toEqual([`foo${version}`]); + } + ); + + it.each(['1.1.1', '2.2.2'])( + 'getCacheDirectoriesPaths yarn v%s should return 2 dirs with globbed cacheDependency', + async version => { + let dirNo = 1; + getCommandOutputSpy.mockImplementation((command: string) => + command.includes('version') ? version : `file_${version}_${dirNo++}` + ); + globCreateSpy.mockImplementation( + (pattern: string): Promise<Globber> => + MockGlobber.create(['/tmp/dir1/file', '/tmp/dir2/file']) + ); + + const dirs = await cacheUtils.getCacheDirectories( + supportedPackageManagers.yarn, + '/tmp/**/file' + ); + expect(dirs).toEqual([`file_${version}_1`, `file_${version}_2`]); + } + ); + + it.each(['1.1.1', '2.2.2'])( + 'getCacheDirectoriesPaths yarn v%s should return 2 dirs with globbed cacheDependency expanding to duplicates', + async version => { + let dirNo = 1; + getCommandOutputSpy.mockImplementation((command: string) => + command.includes('version') ? version : `file_${version}_${dirNo++}` + ); + globCreateSpy.mockImplementation( + (pattern: string): Promise<Globber> => + MockGlobber.create([ + '/tmp/dir1/file', + '/tmp/dir2/file', + '/tmp/dir1/file' + ]) + ); + + const dirs = await cacheUtils.getCacheDirectories( + supportedPackageManagers.yarn, + '/tmp/**/file' + ); + expect(dirs).toEqual([`file_${version}_1`, `file_${version}_2`]); + } + ); + + it.each(['1.1.1', '2.2.2'])( + 'getCacheDirectoriesPaths yarn v%s should return 2 uniq dirs despite duplicate cache directories', + async version => { + let dirNo = 1; + getCommandOutputSpy.mockImplementation((command: string) => + command.includes('version') + ?
version + : `file_${version}_${dirNo++ % 2}` + ); + globCreateSpy.mockImplementation( + (pattern: string): Promise<Globber> => + MockGlobber.create([ + '/tmp/dir1/file', + '/tmp/dir2/file', + '/tmp/dir3/file' + ]) + ); + + const dirs = await cacheUtils.getCacheDirectories( + supportedPackageManagers.yarn, + '/tmp/**/file' + ); + expect(dirs).toEqual([`file_${version}_1`, `file_${version}_0`]); + expect(getCommandOutputSpy).toHaveBeenCalledTimes(6); + expect(getCommandOutputSpy).toHaveBeenCalledWith( + 'yarn --version', + '/tmp/dir1' + ); + expect(getCommandOutputSpy).toHaveBeenCalledWith( + 'yarn --version', + '/tmp/dir2' + ); + expect(getCommandOutputSpy).toHaveBeenCalledWith( + 'yarn --version', + '/tmp/dir3' + ); + expect(getCommandOutputSpy).toHaveBeenCalledWith( + version.startsWith('1.') + ? 'yarn cache dir' + : 'yarn config get cacheFolder', + '/tmp/dir1' + ); + expect(getCommandOutputSpy).toHaveBeenCalledWith( + version.startsWith('1.') + ? 'yarn cache dir' + : 'yarn config get cacheFolder', + '/tmp/dir2' + ); + expect(getCommandOutputSpy).toHaveBeenCalledWith( + version.startsWith('1.') + ? 'yarn cache dir' + : 'yarn config get cacheFolder', + '/tmp/dir3' + ); + } + ); + + it.each(['1.1.1', '2.2.2'])( + 'getCacheDirectoriesPaths yarn v%s should return 4 dirs with multiple globs', + async version => { + // simulate wrong indents + const cacheDependencyPath = `/tmp/dir1/file + /tmp/dir2/file +/tmp/**/file + `; + globCreateSpy.mockImplementation( + (pattern: string): Promise<Globber> => + MockGlobber.create([ + '/tmp/dir1/file', + '/tmp/dir2/file', + '/tmp/dir3/file', + '/tmp/dir4/file' + ]) + ); + let dirNo = 1; + getCommandOutputSpy.mockImplementation((command: string) => + command.includes('version') ? version : `file_${version}_${dirNo++}` + ); + const dirs = await cacheUtils.getCacheDirectories( + supportedPackageManagers.yarn, + cacheDependencyPath + ); + expect(dirs).toEqual([ + `file_${version}_1`, + `file_${version}_2`, + `file_${version}_3`, + `file_${version}_4` + ]); + } + ); + }); }); diff --git a/__tests__/data/.tool-versions-node b/__tests__/data/.tool-versions-node new file mode 100644 index 00000000..8e8ac418 --- /dev/null +++ b/__tests__/data/.tool-versions-node @@ -0,0 +1 @@ +node 14.0.0 diff --git a/__tests__/mock/glob-mock.test.ts b/__tests__/mock/glob-mock.test.ts new file mode 100644 index 00000000..db10ced1 --- /dev/null +++ b/__tests__/mock/glob-mock.test.ts @@ -0,0 +1,18 @@ +import {MockGlobber} from './glob-mock'; + +describe('mocked globber tests', () => { + it('globber should return generator', async () => { + const globber = new MockGlobber(['aaa', 'bbb', 'ccc']); + const generator = globber.globGenerator(); + const result: string[] = []; + for await (const itemPath of generator) { + result.push(itemPath); + } + expect(result).toEqual(['aaa', 'bbb', 'ccc']); + }); + it('globber should return glob', async () => { + const globber = new MockGlobber(['aaa', 'bbb', 'ccc']); + const result: string[] = await globber.glob(); + expect(result).toEqual(['aaa', 'bbb', 'ccc']); + }); +}); diff --git a/__tests__/mock/glob-mock.ts b/__tests__/mock/glob-mock.ts new file mode 100644 index 00000000..a2eabf75 --- /dev/null +++ b/__tests__/mock/glob-mock.ts @@ -0,0 +1,29 @@ +import {Globber} from '@actions/glob'; + +export class MockGlobber implements Globber { + private readonly expected: string[]; + constructor(expected: string[]) { + this.expected = expected; + } + getSearchPaths(): string[] { + return this.expected.slice(); + } + + async glob(): Promise<string[]> { + const result: string[] = []; +
for await (const itemPath of this.globGenerator()) { + result.push(itemPath); + } + return result; + } + + async *globGenerator(): AsyncGenerator<string, void> { + for (const e of this.expected) { + yield e; + } + } + + static async create(expected: string[]): Promise<MockGlobber> { + return new MockGlobber(expected); + } +} diff --git a/__tests__/nightly-installer.test.ts b/__tests__/nightly-installer.test.ts index 04a3253a..87c43795 100644 --- a/__tests__/nightly-installer.test.ts +++ b/__tests__/nightly-installer.test.ts @@ -95,6 +95,8 @@ describe('setup-node', () => { res = nodeTestDistRc; } else if (url.includes('/nightly')) { res = nodeTestDistNightly; + } else if (url.includes('/v8-canary')) { + res = nodeV8CanaryTestDist; } else { res = nodeTestDist; } diff --git a/__tests__/prepare-yarn-subprojects.sh b/__tests__/prepare-yarn-subprojects.sh new file mode 100755 index 00000000..30d894cf --- /dev/null +++ b/__tests__/prepare-yarn-subprojects.sh @@ -0,0 +1,59 @@ +#!/bin/sh -e +export YARN_ENABLE_IMMUTABLE_INSTALLS=false +rm package.json +rm package-lock.json +echo "create yarn2 project in the sub2" +mkdir sub2 +cd sub2 +cat <<EOT >package.json +{ + "name": "subproject", + "dependencies": { + "random": "^3.0.6", + "uuid": "^9.0.0" + } +} +EOT +yarn set version 2.4.3 +yarn install + +echo "create yarn3 project in the sub3" +cd .. +mkdir sub3 +cd sub3 +cat <<EOT >package.json +{ + "name": "subproject", + "dependencies": { + "random": "^3.0.6", + "uuid": "^9.0.0" + } +} +EOT +yarn set version 3.5.1 +yarn install +if [ x$1 = 'xglobal' ];then + echo enableGlobalCache + echo 'enableGlobalCache: true' >> .yarnrc.yml +fi + +cd .. +if [ x$1 != 'xkeepcache' -a x$2 != 'xkeepcache' ]; then + rm -rf sub2/.yarn/cache + rm -rf sub3/.yarn/cache +fi + +if [ x$1 = 'xyarn1' ];then + echo "create yarn1 project in the root" + cat <<EOT >package.json +{ + "name": "subproject", + "dependencies": { + "random": "^3.0.6", + "uuid": "^9.0.0" + } +} +EOT + yarn set version 1.22.19 + yarn install +fi \ No newline at end of file diff --git a/action.yml b/action.yml index b22de1ef..56025a40 100644 --- a/action.yml +++ b/action.yml @@ -25,7 +25,7 @@ inputs: description: 'Used to specify a package manager for caching in the default directory. Supported values: npm, yarn, pnpm.' cache-dependency-path: description: 'Used to specify the path to a dependency file: package-lock.json, yarn.lock, etc. Supports wildcards or a list of file names for caching multiple dependencies.' -# TODO: add input to control forcing to pull from cloud or dist. +# TODO: add input to control forcing to pull from cloud or dist. # escape valve for someone having issues or needing the absolute latest which isn't cached yet outputs: cache-hit: diff --git a/dist/cache-save/index.js b/dist/cache-save/index.js index a260283c..cfef4635 100644 --- a/dist/cache-save/index.js +++ b/dist/cache-save/index.js @@ -6,6 +6,29 @@ "use strict"; +var __createBinding = (this && this.__createBinding) || (Object.create ? (function(o, m, k, k2) { + if (k2 === undefined) k2 = k; + var desc = Object.getOwnPropertyDescriptor(m, k); + if (!desc || ("get" in desc ? !m.__esModule : desc.writable || desc.configurable)) { + desc = { enumerable: true, get: function() { return m[k]; } }; + } + Object.defineProperty(o, k2, desc); +}) : (function(o, m, k, k2) { + if (k2 === undefined) k2 = k; + o[k2] = m[k]; +})); +var __setModuleDefault = (this && this.__setModuleDefault) || (Object.create ?
(function(o, v) { + Object.defineProperty(o, "default", { enumerable: true, value: v }); +}) : function(o, v) { + o["default"] = v; +}); +var __importStar = (this && this.__importStar) || function (mod) { + if (mod && mod.__esModule) return mod; + var result = {}; + if (mod != null) for (var k in mod) if (k !== "default" && Object.prototype.hasOwnProperty.call(mod, k)) __createBinding(result, mod, k); + __setModuleDefault(result, mod); + return result; +}; var __awaiter = (this && this.__awaiter) || function (thisArg, _arguments, P, generator) { function adopt(value) { return value instanceof P ? value : new P(function (resolve) { resolve(value); }); } return new (P || (P = Promise))(function (resolve, reject) { @@ -15,14 +38,8 @@ var __awaiter = (this && this.__awaiter) || function (thisArg, _arguments, P, ge step((generator = generator.apply(thisArg, _arguments || [])).next()); }); }; -var __importStar = (this && this.__importStar) || function (mod) { - if (mod && mod.__esModule) return mod; - var result = {}; - if (mod != null) for (var k in mod) if (Object.hasOwnProperty.call(mod, k)) result[k] = mod[k]; - result["default"] = mod; - return result; -}; Object.defineProperty(exports, "__esModule", ({ value: true })); +exports.saveCache = exports.restoreCache = exports.isFeatureAvailable = exports.ReserveCacheError = exports.ValidationError = void 0; const core = __importStar(__nccwpck_require__(2186)); const path = __importStar(__nccwpck_require__(1017)); const utils = __importStar(__nccwpck_require__(1518)); @@ -74,9 +91,10 @@ exports.isFeatureAvailable = isFeatureAvailable; * @param primaryKey an explicit key for restoring the cache * @param restoreKeys an optional ordered list of keys to use for restoring the cache if no cache hit occurred for key * @param downloadOptions cache download options + * @param enableCrossOsArchive an optional boolean enabled to restore on windows any cache created on any platform * @returns string returns the key for the cache hit, otherwise returns undefined */ -function restoreCache(paths, primaryKey, restoreKeys, options) { +function restoreCache(paths, primaryKey, restoreKeys, options, enableCrossOsArchive = false) { return __awaiter(this, void 0, void 0, function* () { checkPaths(paths); restoreKeys = restoreKeys || []; @@ -94,22 +112,27 @@ function restoreCache(paths, primaryKey, restoreKeys, options) { try { // path are needed to compute version const cacheEntry = yield cacheHttpClient.getCacheEntry(keys, paths, { - compressionMethod + compressionMethod, + enableCrossOsArchive }); if (!(cacheEntry === null || cacheEntry === void 0 ? void 0 : cacheEntry.archiveLocation)) { // Cache not found return undefined; } + if (options === null || options === void 0 ? 
void 0 : options.lookupOnly) { + core.info('Lookup only - skipping download'); + return cacheEntry.cacheKey; + } archivePath = path.join(yield utils.createTempDirectory(), utils.getCacheFileName(compressionMethod)); core.debug(`Archive Path: ${archivePath}`); // Download the cache from the cache entry yield cacheHttpClient.downloadCache(cacheEntry.archiveLocation, archivePath, options); if (core.isDebug()) { - yield tar_1.listTar(archivePath, compressionMethod); + yield (0, tar_1.listTar)(archivePath, compressionMethod); } const archiveFileSize = utils.getArchiveFileSizeInBytes(archivePath); core.info(`Cache Size: ~${Math.round(archiveFileSize / (1024 * 1024))} MB (${archiveFileSize} B)`); - yield tar_1.extractTar(archivePath, compressionMethod); + yield (0, tar_1.extractTar)(archivePath, compressionMethod); core.info('Cache restored successfully'); return cacheEntry.cacheKey; } @@ -141,10 +164,11 @@ exports.restoreCache = restoreCache; * * @param paths a list of file paths to be cached * @param key an explicit key for restoring the cache + * @param enableCrossOsArchive an optional boolean enabled to save cache on windows which could be restored on any platform * @param options cache upload options * @returns number returns cacheId if the cache was saved successfully and throws an error if save fails */ -function saveCache(paths, key, options) { +function saveCache(paths, key, options, enableCrossOsArchive = false) { var _a, _b, _c, _d, _e; return __awaiter(this, void 0, void 0, function* () { checkPaths(paths); @@ -161,9 +185,9 @@ function saveCache(paths, key, options) { const archivePath = path.join(archiveFolder, utils.getCacheFileName(compressionMethod)); core.debug(`Archive Path: ${archivePath}`); try { - yield tar_1.createTar(archiveFolder, cachePaths, compressionMethod); + yield (0, tar_1.createTar)(archiveFolder, cachePaths, compressionMethod); if (core.isDebug()) { - yield tar_1.listTar(archivePath, compressionMethod); + yield (0, tar_1.listTar)(archivePath, compressionMethod); } const fileSizeLimit = 10 * 1024 * 1024 * 1024; // 10GB per repo limit const archiveFileSize = utils.getArchiveFileSizeInBytes(archivePath); @@ -175,6 +199,7 @@ function saveCache(paths, key, options) { core.debug('Reserving Cache'); const reserveCacheResponse = yield cacheHttpClient.reserveCache(key, paths, { compressionMethod, + enableCrossOsArchive, cacheSize: archiveFileSize }); if ((_a = reserveCacheResponse === null || reserveCacheResponse === void 0 ? void 0 : reserveCacheResponse.result) === null || _a === void 0 ? void 0 : _a.cacheId) { @@ -223,6 +248,29 @@ exports.saveCache = saveCache; "use strict"; +var __createBinding = (this && this.__createBinding) || (Object.create ? (function(o, m, k, k2) { + if (k2 === undefined) k2 = k; + var desc = Object.getOwnPropertyDescriptor(m, k); + if (!desc || ("get" in desc ? !m.__esModule : desc.writable || desc.configurable)) { + desc = { enumerable: true, get: function() { return m[k]; } }; + } + Object.defineProperty(o, k2, desc); +}) : (function(o, m, k, k2) { + if (k2 === undefined) k2 = k; + o[k2] = m[k]; +})); +var __setModuleDefault = (this && this.__setModuleDefault) || (Object.create ? 
(function(o, v) { + Object.defineProperty(o, "default", { enumerable: true, value: v }); +}) : function(o, v) { + o["default"] = v; +}); +var __importStar = (this && this.__importStar) || function (mod) { + if (mod && mod.__esModule) return mod; + var result = {}; + if (mod != null) for (var k in mod) if (k !== "default" && Object.prototype.hasOwnProperty.call(mod, k)) __createBinding(result, mod, k); + __setModuleDefault(result, mod); + return result; +}; var __awaiter = (this && this.__awaiter) || function (thisArg, _arguments, P, generator) { function adopt(value) { return value instanceof P ? value : new P(function (resolve) { resolve(value); }); } return new (P || (P = Promise))(function (resolve, reject) { @@ -232,14 +280,8 @@ var __awaiter = (this && this.__awaiter) || function (thisArg, _arguments, P, ge step((generator = generator.apply(thisArg, _arguments || [])).next()); }); }; -var __importStar = (this && this.__importStar) || function (mod) { - if (mod && mod.__esModule) return mod; - var result = {}; - if (mod != null) for (var k in mod) if (Object.hasOwnProperty.call(mod, k)) result[k] = mod[k]; - result["default"] = mod; - return result; -}; Object.defineProperty(exports, "__esModule", ({ value: true })); +exports.saveCache = exports.reserveCache = exports.downloadCache = exports.getCacheEntry = exports.getCacheVersion = void 0; const core = __importStar(__nccwpck_require__(2186)); const http_client_1 = __nccwpck_require__(1825); const auth_1 = __nccwpck_require__(2001); @@ -247,7 +289,6 @@ const crypto = __importStar(__nccwpck_require__(6113)); const fs = __importStar(__nccwpck_require__(7147)); const url_1 = __nccwpck_require__(7310); const utils = __importStar(__nccwpck_require__(1518)); -const constants_1 = __nccwpck_require__(8840); const downloadUtils_1 = __nccwpck_require__(5500); const options_1 = __nccwpck_require__(6215); const requestUtils_1 = __nccwpck_require__(3981); @@ -277,10 +318,17 @@ function createHttpClient() { const bearerCredentialHandler = new auth_1.BearerCredentialHandler(token); return new http_client_1.HttpClient('actions/cache', [bearerCredentialHandler], getRequestOptions()); } -function getCacheVersion(paths, compressionMethod) { - const components = paths.concat(!compressionMethod || compressionMethod === constants_1.CompressionMethod.Gzip - ? [] - : [compressionMethod]); +function getCacheVersion(paths, compressionMethod, enableCrossOsArchive = false) { + const components = paths; + // Add compression method to cache version to restore + // compressed cache as per compression method + if (compressionMethod) { + components.push(compressionMethod); + } + // Only check for windows platforms if enableCrossOsArchive is false + if (process.platform === 'win32' && !enableCrossOsArchive) { + components.push('windows-only'); + } // Add salt to cache version to support breaking changes in cache entry components.push(versionSalt); return crypto @@ -292,18 +340,24 @@ exports.getCacheVersion = getCacheVersion; function getCacheEntry(keys, paths, options) { return __awaiter(this, void 0, void 0, function* () { const httpClient = createHttpClient(); - const version = getCacheVersion(paths, options === null || options === void 0 ? void 0 : options.compressionMethod); + const version = getCacheVersion(paths, options === null || options === void 0 ? void 0 : options.compressionMethod, options === null || options === void 0 ? 
void 0 : options.enableCrossOsArchive); const resource = `cache?keys=${encodeURIComponent(keys.join(','))}&version=${version}`; - const response = yield requestUtils_1.retryTypedResponse('getCacheEntry', () => __awaiter(this, void 0, void 0, function* () { return httpClient.getJson(getCacheApiUrl(resource)); })); + const response = yield (0, requestUtils_1.retryTypedResponse)('getCacheEntry', () => __awaiter(this, void 0, void 0, function* () { return httpClient.getJson(getCacheApiUrl(resource)); })); + // Cache not found if (response.statusCode === 204) { + // List cache for primary key only if cache miss occurs + if (core.isDebug()) { + yield printCachesListForDiagnostics(keys[0], httpClient, version); + } return null; } - if (!requestUtils_1.isSuccessStatusCode(response.statusCode)) { + if (!(0, requestUtils_1.isSuccessStatusCode)(response.statusCode)) { throw new Error(`Cache service responded with ${response.statusCode}`); } const cacheResult = response.result; const cacheDownloadUrl = cacheResult === null || cacheResult === void 0 ? void 0 : cacheResult.archiveLocation; if (!cacheDownloadUrl) { + // Cache achiveLocation not found. This should never happen, and hence bail out. throw new Error('Cache not found.'); } core.setSecret(cacheDownloadUrl); @@ -313,18 +367,34 @@ function getCacheEntry(keys, paths, options) { }); } exports.getCacheEntry = getCacheEntry; +function printCachesListForDiagnostics(key, httpClient, version) { + return __awaiter(this, void 0, void 0, function* () { + const resource = `caches?key=${encodeURIComponent(key)}`; + const response = yield (0, requestUtils_1.retryTypedResponse)('listCache', () => __awaiter(this, void 0, void 0, function* () { return httpClient.getJson(getCacheApiUrl(resource)); })); + if (response.statusCode === 200) { + const cacheListResult = response.result; + const totalCount = cacheListResult === null || cacheListResult === void 0 ? void 0 : cacheListResult.totalCount; + if (totalCount && totalCount > 0) { + core.debug(`No matching cache found for cache key '${key}', version '${version} and scope ${process.env['GITHUB_REF']}. There exist one or more cache(s) with similar key but they have different version or scope. See more info on cache matching here: https://docs.github.com/en/actions/using-workflows/caching-dependencies-to-speed-up-workflows#matching-a-cache-key \nOther caches with similar key:`); + for (const cacheEntry of (cacheListResult === null || cacheListResult === void 0 ? void 0 : cacheListResult.artifactCaches) || []) { + core.debug(`Cache Key: ${cacheEntry === null || cacheEntry === void 0 ? void 0 : cacheEntry.cacheKey}, Cache Version: ${cacheEntry === null || cacheEntry === void 0 ? void 0 : cacheEntry.cacheVersion}, Cache Scope: ${cacheEntry === null || cacheEntry === void 0 ? void 0 : cacheEntry.scope}, Cache Created: ${cacheEntry === null || cacheEntry === void 0 ? void 0 : cacheEntry.creationTime}`); + } + } + } + }); +} function downloadCache(archiveLocation, archivePath, options) { return __awaiter(this, void 0, void 0, function* () { const archiveUrl = new url_1.URL(archiveLocation); - const downloadOptions = options_1.getDownloadOptions(options); + const downloadOptions = (0, options_1.getDownloadOptions)(options); if (downloadOptions.useAzureSdk && archiveUrl.hostname.endsWith('.blob.core.windows.net')) { // Use Azure storage SDK to download caches hosted on Azure to improve speed and reliability. 
- yield downloadUtils_1.downloadCacheStorageSDK(archiveLocation, archivePath, downloadOptions); + yield (0, downloadUtils_1.downloadCacheStorageSDK)(archiveLocation, archivePath, downloadOptions); } else { // Otherwise, download using the Actions http-client. - yield downloadUtils_1.downloadCacheHttpClient(archiveLocation, archivePath); + yield (0, downloadUtils_1.downloadCacheHttpClient)(archiveLocation, archivePath); } }); } @@ -333,13 +403,13 @@ exports.downloadCache = downloadCache; function reserveCache(key, paths, options) { return __awaiter(this, void 0, void 0, function* () { const httpClient = createHttpClient(); - const version = getCacheVersion(paths, options === null || options === void 0 ? void 0 : options.compressionMethod); + const version = getCacheVersion(paths, options === null || options === void 0 ? void 0 : options.compressionMethod, options === null || options === void 0 ? void 0 : options.enableCrossOsArchive); const reserveCacheRequest = { key, version, cacheSize: options === null || options === void 0 ? void 0 : options.cacheSize }; - const response = yield requestUtils_1.retryTypedResponse('reserveCache', () => __awaiter(this, void 0, void 0, function* () { + const response = yield (0, requestUtils_1.retryTypedResponse)('reserveCache', () => __awaiter(this, void 0, void 0, function* () { return httpClient.postJson(getCacheApiUrl('caches'), reserveCacheRequest); })); return response; @@ -363,10 +433,10 @@ function uploadChunk(httpClient, resourceUrl, openStream, start, end) { 'Content-Type': 'application/octet-stream', 'Content-Range': getContentRange(start, end) }; - const uploadChunkResponse = yield requestUtils_1.retryHttpClientResponse(`uploadChunk (start: ${start}, end: ${end})`, () => __awaiter(this, void 0, void 0, function* () { + const uploadChunkResponse = yield (0, requestUtils_1.retryHttpClientResponse)(`uploadChunk (start: ${start}, end: ${end})`, () => __awaiter(this, void 0, void 0, function* () { return httpClient.sendStream('PATCH', resourceUrl, openStream(), additionalHeaders); })); - if (!requestUtils_1.isSuccessStatusCode(uploadChunkResponse.message.statusCode)) { + if (!(0, requestUtils_1.isSuccessStatusCode)(uploadChunkResponse.message.statusCode)) { throw new Error(`Cache service responded with ${uploadChunkResponse.message.statusCode} during upload chunk.`); } }); @@ -377,7 +447,7 @@ function uploadFile(httpClient, cacheId, archivePath, options) { const fileSize = utils.getArchiveFileSizeInBytes(archivePath); const resourceUrl = getCacheApiUrl(`caches/${cacheId.toString()}`); const fd = fs.openSync(archivePath, 'r'); - const uploadOptions = options_1.getUploadOptions(options); + const uploadOptions = (0, options_1.getUploadOptions)(options); const concurrency = utils.assertDefined('uploadConcurrency', uploadOptions.uploadConcurrency); const maxChunkSize = utils.assertDefined('uploadChunkSize', uploadOptions.uploadChunkSize); const parallelUploads = [...new Array(concurrency).keys()]; @@ -412,7 +482,7 @@ function uploadFile(httpClient, cacheId, archivePath, options) { function commitCache(httpClient, cacheId, filesize) { return __awaiter(this, void 0, void 0, function* () { const commitCacheRequest = { size: filesize }; - return yield requestUtils_1.retryTypedResponse('commitCache', () => __awaiter(this, void 0, void 0, function* () { + return yield (0, requestUtils_1.retryTypedResponse)('commitCache', () => __awaiter(this, void 0, void 0, function* () { return httpClient.postJson(getCacheApiUrl(`caches/${cacheId.toString()}`), 
commitCacheRequest); })); }); @@ -427,7 +497,7 @@ function saveCache(cacheId, archivePath, options) { const cacheSize = utils.getArchiveFileSizeInBytes(archivePath); core.info(`Cache Size: ~${Math.round(cacheSize / (1024 * 1024))} MB (${cacheSize} B)`); const commitCacheResponse = yield commitCache(httpClient, cacheId, cacheSize); - if (!requestUtils_1.isSuccessStatusCode(commitCacheResponse.statusCode)) { + if (!(0, requestUtils_1.isSuccessStatusCode)(commitCacheResponse.statusCode)) { throw new Error(`Cache service responded with ${commitCacheResponse.statusCode} during commit cache.`); } core.info('Cache saved successfully'); @@ -443,6 +513,29 @@ exports.saveCache = saveCache; "use strict"; +var __createBinding = (this && this.__createBinding) || (Object.create ? (function(o, m, k, k2) { + if (k2 === undefined) k2 = k; + var desc = Object.getOwnPropertyDescriptor(m, k); + if (!desc || ("get" in desc ? !m.__esModule : desc.writable || desc.configurable)) { + desc = { enumerable: true, get: function() { return m[k]; } }; + } + Object.defineProperty(o, k2, desc); +}) : (function(o, m, k, k2) { + if (k2 === undefined) k2 = k; + o[k2] = m[k]; +})); +var __setModuleDefault = (this && this.__setModuleDefault) || (Object.create ? (function(o, v) { + Object.defineProperty(o, "default", { enumerable: true, value: v }); +}) : function(o, v) { + o["default"] = v; +}); +var __importStar = (this && this.__importStar) || function (mod) { + if (mod && mod.__esModule) return mod; + var result = {}; + if (mod != null) for (var k in mod) if (k !== "default" && Object.prototype.hasOwnProperty.call(mod, k)) __createBinding(result, mod, k); + __setModuleDefault(result, mod); + return result; +}; var __awaiter = (this && this.__awaiter) || function (thisArg, _arguments, P, generator) { function adopt(value) { return value instanceof P ? 
value : new P(function (resolve) { resolve(value); }); } return new (P || (P = Promise))(function (resolve, reject) { @@ -459,14 +552,8 @@ var __asyncValues = (this && this.__asyncValues) || function (o) { function verb(n) { i[n] = o[n] && function (v) { return new Promise(function (resolve, reject) { v = o[n](v), settle(resolve, reject, v.done, v.value); }); }; } function settle(resolve, reject, d, v) { Promise.resolve(v).then(function(v) { resolve({ value: v, done: d }); }, reject); } }; -var __importStar = (this && this.__importStar) || function (mod) { - if (mod && mod.__esModule) return mod; - var result = {}; - if (mod != null) for (var k in mod) if (Object.hasOwnProperty.call(mod, k)) result[k] = mod[k]; - result["default"] = mod; - return result; -}; Object.defineProperty(exports, "__esModule", ({ value: true })); +exports.isGhes = exports.assertDefined = exports.getGnuTarPathOnWindows = exports.getCacheFileName = exports.getCompressionMethod = exports.unlinkFile = exports.resolvePaths = exports.getArchiveFileSizeInBytes = exports.createTempDirectory = void 0; const core = __importStar(__nccwpck_require__(2186)); const exec = __importStar(__nccwpck_require__(1514)); const glob = __importStar(__nccwpck_require__(1597)); @@ -498,7 +585,7 @@ function createTempDirectory() { } tempDirectory = path.join(baseLocation, 'actions', 'temp'); } - const dest = path.join(tempDirectory, uuid_1.v4()); + const dest = path.join(tempDirectory, (0, uuid_1.v4)()); yield io.mkdirP(dest); return dest; }); @@ -551,12 +638,13 @@ function unlinkFile(filePath) { }); } exports.unlinkFile = unlinkFile; -function getVersion(app) { +function getVersion(app, additionalArgs = []) { return __awaiter(this, void 0, void 0, function* () { - core.debug(`Checking ${app} --version`); let versionOutput = ''; + additionalArgs.push('--version'); + core.debug(`Checking ${app} ${additionalArgs.join(' ')}`); try { - yield exec.exec(`${app} --version`, [], { + yield exec.exec(`${app}`, additionalArgs, { ignoreReturnCode: true, silent: true, listeners: { @@ -576,23 +664,14 @@ function getVersion(app) { // Use zstandard if possible to maximize cache performance function getCompressionMethod() { return __awaiter(this, void 0, void 0, function* () { - if (process.platform === 'win32' && !(yield isGnuTarInstalled())) { - // Disable zstd due to bug https://github.com/actions/cache/issues/301 - return constants_1.CompressionMethod.Gzip; - } - const versionOutput = yield getVersion('zstd'); + const versionOutput = yield getVersion('zstd', ['--quiet']); const version = semver.clean(versionOutput); - if (!versionOutput.toLowerCase().includes('zstd command line interface')) { - // zstd is not installed + core.debug(`zstd version: ${version}`); + if (versionOutput === '') { return constants_1.CompressionMethod.Gzip; } - else if (!version || semver.lt(version, 'v1.3.2')) { - // zstd is installed but using a version earlier than v1.3.2 - // v1.3.2 is required to use the `--long` options in zstd - return constants_1.CompressionMethod.ZstdWithoutLong; - } else { - return constants_1.CompressionMethod.Zstd; + return constants_1.CompressionMethod.ZstdWithoutLong; } }); } @@ -603,13 +682,16 @@ function getCacheFileName(compressionMethod) { : constants_1.CacheFilename.Zstd; } exports.getCacheFileName = getCacheFileName; -function isGnuTarInstalled() { +function getGnuTarPathOnWindows() { return __awaiter(this, void 0, void 0, function* () { + if (fs.existsSync(constants_1.GnuTarPathOnWindows)) { + return constants_1.GnuTarPathOnWindows; + } const 
versionOutput = yield getVersion('tar'); - return versionOutput.toLowerCase().includes('gnu tar'); + return versionOutput.toLowerCase().includes('gnu tar') ? io.which('tar') : ''; }); } -exports.isGnuTarInstalled = isGnuTarInstalled; +exports.getGnuTarPathOnWindows = getGnuTarPathOnWindows; function assertDefined(name, value) { if (value === undefined) { throw Error(`Expected ${name} but value was undefiend`); @@ -632,6 +714,7 @@ exports.isGhes = isGhes; "use strict"; Object.defineProperty(exports, "__esModule", ({ value: true })); +exports.ManifestFilename = exports.TarFilename = exports.SystemTarPathOnWindows = exports.GnuTarPathOnWindows = exports.SocketTimeout = exports.DefaultRetryDelay = exports.DefaultRetryAttempts = exports.ArchiveToolType = exports.CompressionMethod = exports.CacheFilename = void 0; var CacheFilename; (function (CacheFilename) { CacheFilename["Gzip"] = "cache.tgz"; @@ -645,6 +728,11 @@ var CompressionMethod; CompressionMethod["ZstdWithoutLong"] = "zstd-without-long"; CompressionMethod["Zstd"] = "zstd"; })(CompressionMethod = exports.CompressionMethod || (exports.CompressionMethod = {})); +var ArchiveToolType; +(function (ArchiveToolType) { + ArchiveToolType["GNU"] = "gnu"; + ArchiveToolType["BSD"] = "bsd"; +})(ArchiveToolType = exports.ArchiveToolType || (exports.ArchiveToolType = {})); // The default number of retry attempts. exports.DefaultRetryAttempts = 2; // The default delay in milliseconds between retry attempts. @@ -653,6 +741,12 @@ exports.DefaultRetryDelay = 5000; // over the socket during this period, the socket is destroyed and the download // is aborted. exports.SocketTimeout = 5000; +// The default path of GNUtar on hosted Windows runners +exports.GnuTarPathOnWindows = `${process.env['PROGRAMFILES']}\\Git\\usr\\bin\\tar.exe`; +// The default path of BSDtar on hosted Windows runners +exports.SystemTarPathOnWindows = `${process.env['SYSTEMDRIVE']}\\Windows\\System32\\tar.exe`; +exports.TarFilename = 'cache.tar'; +exports.ManifestFilename = 'manifest.txt'; //# sourceMappingURL=constants.js.map /***/ }), @@ -662,6 +756,29 @@ exports.SocketTimeout = 5000; "use strict"; +var __createBinding = (this && this.__createBinding) || (Object.create ? (function(o, m, k, k2) { + if (k2 === undefined) k2 = k; + var desc = Object.getOwnPropertyDescriptor(m, k); + if (!desc || ("get" in desc ? !m.__esModule : desc.writable || desc.configurable)) { + desc = { enumerable: true, get: function() { return m[k]; } }; + } + Object.defineProperty(o, k2, desc); +}) : (function(o, m, k, k2) { + if (k2 === undefined) k2 = k; + o[k2] = m[k]; +})); +var __setModuleDefault = (this && this.__setModuleDefault) || (Object.create ? (function(o, v) { + Object.defineProperty(o, "default", { enumerable: true, value: v }); +}) : function(o, v) { + o["default"] = v; +}); +var __importStar = (this && this.__importStar) || function (mod) { + if (mod && mod.__esModule) return mod; + var result = {}; + if (mod != null) for (var k in mod) if (k !== "default" && Object.prototype.hasOwnProperty.call(mod, k)) __createBinding(result, mod, k); + __setModuleDefault(result, mod); + return result; +}; var __awaiter = (this && this.__awaiter) || function (thisArg, _arguments, P, generator) { function adopt(value) { return value instanceof P ? 
value : new P(function (resolve) { resolve(value); }); } return new (P || (P = Promise))(function (resolve, reject) { @@ -671,14 +788,8 @@ var __awaiter = (this && this.__awaiter) || function (thisArg, _arguments, P, ge step((generator = generator.apply(thisArg, _arguments || [])).next()); }); }; -var __importStar = (this && this.__importStar) || function (mod) { - if (mod && mod.__esModule) return mod; - var result = {}; - if (mod != null) for (var k in mod) if (Object.hasOwnProperty.call(mod, k)) result[k] = mod[k]; - result["default"] = mod; - return result; -}; Object.defineProperty(exports, "__esModule", ({ value: true })); +exports.downloadCacheStorageSDK = exports.downloadCacheHttpClient = exports.DownloadProgress = void 0; const core = __importStar(__nccwpck_require__(2186)); const http_client_1 = __nccwpck_require__(1825); const storage_blob_1 = __nccwpck_require__(4100); @@ -813,7 +924,7 @@ function downloadCacheHttpClient(archiveLocation, archivePath) { return __awaiter(this, void 0, void 0, function* () { const writeStream = fs.createWriteStream(archivePath); const httpClient = new http_client_1.HttpClient('actions/cache'); - const downloadResponse = yield requestUtils_1.retryHttpClientResponse('downloadCache', () => __awaiter(this, void 0, void 0, function* () { return httpClient.get(archiveLocation); })); + const downloadResponse = yield (0, requestUtils_1.retryHttpClientResponse)('downloadCache', () => __awaiter(this, void 0, void 0, function* () { return httpClient.get(archiveLocation); })); // Abort download if no traffic received over the socket. downloadResponse.message.socket.setTimeout(constants_1.SocketTimeout, () => { downloadResponse.message.destroy(); @@ -868,7 +979,8 @@ function downloadCacheStorageSDK(archiveLocation, archivePath, options) { // If the file exceeds the buffer maximum length (~1 GB on 32-bit systems and ~2 GB // on 64-bit systems), split the download into multiple segments // ~2 GB = 2147483647, beyond this, we start getting out of range error. So, capping it accordingly. - const maxSegmentSize = Math.min(2147483647, buffer.constants.MAX_LENGTH); + // Updated segment size to 128MB = 134217728 bytes, to complete a segment faster and fail fast + const maxSegmentSize = Math.min(134217728, buffer.constants.MAX_LENGTH); const downloadProgress = new DownloadProgress(contentLength); const fd = fs.openSync(archivePath, 'w'); try { @@ -920,6 +1032,29 @@ const promiseWithTimeout = (timeoutMs, promise) => __awaiter(void 0, void 0, voi "use strict"; +var __createBinding = (this && this.__createBinding) || (Object.create ? (function(o, m, k, k2) { + if (k2 === undefined) k2 = k; + var desc = Object.getOwnPropertyDescriptor(m, k); + if (!desc || ("get" in desc ? !m.__esModule : desc.writable || desc.configurable)) { + desc = { enumerable: true, get: function() { return m[k]; } }; + } + Object.defineProperty(o, k2, desc); +}) : (function(o, m, k, k2) { + if (k2 === undefined) k2 = k; + o[k2] = m[k]; +})); +var __setModuleDefault = (this && this.__setModuleDefault) || (Object.create ? 
(function(o, v) { + Object.defineProperty(o, "default", { enumerable: true, value: v }); +}) : function(o, v) { + o["default"] = v; +}); +var __importStar = (this && this.__importStar) || function (mod) { + if (mod && mod.__esModule) return mod; + var result = {}; + if (mod != null) for (var k in mod) if (k !== "default" && Object.prototype.hasOwnProperty.call(mod, k)) __createBinding(result, mod, k); + __setModuleDefault(result, mod); + return result; +}; var __awaiter = (this && this.__awaiter) || function (thisArg, _arguments, P, generator) { function adopt(value) { return value instanceof P ? value : new P(function (resolve) { resolve(value); }); } return new (P || (P = Promise))(function (resolve, reject) { @@ -929,14 +1064,8 @@ var __awaiter = (this && this.__awaiter) || function (thisArg, _arguments, P, ge step((generator = generator.apply(thisArg, _arguments || [])).next()); }); }; -var __importStar = (this && this.__importStar) || function (mod) { - if (mod && mod.__esModule) return mod; - var result = {}; - if (mod != null) for (var k in mod) if (Object.hasOwnProperty.call(mod, k)) result[k] = mod[k]; - result["default"] = mod; - return result; -}; Object.defineProperty(exports, "__esModule", ({ value: true })); +exports.retryHttpClientResponse = exports.retryTypedResponse = exports.retry = exports.isRetryableStatusCode = exports.isServerErrorStatusCode = exports.isSuccessStatusCode = void 0; const core = __importStar(__nccwpck_require__(2186)); const http_client_1 = __nccwpck_require__(1825); const constants_1 = __nccwpck_require__(8840); @@ -1047,6 +1176,29 @@ exports.retryHttpClientResponse = retryHttpClientResponse; "use strict"; +var __createBinding = (this && this.__createBinding) || (Object.create ? (function(o, m, k, k2) { + if (k2 === undefined) k2 = k; + var desc = Object.getOwnPropertyDescriptor(m, k); + if (!desc || ("get" in desc ? !m.__esModule : desc.writable || desc.configurable)) { + desc = { enumerable: true, get: function() { return m[k]; } }; + } + Object.defineProperty(o, k2, desc); +}) : (function(o, m, k, k2) { + if (k2 === undefined) k2 = k; + o[k2] = m[k]; +})); +var __setModuleDefault = (this && this.__setModuleDefault) || (Object.create ? (function(o, v) { + Object.defineProperty(o, "default", { enumerable: true, value: v }); +}) : function(o, v) { + o["default"] = v; +}); +var __importStar = (this && this.__importStar) || function (mod) { + if (mod && mod.__esModule) return mod; + var result = {}; + if (mod != null) for (var k in mod) if (k !== "default" && Object.prototype.hasOwnProperty.call(mod, k)) __createBinding(result, mod, k); + __setModuleDefault(result, mod); + return result; +}; var __awaiter = (this && this.__awaiter) || function (thisArg, _arguments, P, generator) { function adopt(value) { return value instanceof P ? 
value : new P(function (resolve) { resolve(value); }); } return new (P || (P = Promise))(function (resolve, reject) { @@ -1056,14 +1208,8 @@ var __awaiter = (this && this.__awaiter) || function (thisArg, _arguments, P, ge step((generator = generator.apply(thisArg, _arguments || [])).next()); }); }; -var __importStar = (this && this.__importStar) || function (mod) { - if (mod && mod.__esModule) return mod; - var result = {}; - if (mod != null) for (var k in mod) if (Object.hasOwnProperty.call(mod, k)) result[k] = mod[k]; - result["default"] = mod; - return result; -}; Object.defineProperty(exports, "__esModule", ({ value: true })); +exports.createTar = exports.extractTar = exports.listTar = void 0; const exec_1 = __nccwpck_require__(1514); const io = __importStar(__nccwpck_require__(7436)); const fs_1 = __nccwpck_require__(7147); @@ -1071,21 +1217,19 @@ const path = __importStar(__nccwpck_require__(1017)); const utils = __importStar(__nccwpck_require__(1518)); const constants_1 = __nccwpck_require__(8840); const IS_WINDOWS = process.platform === 'win32'; -function getTarPath(args, compressionMethod) { +// Returns tar path and type: BSD or GNU +function getTarPath() { return __awaiter(this, void 0, void 0, function* () { switch (process.platform) { case 'win32': { - const systemTar = `${process.env['windir']}\\System32\\tar.exe`; - if (compressionMethod !== constants_1.CompressionMethod.Gzip) { - // We only use zstandard compression on windows when gnu tar is installed due to - // a bug with compressing large files with bsdtar + zstd - args.push('--force-local'); + const gnuTar = yield utils.getGnuTarPathOnWindows(); + const systemTar = constants_1.SystemTarPathOnWindows; + if (gnuTar) { + // Use GNUtar as default on windows + return { path: gnuTar, type: constants_1.ArchiveToolType.GNU }; } - else if (fs_1.existsSync(systemTar)) { - return systemTar; - } - else if (yield utils.isGnuTarInstalled()) { - args.push('--force-local'); + else if ((0, fs_1.existsSync)(systemTar)) { + return { path: systemTar, type: constants_1.ArchiveToolType.BSD }; } break; } @@ -1093,25 +1237,92 @@ function getTarPath(args, compressionMethod) { const gnuTar = yield io.which('gtar', false); if (gnuTar) { // fix permission denied errors when extracting BSD tar archive with GNU tar - https://github.com/actions/cache/issues/527 - args.push('--delay-directory-restore'); - return gnuTar; + return { path: gnuTar, type: constants_1.ArchiveToolType.GNU }; + } + else { + return { + path: yield io.which('tar', true), + type: constants_1.ArchiveToolType.BSD + }; } - break; } default: break; } - return yield io.which('tar', true); + // Default assumption is GNU tar is present in path + return { + path: yield io.which('tar', true), + type: constants_1.ArchiveToolType.GNU + }; }); } -function execTar(args, compressionMethod, cwd) { +// Return arguments for tar as per tarPath, compressionMethod, method type and os +function getTarArgs(tarPath, compressionMethod, type, archivePath = '') { return __awaiter(this, void 0, void 0, function* () { - try { - yield exec_1.exec(`"${yield getTarPath(args, compressionMethod)}"`, args, { cwd }); + const args = [`"${tarPath.path}"`]; + const cacheFileName = utils.getCacheFileName(compressionMethod); + const tarFile = 'cache.tar'; + const workingDirectory = getWorkingDirectory(); + // Speficic args for BSD tar on windows for workaround + const BSD_TAR_ZSTD = tarPath.type === constants_1.ArchiveToolType.BSD && + compressionMethod !== constants_1.CompressionMethod.Gzip && + IS_WINDOWS; + // 
Method specific args + switch (type) { + case 'create': + args.push('--posix', '-cf', BSD_TAR_ZSTD + ? tarFile + : cacheFileName.replace(new RegExp(`\\${path.sep}`, 'g'), '/'), '--exclude', BSD_TAR_ZSTD + ? tarFile + : cacheFileName.replace(new RegExp(`\\${path.sep}`, 'g'), '/'), '-P', '-C', workingDirectory.replace(new RegExp(`\\${path.sep}`, 'g'), '/'), '--files-from', constants_1.ManifestFilename); + break; + case 'extract': + args.push('-xf', BSD_TAR_ZSTD + ? tarFile + : archivePath.replace(new RegExp(`\\${path.sep}`, 'g'), '/'), '-P', '-C', workingDirectory.replace(new RegExp(`\\${path.sep}`, 'g'), '/')); + break; + case 'list': + args.push('-tf', BSD_TAR_ZSTD + ? tarFile + : archivePath.replace(new RegExp(`\\${path.sep}`, 'g'), '/'), '-P'); + break; } - catch (error) { - throw new Error(`Tar failed with error: ${error === null || error === void 0 ? void 0 : error.message}`); + // Platform specific args + if (tarPath.type === constants_1.ArchiveToolType.GNU) { + switch (process.platform) { + case 'win32': + args.push('--force-local'); + break; + case 'darwin': + args.push('--delay-directory-restore'); + break; + } } + return args; + }); +} +// Returns commands to run tar and compression program +function getCommands(compressionMethod, type, archivePath = '') { + return __awaiter(this, void 0, void 0, function* () { + let args; + const tarPath = yield getTarPath(); + const tarArgs = yield getTarArgs(tarPath, compressionMethod, type, archivePath); + const compressionArgs = type !== 'create' + ? yield getDecompressionProgram(tarPath, compressionMethod, archivePath) + : yield getCompressionProgram(tarPath, compressionMethod); + const BSD_TAR_ZSTD = tarPath.type === constants_1.ArchiveToolType.BSD && + compressionMethod !== constants_1.CompressionMethod.Gzip && + IS_WINDOWS; + if (BSD_TAR_ZSTD && type !== 'create') { + args = [[...compressionArgs].join(' '), [...tarArgs].join(' ')]; + } + else { + args = [[...tarArgs].join(' '), [...compressionArgs].join(' ')]; + } + if (BSD_TAR_ZSTD) { + return args; + } + return [args.join(' ')]; }); } function getWorkingDirectory() { @@ -1119,91 +1330,119 @@ function getWorkingDirectory() { return (_a = process.env['GITHUB_WORKSPACE']) !== null && _a !== void 0 ? _a : process.cwd(); } // Common function for extractTar and listTar to get the compression method -function getCompressionProgram(compressionMethod) { - // -d: Decompress. - // unzstd is equivalent to 'zstd -d' - // --long=#: Enables long distance matching with # bits. Maximum is 30 (1GB) on 32-bit OS and 31 (2GB) on 64-bit. - // Using 30 here because we also support 32-bit self-hosted runners. - switch (compressionMethod) { - case constants_1.CompressionMethod.Zstd: - return [ - '--use-compress-program', - IS_WINDOWS ? 'zstd -d --long=30' : 'unzstd --long=30' - ]; - case constants_1.CompressionMethod.ZstdWithoutLong: - return ['--use-compress-program', IS_WINDOWS ? 'zstd -d' : 'unzstd']; - default: - return ['-z']; - } +function getDecompressionProgram(tarPath, compressionMethod, archivePath) { + return __awaiter(this, void 0, void 0, function* () { + // -d: Decompress. + // unzstd is equivalent to 'zstd -d' + // --long=#: Enables long distance matching with # bits. Maximum is 30 (1GB) on 32-bit OS and 31 (2GB) on 64-bit. + // Using 30 here because we also support 32-bit self-hosted runners. 
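Aside: this tar refactor splits "which tar binary", "which tar args", and "which compression program" into separate helpers, and for BSD tar with zstd on Windows it runs the compressor as its own process instead of through `--use-compress-program`. A rough sketch of the extract commands these helpers assemble, with placeholder paths that are not taken from this change:

    GNU tar available - one combined command:
      "tar" -xf <archive>.tzst -P -C <workspace> --use-compress-program "unzstd --long=30"
    BSD tar + zstd on Windows - two commands run in sequence by execCommands:
      zstd -d --long=30 --force -o cache.tar <archive>.tzst
      "tar" -xf cache.tar -P -C <workspace>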
+ const BSD_TAR_ZSTD = tarPath.type === constants_1.ArchiveToolType.BSD && + compressionMethod !== constants_1.CompressionMethod.Gzip && + IS_WINDOWS; + switch (compressionMethod) { + case constants_1.CompressionMethod.Zstd: + return BSD_TAR_ZSTD + ? [ + 'zstd -d --long=30 --force -o', + constants_1.TarFilename, + archivePath.replace(new RegExp(`\\${path.sep}`, 'g'), '/') + ] + : [ + '--use-compress-program', + IS_WINDOWS ? '"zstd -d --long=30"' : 'unzstd --long=30' + ]; + case constants_1.CompressionMethod.ZstdWithoutLong: + return BSD_TAR_ZSTD + ? [ + 'zstd -d --force -o', + constants_1.TarFilename, + archivePath.replace(new RegExp(`\\${path.sep}`, 'g'), '/') + ] + : ['--use-compress-program', IS_WINDOWS ? '"zstd -d"' : 'unzstd']; + default: + return ['-z']; + } + }); } +// Used for creating the archive +// -T#: Compress using # working thread. If # is 0, attempt to detect and use the number of physical CPU cores. +// zstdmt is equivalent to 'zstd -T0' +// --long=#: Enables long distance matching with # bits. Maximum is 30 (1GB) on 32-bit OS and 31 (2GB) on 64-bit. +// Using 30 here because we also support 32-bit self-hosted runners. +// Long range mode is added to zstd in v1.3.2 release, so we will not use --long in older version of zstd. +function getCompressionProgram(tarPath, compressionMethod) { + return __awaiter(this, void 0, void 0, function* () { + const cacheFileName = utils.getCacheFileName(compressionMethod); + const BSD_TAR_ZSTD = tarPath.type === constants_1.ArchiveToolType.BSD && + compressionMethod !== constants_1.CompressionMethod.Gzip && + IS_WINDOWS; + switch (compressionMethod) { + case constants_1.CompressionMethod.Zstd: + return BSD_TAR_ZSTD + ? [ + 'zstd -T0 --long=30 --force -o', + cacheFileName.replace(new RegExp(`\\${path.sep}`, 'g'), '/'), + constants_1.TarFilename + ] + : [ + '--use-compress-program', + IS_WINDOWS ? '"zstd -T0 --long=30"' : 'zstdmt --long=30' + ]; + case constants_1.CompressionMethod.ZstdWithoutLong: + return BSD_TAR_ZSTD + ? [ + 'zstd -T0 --force -o', + cacheFileName.replace(new RegExp(`\\${path.sep}`, 'g'), '/'), + constants_1.TarFilename + ] + : ['--use-compress-program', IS_WINDOWS ? '"zstd -T0"' : 'zstdmt']; + default: + return ['-z']; + } + }); +} +// Executes all commands as separate processes +function execCommands(commands, cwd) { + return __awaiter(this, void 0, void 0, function* () { + for (const command of commands) { + try { + yield (0, exec_1.exec)(command, undefined, { + cwd, + env: Object.assign(Object.assign({}, process.env), { MSYS: 'winsymlinks:nativestrict' }) + }); + } + catch (error) { + throw new Error(`${command.split(' ')[0]} failed with error: ${error === null || error === void 0 ? 
void 0 : error.message}`); + } + } + }); +} +// List the contents of a tar function listTar(archivePath, compressionMethod) { return __awaiter(this, void 0, void 0, function* () { - const args = [ - ...getCompressionProgram(compressionMethod), - '-tf', - archivePath.replace(new RegExp(`\\${path.sep}`, 'g'), '/'), - '-P' - ]; - yield execTar(args, compressionMethod); + const commands = yield getCommands(compressionMethod, 'list', archivePath); + yield execCommands(commands); }); } exports.listTar = listTar; +// Extract a tar function extractTar(archivePath, compressionMethod) { return __awaiter(this, void 0, void 0, function* () { // Create directory to extract tar into const workingDirectory = getWorkingDirectory(); yield io.mkdirP(workingDirectory); - const args = [ - ...getCompressionProgram(compressionMethod), - '-xf', - archivePath.replace(new RegExp(`\\${path.sep}`, 'g'), '/'), - '-P', - '-C', - workingDirectory.replace(new RegExp(`\\${path.sep}`, 'g'), '/') - ]; - yield execTar(args, compressionMethod); + const commands = yield getCommands(compressionMethod, 'extract', archivePath); + yield execCommands(commands); }); } exports.extractTar = extractTar; +// Create a tar function createTar(archiveFolder, sourceDirectories, compressionMethod) { return __awaiter(this, void 0, void 0, function* () { // Write source directories to manifest.txt to avoid command length limits - const manifestFilename = 'manifest.txt'; - const cacheFileName = utils.getCacheFileName(compressionMethod); - fs_1.writeFileSync(path.join(archiveFolder, manifestFilename), sourceDirectories.join('\n')); - const workingDirectory = getWorkingDirectory(); - // -T#: Compress using # working thread. If # is 0, attempt to detect and use the number of physical CPU cores. - // zstdmt is equivalent to 'zstd -T0' - // --long=#: Enables long distance matching with # bits. Maximum is 30 (1GB) on 32-bit OS and 31 (2GB) on 64-bit. - // Using 30 here because we also support 32-bit self-hosted runners. - // Long range mode is added to zstd in v1.3.2 release, so we will not use --long in older version of zstd. - function getCompressionProgram() { - switch (compressionMethod) { - case constants_1.CompressionMethod.Zstd: - return [ - '--use-compress-program', - IS_WINDOWS ? 'zstd -T0 --long=30' : 'zstdmt --long=30' - ]; - case constants_1.CompressionMethod.ZstdWithoutLong: - return ['--use-compress-program', IS_WINDOWS ? 'zstd -T0' : 'zstdmt']; - default: - return ['-z']; - } - } - const args = [ - '--posix', - ...getCompressionProgram(), - '-cf', - cacheFileName.replace(new RegExp(`\\${path.sep}`, 'g'), '/'), - '--exclude', - cacheFileName.replace(new RegExp(`\\${path.sep}`, 'g'), '/'), - '-P', - '-C', - workingDirectory.replace(new RegExp(`\\${path.sep}`, 'g'), '/'), - '--files-from', - manifestFilename - ]; - yield execTar(args, compressionMethod, archiveFolder); + (0, fs_1.writeFileSync)(path.join(archiveFolder, constants_1.ManifestFilename), sourceDirectories.join('\n')); + const commands = yield getCommands(compressionMethod, 'create'); + yield execCommands(commands, archiveFolder); }); } exports.createTar = createTar; @@ -1216,14 +1455,31 @@ exports.createTar = createTar; "use strict"; +var __createBinding = (this && this.__createBinding) || (Object.create ? (function(o, m, k, k2) { + if (k2 === undefined) k2 = k; + var desc = Object.getOwnPropertyDescriptor(m, k); + if (!desc || ("get" in desc ? 
!m.__esModule : desc.writable || desc.configurable)) { + desc = { enumerable: true, get: function() { return m[k]; } }; + } + Object.defineProperty(o, k2, desc); +}) : (function(o, m, k, k2) { + if (k2 === undefined) k2 = k; + o[k2] = m[k]; +})); +var __setModuleDefault = (this && this.__setModuleDefault) || (Object.create ? (function(o, v) { + Object.defineProperty(o, "default", { enumerable: true, value: v }); +}) : function(o, v) { + o["default"] = v; +}); var __importStar = (this && this.__importStar) || function (mod) { if (mod && mod.__esModule) return mod; var result = {}; - if (mod != null) for (var k in mod) if (Object.hasOwnProperty.call(mod, k)) result[k] = mod[k]; - result["default"] = mod; + if (mod != null) for (var k in mod) if (k !== "default" && Object.prototype.hasOwnProperty.call(mod, k)) __createBinding(result, mod, k); + __setModuleDefault(result, mod); return result; }; Object.defineProperty(exports, "__esModule", ({ value: true })); +exports.getDownloadOptions = exports.getUploadOptions = void 0; const core = __importStar(__nccwpck_require__(2186)); /** * Returns a copy of the upload options with defaults filled in. @@ -1258,7 +1514,8 @@ function getDownloadOptions(copy) { useAzureSdk: true, downloadConcurrency: 8, timeoutInMs: 30000, - segmentTimeoutInMs: 3600000 + segmentTimeoutInMs: 600000, + lookupOnly: false }; if (copy) { if (typeof copy.useAzureSdk === 'boolean') { @@ -1273,6 +1530,9 @@ function getDownloadOptions(copy) { if (typeof copy.segmentTimeoutInMs === 'number') { result.segmentTimeoutInMs = copy.segmentTimeoutInMs; } + if (typeof copy.lookupOnly === 'boolean') { + result.lookupOnly = copy.lookupOnly; + } } const segmentDownloadTimeoutMins = process.env['SEGMENT_DOWNLOAD_TIMEOUT_MINS']; if (segmentDownloadTimeoutMins && @@ -1285,6 +1545,7 @@ function getDownloadOptions(copy) { core.debug(`Request timeout (ms): ${result.timeoutInMs}`); core.debug(`Cache segment download timeout mins env var: ${process.env['SEGMENT_DOWNLOAD_TIMEOUT_MINS']}`); core.debug(`Segment download timeout (ms): ${result.segmentTimeoutInMs}`); + core.debug(`Lookup only: ${result.lookupOnly}`); return result; } exports.getDownloadOptions = getDownloadOptions; @@ -6480,6 +6741,1193 @@ class ExecState extends events.EventEmitter { /***/ }), +/***/ 8090: +/***/ (function(__unused_webpack_module, exports, __nccwpck_require__) { + +"use strict"; + +var __awaiter = (this && this.__awaiter) || function (thisArg, _arguments, P, generator) { + function adopt(value) { return value instanceof P ? value : new P(function (resolve) { resolve(value); }); } + return new (P || (P = Promise))(function (resolve, reject) { + function fulfilled(value) { try { step(generator.next(value)); } catch (e) { reject(e); } } + function rejected(value) { try { step(generator["throw"](value)); } catch (e) { reject(e); } } + function step(result) { result.done ? 
resolve(result.value) : adopt(result.value).then(fulfilled, rejected); } + step((generator = generator.apply(thisArg, _arguments || [])).next()); + }); +}; +Object.defineProperty(exports, "__esModule", ({ value: true })); +exports.hashFiles = exports.create = void 0; +const internal_globber_1 = __nccwpck_require__(8298); +const internal_hash_files_1 = __nccwpck_require__(2448); +/** + * Constructs a globber + * + * @param patterns Patterns separated by newlines + * @param options Glob options + */ +function create(patterns, options) { + return __awaiter(this, void 0, void 0, function* () { + return yield internal_globber_1.DefaultGlobber.create(patterns, options); + }); +} +exports.create = create; +/** + * Computes the sha256 hash of a glob + * + * @param patterns Patterns separated by newlines + * @param options Glob options + */ +function hashFiles(patterns, options) { + return __awaiter(this, void 0, void 0, function* () { + let followSymbolicLinks = true; + if (options && typeof options.followSymbolicLinks === 'boolean') { + followSymbolicLinks = options.followSymbolicLinks; + } + const globber = yield create(patterns, { followSymbolicLinks }); + return internal_hash_files_1.hashFiles(globber); + }); +} +exports.hashFiles = hashFiles; +//# sourceMappingURL=glob.js.map + +/***/ }), + +/***/ 1026: +/***/ (function(__unused_webpack_module, exports, __nccwpck_require__) { + +"use strict"; + +var __createBinding = (this && this.__createBinding) || (Object.create ? (function(o, m, k, k2) { + if (k2 === undefined) k2 = k; + Object.defineProperty(o, k2, { enumerable: true, get: function() { return m[k]; } }); +}) : (function(o, m, k, k2) { + if (k2 === undefined) k2 = k; + o[k2] = m[k]; +})); +var __setModuleDefault = (this && this.__setModuleDefault) || (Object.create ? (function(o, v) { + Object.defineProperty(o, "default", { enumerable: true, value: v }); +}) : function(o, v) { + o["default"] = v; +}); +var __importStar = (this && this.__importStar) || function (mod) { + if (mod && mod.__esModule) return mod; + var result = {}; + if (mod != null) for (var k in mod) if (k !== "default" && Object.hasOwnProperty.call(mod, k)) __createBinding(result, mod, k); + __setModuleDefault(result, mod); + return result; +}; +Object.defineProperty(exports, "__esModule", ({ value: true })); +exports.getOptions = void 0; +const core = __importStar(__nccwpck_require__(2186)); +/** + * Returns a copy with defaults filled in. 
+ */ +function getOptions(copy) { + const result = { + followSymbolicLinks: true, + implicitDescendants: true, + matchDirectories: true, + omitBrokenSymbolicLinks: true + }; + if (copy) { + if (typeof copy.followSymbolicLinks === 'boolean') { + result.followSymbolicLinks = copy.followSymbolicLinks; + core.debug(`followSymbolicLinks '${result.followSymbolicLinks}'`); + } + if (typeof copy.implicitDescendants === 'boolean') { + result.implicitDescendants = copy.implicitDescendants; + core.debug(`implicitDescendants '${result.implicitDescendants}'`); + } + if (typeof copy.matchDirectories === 'boolean') { + result.matchDirectories = copy.matchDirectories; + core.debug(`matchDirectories '${result.matchDirectories}'`); + } + if (typeof copy.omitBrokenSymbolicLinks === 'boolean') { + result.omitBrokenSymbolicLinks = copy.omitBrokenSymbolicLinks; + core.debug(`omitBrokenSymbolicLinks '${result.omitBrokenSymbolicLinks}'`); + } + } + return result; +} +exports.getOptions = getOptions; +//# sourceMappingURL=internal-glob-options-helper.js.map + +/***/ }), + +/***/ 8298: +/***/ (function(__unused_webpack_module, exports, __nccwpck_require__) { + +"use strict"; + +var __createBinding = (this && this.__createBinding) || (Object.create ? (function(o, m, k, k2) { + if (k2 === undefined) k2 = k; + Object.defineProperty(o, k2, { enumerable: true, get: function() { return m[k]; } }); +}) : (function(o, m, k, k2) { + if (k2 === undefined) k2 = k; + o[k2] = m[k]; +})); +var __setModuleDefault = (this && this.__setModuleDefault) || (Object.create ? (function(o, v) { + Object.defineProperty(o, "default", { enumerable: true, value: v }); +}) : function(o, v) { + o["default"] = v; +}); +var __importStar = (this && this.__importStar) || function (mod) { + if (mod && mod.__esModule) return mod; + var result = {}; + if (mod != null) for (var k in mod) if (k !== "default" && Object.hasOwnProperty.call(mod, k)) __createBinding(result, mod, k); + __setModuleDefault(result, mod); + return result; +}; +var __awaiter = (this && this.__awaiter) || function (thisArg, _arguments, P, generator) { + function adopt(value) { return value instanceof P ? value : new P(function (resolve) { resolve(value); }); } + return new (P || (P = Promise))(function (resolve, reject) { + function fulfilled(value) { try { step(generator.next(value)); } catch (e) { reject(e); } } + function rejected(value) { try { step(generator["throw"](value)); } catch (e) { reject(e); } } + function step(result) { result.done ? resolve(result.value) : adopt(result.value).then(fulfilled, rejected); } + step((generator = generator.apply(thisArg, _arguments || [])).next()); + }); +}; +var __asyncValues = (this && this.__asyncValues) || function (o) { + if (!Symbol.asyncIterator) throw new TypeError("Symbol.asyncIterator is not defined."); + var m = o[Symbol.asyncIterator], i; + return m ? m.call(o) : (o = typeof __values === "function" ? __values(o) : o[Symbol.iterator](), i = {}, verb("next"), verb("throw"), verb("return"), i[Symbol.asyncIterator] = function () { return this; }, i); + function verb(n) { i[n] = o[n] && function (v) { return new Promise(function (resolve, reject) { v = o[n](v), settle(resolve, reject, v.done, v.value); }); }; } + function settle(resolve, reject, d, v) { Promise.resolve(v).then(function(v) { resolve({ value: v, done: d }); }, reject); } +}; +var __await = (this && this.__await) || function (v) { return this instanceof __await ? 
(this.v = v, this) : new __await(v); } +var __asyncGenerator = (this && this.__asyncGenerator) || function (thisArg, _arguments, generator) { + if (!Symbol.asyncIterator) throw new TypeError("Symbol.asyncIterator is not defined."); + var g = generator.apply(thisArg, _arguments || []), i, q = []; + return i = {}, verb("next"), verb("throw"), verb("return"), i[Symbol.asyncIterator] = function () { return this; }, i; + function verb(n) { if (g[n]) i[n] = function (v) { return new Promise(function (a, b) { q.push([n, v, a, b]) > 1 || resume(n, v); }); }; } + function resume(n, v) { try { step(g[n](v)); } catch (e) { settle(q[0][3], e); } } + function step(r) { r.value instanceof __await ? Promise.resolve(r.value.v).then(fulfill, reject) : settle(q[0][2], r); } + function fulfill(value) { resume("next", value); } + function reject(value) { resume("throw", value); } + function settle(f, v) { if (f(v), q.shift(), q.length) resume(q[0][0], q[0][1]); } +}; +Object.defineProperty(exports, "__esModule", ({ value: true })); +exports.DefaultGlobber = void 0; +const core = __importStar(__nccwpck_require__(2186)); +const fs = __importStar(__nccwpck_require__(7147)); +const globOptionsHelper = __importStar(__nccwpck_require__(1026)); +const path = __importStar(__nccwpck_require__(1017)); +const patternHelper = __importStar(__nccwpck_require__(9005)); +const internal_match_kind_1 = __nccwpck_require__(1063); +const internal_pattern_1 = __nccwpck_require__(4536); +const internal_search_state_1 = __nccwpck_require__(9117); +const IS_WINDOWS = process.platform === 'win32'; +class DefaultGlobber { + constructor(options) { + this.patterns = []; + this.searchPaths = []; + this.options = globOptionsHelper.getOptions(options); + } + getSearchPaths() { + // Return a copy + return this.searchPaths.slice(); + } + glob() { + var e_1, _a; + return __awaiter(this, void 0, void 0, function* () { + const result = []; + try { + for (var _b = __asyncValues(this.globGenerator()), _c; _c = yield _b.next(), !_c.done;) { + const itemPath = _c.value; + result.push(itemPath); + } + } + catch (e_1_1) { e_1 = { error: e_1_1 }; } + finally { + try { + if (_c && !_c.done && (_a = _b.return)) yield _a.call(_b); + } + finally { if (e_1) throw e_1.error; } + } + return result; + }); + } + globGenerator() { + return __asyncGenerator(this, arguments, function* globGenerator_1() { + // Fill in defaults options + const options = globOptionsHelper.getOptions(this.options); + // Implicit descendants? + const patterns = []; + for (const pattern of this.patterns) { + patterns.push(pattern); + if (options.implicitDescendants && + (pattern.trailingSeparator || + pattern.segments[pattern.segments.length - 1] !== '**')) { + patterns.push(new internal_pattern_1.Pattern(pattern.negate, true, pattern.segments.concat('**'))); + } + } + // Push the search paths + const stack = []; + for (const searchPath of patternHelper.getSearchPaths(patterns)) { + core.debug(`Search path '${searchPath}'`); + // Exists? + try { + // Intentionally using lstat. Detection for broken symlink + // will be performed later (if following symlinks). + yield __await(fs.promises.lstat(searchPath)); + } + catch (err) { + if (err.code === 'ENOENT') { + continue; + } + throw err; + } + stack.unshift(new internal_search_state_1.SearchState(searchPath, 1)); + } + // Search + const traversalChain = []; // used to detect cycles + while (stack.length) { + // Pop + const item = stack.pop(); + // Match? 
+ const match = patternHelper.match(patterns, item.path); + const partialMatch = !!match || patternHelper.partialMatch(patterns, item.path); + if (!match && !partialMatch) { + continue; + } + // Stat + const stats = yield __await(DefaultGlobber.stat(item, options, traversalChain) + // Broken symlink, or symlink cycle detected, or no longer exists + ); + // Broken symlink, or symlink cycle detected, or no longer exists + if (!stats) { + continue; + } + // Directory + if (stats.isDirectory()) { + // Matched + if (match & internal_match_kind_1.MatchKind.Directory && options.matchDirectories) { + yield yield __await(item.path); + } + // Descend? + else if (!partialMatch) { + continue; + } + // Push the child items in reverse + const childLevel = item.level + 1; + const childItems = (yield __await(fs.promises.readdir(item.path))).map(x => new internal_search_state_1.SearchState(path.join(item.path, x), childLevel)); + stack.push(...childItems.reverse()); + } + // File + else if (match & internal_match_kind_1.MatchKind.File) { + yield yield __await(item.path); + } + } + }); + } + /** + * Constructs a DefaultGlobber + */ + static create(patterns, options) { + return __awaiter(this, void 0, void 0, function* () { + const result = new DefaultGlobber(options); + if (IS_WINDOWS) { + patterns = patterns.replace(/\r\n/g, '\n'); + patterns = patterns.replace(/\r/g, '\n'); + } + const lines = patterns.split('\n').map(x => x.trim()); + for (const line of lines) { + // Empty or comment + if (!line || line.startsWith('#')) { + continue; + } + // Pattern + else { + result.patterns.push(new internal_pattern_1.Pattern(line)); + } + } + result.searchPaths.push(...patternHelper.getSearchPaths(result.patterns)); + return result; + }); + } + static stat(item, options, traversalChain) { + return __awaiter(this, void 0, void 0, function* () { + // Note: + // `stat` returns info about the target of a symlink (or symlink chain) + // `lstat` returns info about a symlink itself + let stats; + if (options.followSymbolicLinks) { + try { + // Use `stat` (following symlinks) + stats = yield fs.promises.stat(item.path); + } + catch (err) { + if (err.code === 'ENOENT') { + if (options.omitBrokenSymbolicLinks) { + core.debug(`Broken symlink '${item.path}'`); + return undefined; + } + throw new Error(`No information found for the path '${item.path}'. This may indicate a broken symbolic link.`); + } + throw err; + } + } + else { + // Use `lstat` (not following symlinks) + stats = yield fs.promises.lstat(item.path); + } + // Note, isDirectory() returns false for the lstat of a symlink + if (stats.isDirectory() && options.followSymbolicLinks) { + // Get the realpath + const realPath = yield fs.promises.realpath(item.path); + // Fixup the traversal chain to match the item level + while (traversalChain.length >= item.level) { + traversalChain.pop(); + } + // Test for a cycle + if (traversalChain.some((x) => x === realPath)) { + core.debug(`Symlink cycle detected for path '${item.path}' and realpath '${realPath}'`); + return undefined; + } + // Update the traversal chain + traversalChain.push(realPath); + } + return stats; + }); + } +} +exports.DefaultGlobber = DefaultGlobber; +//# sourceMappingURL=internal-globber.js.map + +/***/ }), + +/***/ 2448: +/***/ (function(__unused_webpack_module, exports, __nccwpck_require__) { + +"use strict"; + +var __createBinding = (this && this.__createBinding) || (Object.create ? 
(function(o, m, k, k2) { + if (k2 === undefined) k2 = k; + Object.defineProperty(o, k2, { enumerable: true, get: function() { return m[k]; } }); +}) : (function(o, m, k, k2) { + if (k2 === undefined) k2 = k; + o[k2] = m[k]; +})); +var __setModuleDefault = (this && this.__setModuleDefault) || (Object.create ? (function(o, v) { + Object.defineProperty(o, "default", { enumerable: true, value: v }); +}) : function(o, v) { + o["default"] = v; +}); +var __importStar = (this && this.__importStar) || function (mod) { + if (mod && mod.__esModule) return mod; + var result = {}; + if (mod != null) for (var k in mod) if (k !== "default" && Object.hasOwnProperty.call(mod, k)) __createBinding(result, mod, k); + __setModuleDefault(result, mod); + return result; +}; +var __awaiter = (this && this.__awaiter) || function (thisArg, _arguments, P, generator) { + function adopt(value) { return value instanceof P ? value : new P(function (resolve) { resolve(value); }); } + return new (P || (P = Promise))(function (resolve, reject) { + function fulfilled(value) { try { step(generator.next(value)); } catch (e) { reject(e); } } + function rejected(value) { try { step(generator["throw"](value)); } catch (e) { reject(e); } } + function step(result) { result.done ? resolve(result.value) : adopt(result.value).then(fulfilled, rejected); } + step((generator = generator.apply(thisArg, _arguments || [])).next()); + }); +}; +var __asyncValues = (this && this.__asyncValues) || function (o) { + if (!Symbol.asyncIterator) throw new TypeError("Symbol.asyncIterator is not defined."); + var m = o[Symbol.asyncIterator], i; + return m ? m.call(o) : (o = typeof __values === "function" ? __values(o) : o[Symbol.iterator](), i = {}, verb("next"), verb("throw"), verb("return"), i[Symbol.asyncIterator] = function () { return this; }, i); + function verb(n) { i[n] = o[n] && function (v) { return new Promise(function (resolve, reject) { v = o[n](v), settle(resolve, reject, v.done, v.value); }); }; } + function settle(resolve, reject, d, v) { Promise.resolve(v).then(function(v) { resolve({ value: v, done: d }); }, reject); } +}; +Object.defineProperty(exports, "__esModule", ({ value: true })); +exports.hashFiles = void 0; +const crypto = __importStar(__nccwpck_require__(6113)); +const core = __importStar(__nccwpck_require__(2186)); +const fs = __importStar(__nccwpck_require__(7147)); +const stream = __importStar(__nccwpck_require__(2781)); +const util = __importStar(__nccwpck_require__(3837)); +const path = __importStar(__nccwpck_require__(1017)); +function hashFiles(globber) { + var e_1, _a; + var _b; + return __awaiter(this, void 0, void 0, function* () { + let hasMatch = false; + const githubWorkspace = (_b = process.env['GITHUB_WORKSPACE']) !== null && _b !== void 0 ? 
_b : process.cwd(); + const result = crypto.createHash('sha256'); + let count = 0; + try { + for (var _c = __asyncValues(globber.globGenerator()), _d; _d = yield _c.next(), !_d.done;) { + const file = _d.value; + core.debug(file); + if (!file.startsWith(`${githubWorkspace}${path.sep}`)) { + core.debug(`Ignore '${file}' since it is not under GITHUB_WORKSPACE.`); + continue; + } + if (fs.statSync(file).isDirectory()) { + core.debug(`Skip directory '${file}'.`); + continue; + } + const hash = crypto.createHash('sha256'); + const pipeline = util.promisify(stream.pipeline); + yield pipeline(fs.createReadStream(file), hash); + result.write(hash.digest()); + count++; + if (!hasMatch) { + hasMatch = true; + } + } + } + catch (e_1_1) { e_1 = { error: e_1_1 }; } + finally { + try { + if (_d && !_d.done && (_a = _c.return)) yield _a.call(_c); + } + finally { if (e_1) throw e_1.error; } + } + result.end(); + if (hasMatch) { + core.debug(`Found ${count} files to hash.`); + return result.digest('hex'); + } + else { + core.debug(`No matches found for glob`); + return ''; + } + }); +} +exports.hashFiles = hashFiles; +//# sourceMappingURL=internal-hash-files.js.map + +/***/ }), + +/***/ 1063: +/***/ ((__unused_webpack_module, exports) => { + +"use strict"; + +Object.defineProperty(exports, "__esModule", ({ value: true })); +exports.MatchKind = void 0; +/** + * Indicates whether a pattern matches a path + */ +var MatchKind; +(function (MatchKind) { + /** Not matched */ + MatchKind[MatchKind["None"] = 0] = "None"; + /** Matched if the path is a directory */ + MatchKind[MatchKind["Directory"] = 1] = "Directory"; + /** Matched if the path is a regular file */ + MatchKind[MatchKind["File"] = 2] = "File"; + /** Matched */ + MatchKind[MatchKind["All"] = 3] = "All"; +})(MatchKind = exports.MatchKind || (exports.MatchKind = {})); +//# sourceMappingURL=internal-match-kind.js.map + +/***/ }), + +/***/ 1849: +/***/ (function(__unused_webpack_module, exports, __nccwpck_require__) { + +"use strict"; + +var __createBinding = (this && this.__createBinding) || (Object.create ? (function(o, m, k, k2) { + if (k2 === undefined) k2 = k; + Object.defineProperty(o, k2, { enumerable: true, get: function() { return m[k]; } }); +}) : (function(o, m, k, k2) { + if (k2 === undefined) k2 = k; + o[k2] = m[k]; +})); +var __setModuleDefault = (this && this.__setModuleDefault) || (Object.create ? (function(o, v) { + Object.defineProperty(o, "default", { enumerable: true, value: v }); +}) : function(o, v) { + o["default"] = v; +}); +var __importStar = (this && this.__importStar) || function (mod) { + if (mod && mod.__esModule) return mod; + var result = {}; + if (mod != null) for (var k in mod) if (k !== "default" && Object.hasOwnProperty.call(mod, k)) __createBinding(result, mod, k); + __setModuleDefault(result, mod); + return result; +}; +var __importDefault = (this && this.__importDefault) || function (mod) { + return (mod && mod.__esModule) ? mod : { "default": mod }; +}; +Object.defineProperty(exports, "__esModule", ({ value: true })); +exports.safeTrimTrailingSeparator = exports.normalizeSeparators = exports.hasRoot = exports.hasAbsoluteRoot = exports.ensureAbsoluteRoot = exports.dirname = void 0; +const path = __importStar(__nccwpck_require__(1017)); +const assert_1 = __importDefault(__nccwpck_require__(9491)); +const IS_WINDOWS = process.platform === 'win32'; +/** + * Similar to path.dirname except normalizes the path separators and slightly better handling for Windows UNC paths. 
+ * + * For example, on Linux/macOS: + * - `/ => /` + * - `/hello => /` + * + * For example, on Windows: + * - `C:\ => C:\` + * - `C:\hello => C:\` + * - `C: => C:` + * - `C:hello => C:` + * - `\ => \` + * - `\hello => \` + * - `\\hello => \\hello` + * - `\\hello\world => \\hello\world` + */ +function dirname(p) { + // Normalize slashes and trim unnecessary trailing slash + p = safeTrimTrailingSeparator(p); + // Windows UNC root, e.g. \\hello or \\hello\world + if (IS_WINDOWS && /^\\\\[^\\]+(\\[^\\]+)?$/.test(p)) { + return p; + } + // Get dirname + let result = path.dirname(p); + // Trim trailing slash for Windows UNC root, e.g. \\hello\world\ + if (IS_WINDOWS && /^\\\\[^\\]+\\[^\\]+\\$/.test(result)) { + result = safeTrimTrailingSeparator(result); + } + return result; +} +exports.dirname = dirname; +/** + * Roots the path if not already rooted. On Windows, relative roots like `\` + * or `C:` are expanded based on the current working directory. + */ +function ensureAbsoluteRoot(root, itemPath) { + assert_1.default(root, `ensureAbsoluteRoot parameter 'root' must not be empty`); + assert_1.default(itemPath, `ensureAbsoluteRoot parameter 'itemPath' must not be empty`); + // Already rooted + if (hasAbsoluteRoot(itemPath)) { + return itemPath; + } + // Windows + if (IS_WINDOWS) { + // Check for itemPath like C: or C:foo + if (itemPath.match(/^[A-Z]:[^\\/]|^[A-Z]:$/i)) { + let cwd = process.cwd(); + assert_1.default(cwd.match(/^[A-Z]:\\/i), `Expected current directory to start with an absolute drive root. Actual '${cwd}'`); + // Drive letter matches cwd? Expand to cwd + if (itemPath[0].toUpperCase() === cwd[0].toUpperCase()) { + // Drive only, e.g. C: + if (itemPath.length === 2) { + // Preserve specified drive letter case (upper or lower) + return `${itemPath[0]}:\\${cwd.substr(3)}`; + } + // Drive + path, e.g. C:foo + else { + if (!cwd.endsWith('\\')) { + cwd += '\\'; + } + // Preserve specified drive letter case (upper or lower) + return `${itemPath[0]}:\\${cwd.substr(3)}${itemPath.substr(2)}`; + } + } + // Different drive + else { + return `${itemPath[0]}:\\${itemPath.substr(2)}`; + } + } + // Check for itemPath like \ or \foo + else if (normalizeSeparators(itemPath).match(/^\\$|^\\[^\\]/)) { + const cwd = process.cwd(); + assert_1.default(cwd.match(/^[A-Z]:\\/i), `Expected current directory to start with an absolute drive root. Actual '${cwd}'`); + return `${cwd[0]}:\\${itemPath.substr(1)}`; + } + } + assert_1.default(hasAbsoluteRoot(root), `ensureAbsoluteRoot parameter 'root' must have an absolute root`); + // Otherwise ensure root ends with a separator + if (root.endsWith('/') || (IS_WINDOWS && root.endsWith('\\'))) { + // Intentionally empty + } + else { + // Append separator + root += path.sep; + } + return root + itemPath; +} +exports.ensureAbsoluteRoot = ensureAbsoluteRoot; +/** + * On Linux/macOS, true if path starts with `/`. On Windows, true for paths like: + * `\\hello\share` and `C:\hello` (and using alternate separator). + */ +function hasAbsoluteRoot(itemPath) { + assert_1.default(itemPath, `hasAbsoluteRoot parameter 'itemPath' must not be empty`); + // Normalize separators + itemPath = normalizeSeparators(itemPath); + // Windows + if (IS_WINDOWS) { + // E.g. \\hello\share or C:\hello + return itemPath.startsWith('\\\\') || /^[A-Z]:\\/i.test(itemPath); + } + // E.g. /hello + return itemPath.startsWith('/'); +} +exports.hasAbsoluteRoot = hasAbsoluteRoot; +/** + * On Linux/macOS, true if path starts with `/`. 
On Windows, true for paths like: + * `\`, `\hello`, `\\hello\share`, `C:`, and `C:\hello` (and using alternate separator). + */ +function hasRoot(itemPath) { + assert_1.default(itemPath, `isRooted parameter 'itemPath' must not be empty`); + // Normalize separators + itemPath = normalizeSeparators(itemPath); + // Windows + if (IS_WINDOWS) { + // E.g. \ or \hello or \\hello + // E.g. C: or C:\hello + return itemPath.startsWith('\\') || /^[A-Z]:/i.test(itemPath); + } + // E.g. /hello + return itemPath.startsWith('/'); +} +exports.hasRoot = hasRoot; +/** + * Removes redundant slashes and converts `/` to `\` on Windows + */ +function normalizeSeparators(p) { + p = p || ''; + // Windows + if (IS_WINDOWS) { + // Convert slashes on Windows + p = p.replace(/\//g, '\\'); + // Remove redundant slashes + const isUnc = /^\\\\+[^\\]/.test(p); // e.g. \\hello + return (isUnc ? '\\' : '') + p.replace(/\\\\+/g, '\\'); // preserve leading \\ for UNC + } + // Remove redundant slashes + return p.replace(/\/\/+/g, '/'); +} +exports.normalizeSeparators = normalizeSeparators; +/** + * Normalizes the path separators and trims the trailing separator (when safe). + * For example, `/foo/ => /foo` but `/ => /` + */ +function safeTrimTrailingSeparator(p) { + // Short-circuit if empty + if (!p) { + return ''; + } + // Normalize separators + p = normalizeSeparators(p); + // No trailing slash + if (!p.endsWith(path.sep)) { + return p; + } + // Check '/' on Linux/macOS and '\' on Windows + if (p === path.sep) { + return p; + } + // On Windows check if drive root. E.g. C:\ + if (IS_WINDOWS && /^[A-Z]:\\$/i.test(p)) { + return p; + } + // Otherwise trim trailing slash + return p.substr(0, p.length - 1); +} +exports.safeTrimTrailingSeparator = safeTrimTrailingSeparator; +//# sourceMappingURL=internal-path-helper.js.map + +/***/ }), + +/***/ 6836: +/***/ (function(__unused_webpack_module, exports, __nccwpck_require__) { + +"use strict"; + +var __createBinding = (this && this.__createBinding) || (Object.create ? (function(o, m, k, k2) { + if (k2 === undefined) k2 = k; + Object.defineProperty(o, k2, { enumerable: true, get: function() { return m[k]; } }); +}) : (function(o, m, k, k2) { + if (k2 === undefined) k2 = k; + o[k2] = m[k]; +})); +var __setModuleDefault = (this && this.__setModuleDefault) || (Object.create ? (function(o, v) { + Object.defineProperty(o, "default", { enumerable: true, value: v }); +}) : function(o, v) { + o["default"] = v; +}); +var __importStar = (this && this.__importStar) || function (mod) { + if (mod && mod.__esModule) return mod; + var result = {}; + if (mod != null) for (var k in mod) if (k !== "default" && Object.hasOwnProperty.call(mod, k)) __createBinding(result, mod, k); + __setModuleDefault(result, mod); + return result; +}; +var __importDefault = (this && this.__importDefault) || function (mod) { + return (mod && mod.__esModule) ? 
mod : { "default": mod }; +}; +Object.defineProperty(exports, "__esModule", ({ value: true })); +exports.Path = void 0; +const path = __importStar(__nccwpck_require__(1017)); +const pathHelper = __importStar(__nccwpck_require__(1849)); +const assert_1 = __importDefault(__nccwpck_require__(9491)); +const IS_WINDOWS = process.platform === 'win32'; +/** + * Helper class for parsing paths into segments + */ +class Path { + /** + * Constructs a Path + * @param itemPath Path or array of segments + */ + constructor(itemPath) { + this.segments = []; + // String + if (typeof itemPath === 'string') { + assert_1.default(itemPath, `Parameter 'itemPath' must not be empty`); + // Normalize slashes and trim unnecessary trailing slash + itemPath = pathHelper.safeTrimTrailingSeparator(itemPath); + // Not rooted + if (!pathHelper.hasRoot(itemPath)) { + this.segments = itemPath.split(path.sep); + } + // Rooted + else { + // Add all segments, while not at the root + let remaining = itemPath; + let dir = pathHelper.dirname(remaining); + while (dir !== remaining) { + // Add the segment + const basename = path.basename(remaining); + this.segments.unshift(basename); + // Truncate the last segment + remaining = dir; + dir = pathHelper.dirname(remaining); + } + // Remainder is the root + this.segments.unshift(remaining); + } + } + // Array + else { + // Must not be empty + assert_1.default(itemPath.length > 0, `Parameter 'itemPath' must not be an empty array`); + // Each segment + for (let i = 0; i < itemPath.length; i++) { + let segment = itemPath[i]; + // Must not be empty + assert_1.default(segment, `Parameter 'itemPath' must not contain any empty segments`); + // Normalize slashes + segment = pathHelper.normalizeSeparators(itemPath[i]); + // Root segment + if (i === 0 && pathHelper.hasRoot(segment)) { + segment = pathHelper.safeTrimTrailingSeparator(segment); + assert_1.default(segment === pathHelper.dirname(segment), `Parameter 'itemPath' root segment contains information for multiple segments`); + this.segments.push(segment); + } + // All other segments + else { + // Must not contain slash + assert_1.default(!segment.includes(path.sep), `Parameter 'itemPath' contains unexpected path separators`); + this.segments.push(segment); + } + } + } + } + /** + * Converts the path to it's string representation + */ + toString() { + // First segment + let result = this.segments[0]; + // All others + let skipSlash = result.endsWith(path.sep) || (IS_WINDOWS && /^[A-Z]:$/i.test(result)); + for (let i = 1; i < this.segments.length; i++) { + if (skipSlash) { + skipSlash = false; + } + else { + result += path.sep; + } + result += this.segments[i]; + } + return result; + } +} +exports.Path = Path; +//# sourceMappingURL=internal-path.js.map + +/***/ }), + +/***/ 9005: +/***/ (function(__unused_webpack_module, exports, __nccwpck_require__) { + +"use strict"; + +var __createBinding = (this && this.__createBinding) || (Object.create ? (function(o, m, k, k2) { + if (k2 === undefined) k2 = k; + Object.defineProperty(o, k2, { enumerable: true, get: function() { return m[k]; } }); +}) : (function(o, m, k, k2) { + if (k2 === undefined) k2 = k; + o[k2] = m[k]; +})); +var __setModuleDefault = (this && this.__setModuleDefault) || (Object.create ? 
(function(o, v) { + Object.defineProperty(o, "default", { enumerable: true, value: v }); +}) : function(o, v) { + o["default"] = v; +}); +var __importStar = (this && this.__importStar) || function (mod) { + if (mod && mod.__esModule) return mod; + var result = {}; + if (mod != null) for (var k in mod) if (k !== "default" && Object.hasOwnProperty.call(mod, k)) __createBinding(result, mod, k); + __setModuleDefault(result, mod); + return result; +}; +Object.defineProperty(exports, "__esModule", ({ value: true })); +exports.partialMatch = exports.match = exports.getSearchPaths = void 0; +const pathHelper = __importStar(__nccwpck_require__(1849)); +const internal_match_kind_1 = __nccwpck_require__(1063); +const IS_WINDOWS = process.platform === 'win32'; +/** + * Given an array of patterns, returns an array of paths to search. + * Duplicates and paths under other included paths are filtered out. + */ +function getSearchPaths(patterns) { + // Ignore negate patterns + patterns = patterns.filter(x => !x.negate); + // Create a map of all search paths + const searchPathMap = {}; + for (const pattern of patterns) { + const key = IS_WINDOWS + ? pattern.searchPath.toUpperCase() + : pattern.searchPath; + searchPathMap[key] = 'candidate'; + } + const result = []; + for (const pattern of patterns) { + // Check if already included + const key = IS_WINDOWS + ? pattern.searchPath.toUpperCase() + : pattern.searchPath; + if (searchPathMap[key] === 'included') { + continue; + } + // Check for an ancestor search path + let foundAncestor = false; + let tempKey = key; + let parent = pathHelper.dirname(tempKey); + while (parent !== tempKey) { + if (searchPathMap[parent]) { + foundAncestor = true; + break; + } + tempKey = parent; + parent = pathHelper.dirname(tempKey); + } + // Include the search pattern in the result + if (!foundAncestor) { + result.push(pattern.searchPath); + searchPathMap[key] = 'included'; + } + } + return result; +} +exports.getSearchPaths = getSearchPaths; +/** + * Matches the patterns against the path + */ +function match(patterns, itemPath) { + let result = internal_match_kind_1.MatchKind.None; + for (const pattern of patterns) { + if (pattern.negate) { + result &= ~pattern.match(itemPath); + } + else { + result |= pattern.match(itemPath); + } + } + return result; +} +exports.match = match; +/** + * Checks whether to descend further into the directory + */ +function partialMatch(patterns, itemPath) { + return patterns.some(x => !x.negate && x.partialMatch(itemPath)); +} +exports.partialMatch = partialMatch; +//# sourceMappingURL=internal-pattern-helper.js.map + +/***/ }), + +/***/ 4536: +/***/ (function(__unused_webpack_module, exports, __nccwpck_require__) { + +"use strict"; + +var __createBinding = (this && this.__createBinding) || (Object.create ? (function(o, m, k, k2) { + if (k2 === undefined) k2 = k; + Object.defineProperty(o, k2, { enumerable: true, get: function() { return m[k]; } }); +}) : (function(o, m, k, k2) { + if (k2 === undefined) k2 = k; + o[k2] = m[k]; +})); +var __setModuleDefault = (this && this.__setModuleDefault) || (Object.create ? 
(function(o, v) { + Object.defineProperty(o, "default", { enumerable: true, value: v }); +}) : function(o, v) { + o["default"] = v; +}); +var __importStar = (this && this.__importStar) || function (mod) { + if (mod && mod.__esModule) return mod; + var result = {}; + if (mod != null) for (var k in mod) if (k !== "default" && Object.hasOwnProperty.call(mod, k)) __createBinding(result, mod, k); + __setModuleDefault(result, mod); + return result; +}; +var __importDefault = (this && this.__importDefault) || function (mod) { + return (mod && mod.__esModule) ? mod : { "default": mod }; +}; +Object.defineProperty(exports, "__esModule", ({ value: true })); +exports.Pattern = void 0; +const os = __importStar(__nccwpck_require__(2037)); +const path = __importStar(__nccwpck_require__(1017)); +const pathHelper = __importStar(__nccwpck_require__(1849)); +const assert_1 = __importDefault(__nccwpck_require__(9491)); +const minimatch_1 = __nccwpck_require__(3973); +const internal_match_kind_1 = __nccwpck_require__(1063); +const internal_path_1 = __nccwpck_require__(6836); +const IS_WINDOWS = process.platform === 'win32'; +class Pattern { + constructor(patternOrNegate, isImplicitPattern = false, segments, homedir) { + /** + * Indicates whether matches should be excluded from the result set + */ + this.negate = false; + // Pattern overload + let pattern; + if (typeof patternOrNegate === 'string') { + pattern = patternOrNegate.trim(); + } + // Segments overload + else { + // Convert to pattern + segments = segments || []; + assert_1.default(segments.length, `Parameter 'segments' must not empty`); + const root = Pattern.getLiteral(segments[0]); + assert_1.default(root && pathHelper.hasAbsoluteRoot(root), `Parameter 'segments' first element must be a root path`); + pattern = new internal_path_1.Path(segments).toString().trim(); + if (patternOrNegate) { + pattern = `!${pattern}`; + } + } + // Negate + while (pattern.startsWith('!')) { + this.negate = !this.negate; + pattern = pattern.substr(1).trim(); + } + // Normalize slashes and ensures absolute root + pattern = Pattern.fixupPattern(pattern, homedir); + // Segments + this.segments = new internal_path_1.Path(pattern).segments; + // Trailing slash indicates the pattern should only match directories, not regular files + this.trailingSeparator = pathHelper + .normalizeSeparators(pattern) + .endsWith(path.sep); + pattern = pathHelper.safeTrimTrailingSeparator(pattern); + // Search path (literal path prior to the first glob segment) + let foundGlob = false; + const searchSegments = this.segments + .map(x => Pattern.getLiteral(x)) + .filter(x => !foundGlob && !(foundGlob = x === '')); + this.searchPath = new internal_path_1.Path(searchSegments).toString(); + // Root RegExp (required when determining partial match) + this.rootRegExp = new RegExp(Pattern.regExpEscape(searchSegments[0]), IS_WINDOWS ? 'i' : ''); + this.isImplicitPattern = isImplicitPattern; + // Create minimatch + const minimatchOptions = { + dot: true, + nobrace: true, + nocase: IS_WINDOWS, + nocomment: true, + noext: true, + nonegate: true + }; + pattern = IS_WINDOWS ? pattern.replace(/\\/g, '/') : pattern; + this.minimatch = new minimatch_1.Minimatch(pattern, minimatchOptions); + } + /** + * Matches the pattern against the specified path + */ + match(itemPath) { + // Last segment is globstar? + if (this.segments[this.segments.length - 1] === '**') { + // Normalize slashes + itemPath = pathHelper.normalizeSeparators(itemPath); + // Append a trailing slash. 
Otherwise Minimatch will not match the directory immediately + // preceding the globstar. For example, given the pattern `/foo/**`, Minimatch returns + // false for `/foo` but returns true for `/foo/`. Append a trailing slash to handle that quirk. + if (!itemPath.endsWith(path.sep) && this.isImplicitPattern === false) { + // Note, this is safe because the constructor ensures the pattern has an absolute root. + // For example, formats like C: and C:foo on Windows are resolved to an absolute root. + itemPath = `${itemPath}${path.sep}`; + } + } + else { + // Normalize slashes and trim unnecessary trailing slash + itemPath = pathHelper.safeTrimTrailingSeparator(itemPath); + } + // Match + if (this.minimatch.match(itemPath)) { + return this.trailingSeparator ? internal_match_kind_1.MatchKind.Directory : internal_match_kind_1.MatchKind.All; + } + return internal_match_kind_1.MatchKind.None; + } + /** + * Indicates whether the pattern may match descendants of the specified path + */ + partialMatch(itemPath) { + // Normalize slashes and trim unnecessary trailing slash + itemPath = pathHelper.safeTrimTrailingSeparator(itemPath); + // matchOne does not handle root path correctly + if (pathHelper.dirname(itemPath) === itemPath) { + return this.rootRegExp.test(itemPath); + } + return this.minimatch.matchOne(itemPath.split(IS_WINDOWS ? /\\+/ : /\/+/), this.minimatch.set[0], true); + } + /** + * Escapes glob patterns within a path + */ + static globEscape(s) { + return (IS_WINDOWS ? s : s.replace(/\\/g, '\\\\')) // escape '\' on Linux/macOS + .replace(/(\[)(?=[^/]+\])/g, '[[]') // escape '[' when ']' follows within the path segment + .replace(/\?/g, '[?]') // escape '?' + .replace(/\*/g, '[*]'); // escape '*' + } + /** + * Normalizes slashes and ensures absolute root + */ + static fixupPattern(pattern, homedir) { + // Empty + assert_1.default(pattern, 'pattern cannot be empty'); + // Must not contain `.` segment, unless first segment + // Must not contain `..` segment + const literalSegments = new internal_path_1.Path(pattern).segments.map(x => Pattern.getLiteral(x)); + assert_1.default(literalSegments.every((x, i) => (x !== '.' || i === 0) && x !== '..'), `Invalid pattern '${pattern}'. Relative pathing '.' and '..' is not allowed.`); + // Must not contain globs in root, e.g. Windows UNC path \\foo\b*r + assert_1.default(!pathHelper.hasRoot(pattern) || literalSegments[0], `Invalid pattern '${pattern}'. Root segment must not contain globs.`); + // Normalize slashes + pattern = pathHelper.normalizeSeparators(pattern); + // Replace leading `.` segment + if (pattern === '.' || pattern.startsWith(`.${path.sep}`)) { + pattern = Pattern.globEscape(process.cwd()) + pattern.substr(1); + } + // Replace leading `~` segment + else if (pattern === '~' || pattern.startsWith(`~${path.sep}`)) { + homedir = homedir || os.homedir(); + assert_1.default(homedir, 'Unable to determine HOME directory'); + assert_1.default(pathHelper.hasAbsoluteRoot(homedir), `Expected HOME directory to be a rooted path. Actual '${homedir}'`); + pattern = Pattern.globEscape(homedir) + pattern.substr(1); + } + // Replace relative drive root, e.g. pattern is C: or C:foo + else if (IS_WINDOWS && + (pattern.match(/^[A-Z]:$/i) || pattern.match(/^[A-Z]:[^\\]/i))) { + let root = pathHelper.ensureAbsoluteRoot('C:\\dummy-root', pattern.substr(0, 2)); + if (pattern.length > 2 && !root.endsWith('\\')) { + root += '\\'; + } + pattern = Pattern.globEscape(root) + pattern.substr(2); + } + // Replace relative root, e.g. 
pattern is \ or \foo + else if (IS_WINDOWS && (pattern === '\\' || pattern.match(/^\\[^\\]/))) { + let root = pathHelper.ensureAbsoluteRoot('C:\\dummy-root', '\\'); + if (!root.endsWith('\\')) { + root += '\\'; + } + pattern = Pattern.globEscape(root) + pattern.substr(1); + } + // Otherwise ensure absolute root + else { + pattern = pathHelper.ensureAbsoluteRoot(Pattern.globEscape(process.cwd()), pattern); + } + return pathHelper.normalizeSeparators(pattern); + } + /** + * Attempts to unescape a pattern segment to create a literal path segment. + * Otherwise returns empty string. + */ + static getLiteral(segment) { + let literal = ''; + for (let i = 0; i < segment.length; i++) { + const c = segment[i]; + // Escape + if (c === '\\' && !IS_WINDOWS && i + 1 < segment.length) { + literal += segment[++i]; + continue; + } + // Wildcard + else if (c === '*' || c === '?') { + return ''; + } + // Character set + else if (c === '[' && i + 1 < segment.length) { + let set = ''; + let closed = -1; + for (let i2 = i + 1; i2 < segment.length; i2++) { + const c2 = segment[i2]; + // Escape + if (c2 === '\\' && !IS_WINDOWS && i2 + 1 < segment.length) { + set += segment[++i2]; + continue; + } + // Closed + else if (c2 === ']') { + closed = i2; + break; + } + // Otherwise + else { + set += c2; + } + } + // Closed? + if (closed >= 0) { + // Cannot convert + if (set.length > 1) { + return ''; + } + // Convert to literal + if (set) { + literal += set; + i = closed; + continue; + } + } + // Otherwise fall thru + } + // Append + literal += c; + } + return literal; + } + /** + * Escapes regexp special characters + * https://javascript.info/regexp-escaping + */ + static regExpEscape(s) { + return s.replace(/[[\\^$.|?*+()]/g, '\\$&'); + } +} +exports.Pattern = Pattern; +//# sourceMappingURL=internal-pattern.js.map + +/***/ }), + +/***/ 9117: +/***/ ((__unused_webpack_module, exports) => { + +"use strict"; + +Object.defineProperty(exports, "__esModule", ({ value: true })); +exports.SearchState = void 0; +class SearchState { + constructor(path, level) { + this.path = path; + this.level = level; + } +} +exports.SearchState = SearchState; +//# sourceMappingURL=internal-search-state.js.map + +/***/ }), + /***/ 1962: /***/ (function(__unused_webpack_module, exports, __nccwpck_require__) { @@ -6980,19 +8428,18 @@ function copyFile(srcFile, destFile, force) { /***/ }), /***/ 2557: -/***/ ((__unused_webpack_module, exports, __nccwpck_require__) => { +/***/ ((__unused_webpack_module, exports) => { "use strict"; Object.defineProperty(exports, "__esModule", ({ value: true })); -var tslib = __nccwpck_require__(9268); - // Copyright (c) Microsoft Corporation. // Licensed under the MIT license. -var listenersMap = new WeakMap(); -var abortedMap = new WeakMap(); +/// +const listenersMap = new WeakMap(); +const abortedMap = new WeakMap(); /** * An aborter instance implements AbortSignal interface, can abort HTTP requests. * @@ -7006,8 +8453,8 @@ var abortedMap = new WeakMap(); * await doAsyncWork(AbortSignal.none); * ``` */ -var AbortSignal = /** @class */ (function () { - function AbortSignal() { +class AbortSignal { + constructor() { /** * onabort event listener. */ @@ -7015,74 +8462,65 @@ var AbortSignal = /** @class */ (function () { listenersMap.set(this, []); abortedMap.set(this, false); } - Object.defineProperty(AbortSignal.prototype, "aborted", { - /** - * Status of whether aborted or not. 
- * - * @readonly - */ - get: function () { - if (!abortedMap.has(this)) { - throw new TypeError("Expected `this` to be an instance of AbortSignal."); - } - return abortedMap.get(this); - }, - enumerable: false, - configurable: true - }); - Object.defineProperty(AbortSignal, "none", { - /** - * Creates a new AbortSignal instance that will never be aborted. - * - * @readonly - */ - get: function () { - return new AbortSignal(); - }, - enumerable: false, - configurable: true - }); + /** + * Status of whether aborted or not. + * + * @readonly + */ + get aborted() { + if (!abortedMap.has(this)) { + throw new TypeError("Expected `this` to be an instance of AbortSignal."); + } + return abortedMap.get(this); + } + /** + * Creates a new AbortSignal instance that will never be aborted. + * + * @readonly + */ + static get none() { + return new AbortSignal(); + } /** * Added new "abort" event listener, only support "abort" event. * * @param _type - Only support "abort" event * @param listener - The listener to be added */ - AbortSignal.prototype.addEventListener = function ( + addEventListener( // tslint:disable-next-line:variable-name _type, listener) { if (!listenersMap.has(this)) { throw new TypeError("Expected `this` to be an instance of AbortSignal."); } - var listeners = listenersMap.get(this); + const listeners = listenersMap.get(this); listeners.push(listener); - }; + } /** * Remove "abort" event listener, only support "abort" event. * * @param _type - Only support "abort" event * @param listener - The listener to be removed */ - AbortSignal.prototype.removeEventListener = function ( + removeEventListener( // tslint:disable-next-line:variable-name _type, listener) { if (!listenersMap.has(this)) { throw new TypeError("Expected `this` to be an instance of AbortSignal."); } - var listeners = listenersMap.get(this); - var index = listeners.indexOf(listener); + const listeners = listenersMap.get(this); + const index = listeners.indexOf(listener); if (index > -1) { listeners.splice(index, 1); } - }; + } /** * Dispatches a synthetic event to the AbortSignal. */ - AbortSignal.prototype.dispatchEvent = function (_event) { + dispatchEvent(_event) { throw new Error("This is a stub dispatchEvent implementation that should not be used. It only exists for type-checking purposes."); - }; - return AbortSignal; -}()); + } +} /** * Helper to trigger an abort event immediately, the onabort and all abort event listeners will be triggered. * Will try to trigger abort event for all linked AbortSignal nodes. @@ -7100,12 +8538,12 @@ function abortSignal(signal) { if (signal.onabort) { signal.onabort.call(signal); } - var listeners = listenersMap.get(signal); + const listeners = listenersMap.get(signal); if (listeners) { // Create a copy of listeners so mutations to the array // (e.g. via removeListener calls) don't affect the listeners // we invoke. 
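The "copy before dispatch" comment above is the key detail of this block: an abort handler may unregister itself (or another handler) while the event is firing, and iterating the live array would then skip entries. A minimal standalone sketch of the same pattern (plain Node.js; addListener, removeListener and dispatchAbort are illustrative names, not this bundle's exports):

// Dispatch to a snapshot of the listener list so that a handler which
// removes itself does not shift later handlers out of the loop.
const listeners = [];

function addListener(fn) { listeners.push(fn); }

function removeListener(fn) {
  const i = listeners.indexOf(fn);
  if (i > -1) listeners.splice(i, 1);
}

function dispatchAbort(signal) {
  // slice() copies first; mutations made by a handler only affect later dispatches.
  listeners.slice().forEach(listener => listener.call(signal, { type: 'abort' }));
}

// Usage: the self-removing handler does not prevent the second one from running.
const once = () => { removeListener(once); console.log('first'); };
addListener(once);
addListener(() => console.log('second'));
dispatchAbort({});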
- listeners.slice().forEach(function (listener) { + listeners.slice().forEach((listener) => { listener.call(signal, { type: "abort" }); }); } @@ -7131,15 +8569,12 @@ function abortSignal(signal) { * } * ``` */ -var AbortError = /** @class */ (function (_super) { - tslib.__extends(AbortError, _super); - function AbortError(message) { - var _this = _super.call(this, message) || this; - _this.name = "AbortError"; - return _this; +class AbortError extends Error { + constructor(message) { + super(message); + this.name = "AbortError"; } - return AbortError; -}(Error)); +} /** * An AbortController provides an AbortSignal and the associated controls to signal * that an asynchronous operation should be aborted. @@ -7174,10 +8609,9 @@ var AbortError = /** @class */ (function (_super) { * await doAsyncWork(aborter.withTimeout(25 * 1000)); * ``` */ -var AbortController = /** @class */ (function () { +class AbortController { // eslint-disable-next-line @typescript-eslint/explicit-module-boundary-types - function AbortController(parentSignals) { - var _this = this; + constructor(parentSignals) { this._signal = new AbortSignal(); if (!parentSignals) { return; @@ -7187,8 +8621,7 @@ var AbortController = /** @class */ (function () { // eslint-disable-next-line prefer-rest-params parentSignals = arguments; } - for (var _i = 0, parentSignals_1 = parentSignals; _i < parentSignals_1.length; _i++) { - var parentSignal = parentSignals_1[_i]; + for (const parentSignal of parentSignals) { // if the parent signal has already had abort() called, // then call abort on this signal as well. if (parentSignal.aborted) { @@ -7196,47 +8629,42 @@ var AbortController = /** @class */ (function () { } else { // when the parent signal aborts, this signal should as well. - parentSignal.addEventListener("abort", function () { - _this.abort(); + parentSignal.addEventListener("abort", () => { + this.abort(); }); } } } - Object.defineProperty(AbortController.prototype, "signal", { - /** - * The AbortSignal associated with this controller that will signal aborted - * when the abort method is called on this controller. - * - * @readonly - */ - get: function () { - return this._signal; - }, - enumerable: false, - configurable: true - }); + /** + * The AbortSignal associated with this controller that will signal aborted + * when the abort method is called on this controller. + * + * @readonly + */ + get signal() { + return this._signal; + } /** * Signal that any operations passed this controller's associated abort signal * to cancel any remaining work and throw an `AbortError`. */ - AbortController.prototype.abort = function () { + abort() { abortSignal(this._signal); - }; + } /** * Creates a new AbortSignal instance that will abort after the provided ms. * @param ms - Elapsed time in milliseconds to trigger an abort. */ - AbortController.timeout = function (ms) { - var signal = new AbortSignal(); - var timer = setTimeout(abortSignal, ms, signal); + static timeout(ms) { + const signal = new AbortSignal(); + const timer = setTimeout(abortSignal, ms, signal); // Prevent the active Timer from keeping the Node.js event loop active. if (typeof timer.unref === "function") { timer.unref(); } return signal; - }; - return AbortController; -}()); + } +} exports.AbortController = AbortController; exports.AbortError = AbortError; @@ -7244,319 +8672,6 @@ exports.AbortSignal = AbortSignal; //# sourceMappingURL=index.js.map -/***/ }), - -/***/ 9268: -/***/ ((module) => { - -/*! 
***************************************************************************** -Copyright (c) Microsoft Corporation. - -Permission to use, copy, modify, and/or distribute this software for any -purpose with or without fee is hereby granted. - -THE SOFTWARE IS PROVIDED "AS IS" AND THE AUTHOR DISCLAIMS ALL WARRANTIES WITH -REGARD TO THIS SOFTWARE INCLUDING ALL IMPLIED WARRANTIES OF MERCHANTABILITY -AND FITNESS. IN NO EVENT SHALL THE AUTHOR BE LIABLE FOR ANY SPECIAL, DIRECT, -INDIRECT, OR CONSEQUENTIAL DAMAGES OR ANY DAMAGES WHATSOEVER RESULTING FROM -LOSS OF USE, DATA OR PROFITS, WHETHER IN AN ACTION OF CONTRACT, NEGLIGENCE OR -OTHER TORTIOUS ACTION, ARISING OUT OF OR IN CONNECTION WITH THE USE OR -PERFORMANCE OF THIS SOFTWARE. -***************************************************************************** */ -/* global global, define, System, Reflect, Promise */ -var __extends; -var __assign; -var __rest; -var __decorate; -var __param; -var __metadata; -var __awaiter; -var __generator; -var __exportStar; -var __values; -var __read; -var __spread; -var __spreadArrays; -var __spreadArray; -var __await; -var __asyncGenerator; -var __asyncDelegator; -var __asyncValues; -var __makeTemplateObject; -var __importStar; -var __importDefault; -var __classPrivateFieldGet; -var __classPrivateFieldSet; -var __createBinding; -(function (factory) { - var root = typeof global === "object" ? global : typeof self === "object" ? self : typeof this === "object" ? this : {}; - if (typeof define === "function" && define.amd) { - define("tslib", ["exports"], function (exports) { factory(createExporter(root, createExporter(exports))); }); - } - else if ( true && typeof module.exports === "object") { - factory(createExporter(root, createExporter(module.exports))); - } - else { - factory(createExporter(root)); - } - function createExporter(exports, previous) { - if (exports !== root) { - if (typeof Object.create === "function") { - Object.defineProperty(exports, "__esModule", { value: true }); - } - else { - exports.__esModule = true; - } - } - return function (id, v) { return exports[id] = previous ? previous(id, v) : v; }; - } -}) -(function (exporter) { - var extendStatics = Object.setPrototypeOf || - ({ __proto__: [] } instanceof Array && function (d, b) { d.__proto__ = b; }) || - function (d, b) { for (var p in b) if (Object.prototype.hasOwnProperty.call(b, p)) d[p] = b[p]; }; - - __extends = function (d, b) { - if (typeof b !== "function" && b !== null) - throw new TypeError("Class extends value " + String(b) + " is not a constructor or null"); - extendStatics(d, b); - function __() { this.constructor = d; } - d.prototype = b === null ? Object.create(b) : (__.prototype = b.prototype, new __()); - }; - - __assign = Object.assign || function (t) { - for (var s, i = 1, n = arguments.length; i < n; i++) { - s = arguments[i]; - for (var p in s) if (Object.prototype.hasOwnProperty.call(s, p)) t[p] = s[p]; - } - return t; - }; - - __rest = function (s, e) { - var t = {}; - for (var p in s) if (Object.prototype.hasOwnProperty.call(s, p) && e.indexOf(p) < 0) - t[p] = s[p]; - if (s != null && typeof Object.getOwnPropertySymbols === "function") - for (var i = 0, p = Object.getOwnPropertySymbols(s); i < p.length; i++) { - if (e.indexOf(p[i]) < 0 && Object.prototype.propertyIsEnumerable.call(s, p[i])) - t[p[i]] = s[p[i]]; - } - return t; - }; - - __decorate = function (decorators, target, key, desc) { - var c = arguments.length, r = c < 3 ? target : desc === null ? 
desc = Object.getOwnPropertyDescriptor(target, key) : desc, d; - if (typeof Reflect === "object" && typeof Reflect.decorate === "function") r = Reflect.decorate(decorators, target, key, desc); - else for (var i = decorators.length - 1; i >= 0; i--) if (d = decorators[i]) r = (c < 3 ? d(r) : c > 3 ? d(target, key, r) : d(target, key)) || r; - return c > 3 && r && Object.defineProperty(target, key, r), r; - }; - - __param = function (paramIndex, decorator) { - return function (target, key) { decorator(target, key, paramIndex); } - }; - - __metadata = function (metadataKey, metadataValue) { - if (typeof Reflect === "object" && typeof Reflect.metadata === "function") return Reflect.metadata(metadataKey, metadataValue); - }; - - __awaiter = function (thisArg, _arguments, P, generator) { - function adopt(value) { return value instanceof P ? value : new P(function (resolve) { resolve(value); }); } - return new (P || (P = Promise))(function (resolve, reject) { - function fulfilled(value) { try { step(generator.next(value)); } catch (e) { reject(e); } } - function rejected(value) { try { step(generator["throw"](value)); } catch (e) { reject(e); } } - function step(result) { result.done ? resolve(result.value) : adopt(result.value).then(fulfilled, rejected); } - step((generator = generator.apply(thisArg, _arguments || [])).next()); - }); - }; - - __generator = function (thisArg, body) { - var _ = { label: 0, sent: function() { if (t[0] & 1) throw t[1]; return t[1]; }, trys: [], ops: [] }, f, y, t, g; - return g = { next: verb(0), "throw": verb(1), "return": verb(2) }, typeof Symbol === "function" && (g[Symbol.iterator] = function() { return this; }), g; - function verb(n) { return function (v) { return step([n, v]); }; } - function step(op) { - if (f) throw new TypeError("Generator is already executing."); - while (_) try { - if (f = 1, y && (t = op[0] & 2 ? y["return"] : op[0] ? y["throw"] || ((t = y["return"]) && t.call(y), 0) : y.next) && !(t = t.call(y, op[1])).done) return t; - if (y = 0, t) op = [op[0] & 2, t.value]; - switch (op[0]) { - case 0: case 1: t = op; break; - case 4: _.label++; return { value: op[1], done: false }; - case 5: _.label++; y = op[1]; op = [0]; continue; - case 7: op = _.ops.pop(); _.trys.pop(); continue; - default: - if (!(t = _.trys, t = t.length > 0 && t[t.length - 1]) && (op[0] === 6 || op[0] === 2)) { _ = 0; continue; } - if (op[0] === 3 && (!t || (op[1] > t[0] && op[1] < t[3]))) { _.label = op[1]; break; } - if (op[0] === 6 && _.label < t[1]) { _.label = t[1]; t = op; break; } - if (t && _.label < t[2]) { _.label = t[2]; _.ops.push(op); break; } - if (t[2]) _.ops.pop(); - _.trys.pop(); continue; - } - op = body.call(thisArg, _); - } catch (e) { op = [6, e]; y = 0; } finally { f = t = 0; } - if (op[0] & 5) throw op[1]; return { value: op[0] ? op[1] : void 0, done: true }; - } - }; - - __exportStar = function(m, o) { - for (var p in m) if (p !== "default" && !Object.prototype.hasOwnProperty.call(o, p)) __createBinding(o, m, p); - }; - - __createBinding = Object.create ? 
(function(o, m, k, k2) { - if (k2 === undefined) k2 = k; - Object.defineProperty(o, k2, { enumerable: true, get: function() { return m[k]; } }); - }) : (function(o, m, k, k2) { - if (k2 === undefined) k2 = k; - o[k2] = m[k]; - }); - - __values = function (o) { - var s = typeof Symbol === "function" && Symbol.iterator, m = s && o[s], i = 0; - if (m) return m.call(o); - if (o && typeof o.length === "number") return { - next: function () { - if (o && i >= o.length) o = void 0; - return { value: o && o[i++], done: !o }; - } - }; - throw new TypeError(s ? "Object is not iterable." : "Symbol.iterator is not defined."); - }; - - __read = function (o, n) { - var m = typeof Symbol === "function" && o[Symbol.iterator]; - if (!m) return o; - var i = m.call(o), r, ar = [], e; - try { - while ((n === void 0 || n-- > 0) && !(r = i.next()).done) ar.push(r.value); - } - catch (error) { e = { error: error }; } - finally { - try { - if (r && !r.done && (m = i["return"])) m.call(i); - } - finally { if (e) throw e.error; } - } - return ar; - }; - - /** @deprecated */ - __spread = function () { - for (var ar = [], i = 0; i < arguments.length; i++) - ar = ar.concat(__read(arguments[i])); - return ar; - }; - - /** @deprecated */ - __spreadArrays = function () { - for (var s = 0, i = 0, il = arguments.length; i < il; i++) s += arguments[i].length; - for (var r = Array(s), k = 0, i = 0; i < il; i++) - for (var a = arguments[i], j = 0, jl = a.length; j < jl; j++, k++) - r[k] = a[j]; - return r; - }; - - __spreadArray = function (to, from, pack) { - if (pack || arguments.length === 2) for (var i = 0, l = from.length, ar; i < l; i++) { - if (ar || !(i in from)) { - if (!ar) ar = Array.prototype.slice.call(from, 0, i); - ar[i] = from[i]; - } - } - return to.concat(ar || Array.prototype.slice.call(from)); - }; - - __await = function (v) { - return this instanceof __await ? (this.v = v, this) : new __await(v); - }; - - __asyncGenerator = function (thisArg, _arguments, generator) { - if (!Symbol.asyncIterator) throw new TypeError("Symbol.asyncIterator is not defined."); - var g = generator.apply(thisArg, _arguments || []), i, q = []; - return i = {}, verb("next"), verb("throw"), verb("return"), i[Symbol.asyncIterator] = function () { return this; }, i; - function verb(n) { if (g[n]) i[n] = function (v) { return new Promise(function (a, b) { q.push([n, v, a, b]) > 1 || resume(n, v); }); }; } - function resume(n, v) { try { step(g[n](v)); } catch (e) { settle(q[0][3], e); } } - function step(r) { r.value instanceof __await ? Promise.resolve(r.value.v).then(fulfill, reject) : settle(q[0][2], r); } - function fulfill(value) { resume("next", value); } - function reject(value) { resume("throw", value); } - function settle(f, v) { if (f(v), q.shift(), q.length) resume(q[0][0], q[0][1]); } - }; - - __asyncDelegator = function (o) { - var i, p; - return i = {}, verb("next"), verb("throw", function (e) { throw e; }), verb("return"), i[Symbol.iterator] = function () { return this; }, i; - function verb(n, f) { i[n] = o[n] ? function (v) { return (p = !p) ? { value: __await(o[n](v)), done: n === "return" } : f ? f(v) : v; } : f; } - }; - - __asyncValues = function (o) { - if (!Symbol.asyncIterator) throw new TypeError("Symbol.asyncIterator is not defined."); - var m = o[Symbol.asyncIterator], i; - return m ? m.call(o) : (o = typeof __values === "function" ? 
__values(o) : o[Symbol.iterator](), i = {}, verb("next"), verb("throw"), verb("return"), i[Symbol.asyncIterator] = function () { return this; }, i); - function verb(n) { i[n] = o[n] && function (v) { return new Promise(function (resolve, reject) { v = o[n](v), settle(resolve, reject, v.done, v.value); }); }; } - function settle(resolve, reject, d, v) { Promise.resolve(v).then(function(v) { resolve({ value: v, done: d }); }, reject); } - }; - - __makeTemplateObject = function (cooked, raw) { - if (Object.defineProperty) { Object.defineProperty(cooked, "raw", { value: raw }); } else { cooked.raw = raw; } - return cooked; - }; - - var __setModuleDefault = Object.create ? (function(o, v) { - Object.defineProperty(o, "default", { enumerable: true, value: v }); - }) : function(o, v) { - o["default"] = v; - }; - - __importStar = function (mod) { - if (mod && mod.__esModule) return mod; - var result = {}; - if (mod != null) for (var k in mod) if (k !== "default" && Object.prototype.hasOwnProperty.call(mod, k)) __createBinding(result, mod, k); - __setModuleDefault(result, mod); - return result; - }; - - __importDefault = function (mod) { - return (mod && mod.__esModule) ? mod : { "default": mod }; - }; - - __classPrivateFieldGet = function (receiver, state, kind, f) { - if (kind === "a" && !f) throw new TypeError("Private accessor was defined without a getter"); - if (typeof state === "function" ? receiver !== state || !f : !state.has(receiver)) throw new TypeError("Cannot read private member from an object whose class did not declare it"); - return kind === "m" ? f : kind === "a" ? f.call(receiver) : f ? f.value : state.get(receiver); - }; - - __classPrivateFieldSet = function (receiver, state, value, kind, f) { - if (kind === "m") throw new TypeError("Private method is not writable"); - if (kind === "a" && !f) throw new TypeError("Private accessor was defined without a setter"); - if (typeof state === "function" ? receiver !== state || !f : !state.has(receiver)) throw new TypeError("Cannot write private member to an object whose class did not declare it"); - return (kind === "a" ? f.call(receiver, value) : f ? f.value = value : state.set(receiver, value)), value; - }; - - exporter("__extends", __extends); - exporter("__assign", __assign); - exporter("__rest", __rest); - exporter("__decorate", __decorate); - exporter("__param", __param); - exporter("__metadata", __metadata); - exporter("__awaiter", __awaiter); - exporter("__generator", __generator); - exporter("__exportStar", __exportStar); - exporter("__createBinding", __createBinding); - exporter("__values", __values); - exporter("__read", __read); - exporter("__spread", __spread); - exporter("__spreadArrays", __spreadArrays); - exporter("__spreadArray", __spreadArray); - exporter("__await", __await); - exporter("__asyncGenerator", __asyncGenerator); - exporter("__asyncDelegator", __asyncDelegator); - exporter("__asyncValues", __asyncValues); - exporter("__makeTemplateObject", __makeTemplateObject); - exporter("__importStar", __importStar); - exporter("__importDefault", __importDefault); - exporter("__classPrivateFieldGet", __classPrivateFieldGet); - exporter("__classPrivateFieldSet", __classPrivateFieldSet); -}); - - /***/ }), /***/ 2356: @@ -52307,78 +53422,111 @@ var MAX_SAFE_INTEGER = Number.MAX_SAFE_INTEGER || // Max safe segment length for coercion. 
var MAX_SAFE_COMPONENT_LENGTH = 16 +var MAX_SAFE_BUILD_LENGTH = MAX_LENGTH - 6 + // The actual regexps go on exports.re var re = exports.re = [] +var safeRe = exports.safeRe = [] var src = exports.src = [] +var t = exports.tokens = {} var R = 0 +function tok (n) { + t[n] = R++ +} + +var LETTERDASHNUMBER = '[a-zA-Z0-9-]' + +// Replace some greedy regex tokens to prevent regex dos issues. These regex are +// used internally via the safeRe object since all inputs in this library get +// normalized first to trim and collapse all extra whitespace. The original +// regexes are exported for userland consumption and lower level usage. A +// future breaking change could export the safer regex only with a note that +// all input should have extra whitespace removed. +var safeRegexReplacements = [ + ['\\s', 1], + ['\\d', MAX_LENGTH], + [LETTERDASHNUMBER, MAX_SAFE_BUILD_LENGTH], +] + +function makeSafeRe (value) { + for (var i = 0; i < safeRegexReplacements.length; i++) { + var token = safeRegexReplacements[i][0] + var max = safeRegexReplacements[i][1] + value = value + .split(token + '*').join(token + '{0,' + max + '}') + .split(token + '+').join(token + '{1,' + max + '}') + } + return value +} + // The following Regular Expressions can be used for tokenizing, // validating, and parsing SemVer version strings. // ## Numeric Identifier // A single `0`, or a non-zero digit followed by zero or more digits. -var NUMERICIDENTIFIER = R++ -src[NUMERICIDENTIFIER] = '0|[1-9]\\d*' -var NUMERICIDENTIFIERLOOSE = R++ -src[NUMERICIDENTIFIERLOOSE] = '[0-9]+' +tok('NUMERICIDENTIFIER') +src[t.NUMERICIDENTIFIER] = '0|[1-9]\\d*' +tok('NUMERICIDENTIFIERLOOSE') +src[t.NUMERICIDENTIFIERLOOSE] = '\\d+' // ## Non-numeric Identifier // Zero or more digits, followed by a letter or hyphen, and then zero or // more letters, digits, or hyphens. -var NONNUMERICIDENTIFIER = R++ -src[NONNUMERICIDENTIFIER] = '\\d*[a-zA-Z-][a-zA-Z0-9-]*' +tok('NONNUMERICIDENTIFIER') +src[t.NONNUMERICIDENTIFIER] = '\\d*[a-zA-Z-]' + LETTERDASHNUMBER + '*' // ## Main Version // Three dot-separated numeric identifiers. -var MAINVERSION = R++ -src[MAINVERSION] = '(' + src[NUMERICIDENTIFIER] + ')\\.' + - '(' + src[NUMERICIDENTIFIER] + ')\\.' + - '(' + src[NUMERICIDENTIFIER] + ')' +tok('MAINVERSION') +src[t.MAINVERSION] = '(' + src[t.NUMERICIDENTIFIER] + ')\\.' + + '(' + src[t.NUMERICIDENTIFIER] + ')\\.' + + '(' + src[t.NUMERICIDENTIFIER] + ')' -var MAINVERSIONLOOSE = R++ -src[MAINVERSIONLOOSE] = '(' + src[NUMERICIDENTIFIERLOOSE] + ')\\.' + - '(' + src[NUMERICIDENTIFIERLOOSE] + ')\\.' + - '(' + src[NUMERICIDENTIFIERLOOSE] + ')' +tok('MAINVERSIONLOOSE') +src[t.MAINVERSIONLOOSE] = '(' + src[t.NUMERICIDENTIFIERLOOSE] + ')\\.' + + '(' + src[t.NUMERICIDENTIFIERLOOSE] + ')\\.' + + '(' + src[t.NUMERICIDENTIFIERLOOSE] + ')' // ## Pre-release Version Identifier // A numeric identifier, or a non-numeric identifier. 
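As a reminder of what these identifier classes describe: a pre-release identifier is one dot-separated piece after the hyphen (for example 'alpha' and '1' in '1.2.3-alpha.1'), and a version carrying any pre-release identifiers sorts before the corresponding normal release. A quick check against semver's documented public API (not this bundle's internals):

const semver = require('semver');

console.log(semver.prerelease('1.2.3-alpha.1'));  // [ 'alpha', 1 ]
console.log(semver.lt('1.2.3-alpha.1', '1.2.3')); // true, pre-releases order first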
-var PRERELEASEIDENTIFIER = R++ -src[PRERELEASEIDENTIFIER] = '(?:' + src[NUMERICIDENTIFIER] + - '|' + src[NONNUMERICIDENTIFIER] + ')' +tok('PRERELEASEIDENTIFIER') +src[t.PRERELEASEIDENTIFIER] = '(?:' + src[t.NUMERICIDENTIFIER] + + '|' + src[t.NONNUMERICIDENTIFIER] + ')' -var PRERELEASEIDENTIFIERLOOSE = R++ -src[PRERELEASEIDENTIFIERLOOSE] = '(?:' + src[NUMERICIDENTIFIERLOOSE] + - '|' + src[NONNUMERICIDENTIFIER] + ')' +tok('PRERELEASEIDENTIFIERLOOSE') +src[t.PRERELEASEIDENTIFIERLOOSE] = '(?:' + src[t.NUMERICIDENTIFIERLOOSE] + + '|' + src[t.NONNUMERICIDENTIFIER] + ')' // ## Pre-release Version // Hyphen, followed by one or more dot-separated pre-release version // identifiers. -var PRERELEASE = R++ -src[PRERELEASE] = '(?:-(' + src[PRERELEASEIDENTIFIER] + - '(?:\\.' + src[PRERELEASEIDENTIFIER] + ')*))' +tok('PRERELEASE') +src[t.PRERELEASE] = '(?:-(' + src[t.PRERELEASEIDENTIFIER] + + '(?:\\.' + src[t.PRERELEASEIDENTIFIER] + ')*))' -var PRERELEASELOOSE = R++ -src[PRERELEASELOOSE] = '(?:-?(' + src[PRERELEASEIDENTIFIERLOOSE] + - '(?:\\.' + src[PRERELEASEIDENTIFIERLOOSE] + ')*))' +tok('PRERELEASELOOSE') +src[t.PRERELEASELOOSE] = '(?:-?(' + src[t.PRERELEASEIDENTIFIERLOOSE] + + '(?:\\.' + src[t.PRERELEASEIDENTIFIERLOOSE] + ')*))' // ## Build Metadata Identifier // Any combination of digits, letters, or hyphens. -var BUILDIDENTIFIER = R++ -src[BUILDIDENTIFIER] = '[0-9A-Za-z-]+' +tok('BUILDIDENTIFIER') +src[t.BUILDIDENTIFIER] = LETTERDASHNUMBER + '+' // ## Build Metadata // Plus sign, followed by one or more period-separated build metadata // identifiers. -var BUILD = R++ -src[BUILD] = '(?:\\+(' + src[BUILDIDENTIFIER] + - '(?:\\.' + src[BUILDIDENTIFIER] + ')*))' +tok('BUILD') +src[t.BUILD] = '(?:\\+(' + src[t.BUILDIDENTIFIER] + + '(?:\\.' + src[t.BUILDIDENTIFIER] + ')*))' // ## Full Version String // A main version, followed optionally by a pre-release version and @@ -52389,129 +53537,137 @@ src[BUILD] = '(?:\\+(' + src[BUILDIDENTIFIER] + // capturing group, because it should not ever be used in version // comparison. -var FULL = R++ -var FULLPLAIN = 'v?' + src[MAINVERSION] + - src[PRERELEASE] + '?' + - src[BUILD] + '?' +tok('FULL') +tok('FULLPLAIN') +src[t.FULLPLAIN] = 'v?' + src[t.MAINVERSION] + + src[t.PRERELEASE] + '?' + + src[t.BUILD] + '?' -src[FULL] = '^' + FULLPLAIN + '$' +src[t.FULL] = '^' + src[t.FULLPLAIN] + '$' // like full, but allows v1.2.3 and =1.2.3, which people do sometimes. // also, 1.0.0alpha1 (prerelease without the hyphen) which is pretty // common in the npm registry. -var LOOSEPLAIN = '[v=\\s]*' + src[MAINVERSIONLOOSE] + - src[PRERELEASELOOSE] + '?' + - src[BUILD] + '?' +tok('LOOSEPLAIN') +src[t.LOOSEPLAIN] = '[v=\\s]*' + src[t.MAINVERSIONLOOSE] + + src[t.PRERELEASELOOSE] + '?' + + src[t.BUILD] + '?' -var LOOSE = R++ -src[LOOSE] = '^' + LOOSEPLAIN + '$' +tok('LOOSE') +src[t.LOOSE] = '^' + src[t.LOOSEPLAIN] + '$' -var GTLT = R++ -src[GTLT] = '((?:<|>)?=?)' +tok('GTLT') +src[t.GTLT] = '((?:<|>)?=?)' // Something like "2.*" or "1.2.x". // Note that "x.x" is a valid xRange identifer, meaning "any version" // Only the first item is strictly required. 
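For context, the x-range identifiers defined next are what let a range such as '1.2.x', '1.2.*', or a bare '1' mean "anything in that position". A usage sketch with semver's documented API (these are public exports, not internals of this bundle):

const semver = require('semver');

// 'x', 'X' and '*' (or a missing position) all act as wildcards.
console.log(semver.satisfies('1.2.7', '1.2.x')); // true
console.log(semver.satisfies('1.3.0', '1.2.x')); // false
console.log(semver.satisfies('1.9.9', '1'));     // true, same as 1.x.x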
-var XRANGEIDENTIFIERLOOSE = R++ -src[XRANGEIDENTIFIERLOOSE] = src[NUMERICIDENTIFIERLOOSE] + '|x|X|\\*' -var XRANGEIDENTIFIER = R++ -src[XRANGEIDENTIFIER] = src[NUMERICIDENTIFIER] + '|x|X|\\*' +tok('XRANGEIDENTIFIERLOOSE') +src[t.XRANGEIDENTIFIERLOOSE] = src[t.NUMERICIDENTIFIERLOOSE] + '|x|X|\\*' +tok('XRANGEIDENTIFIER') +src[t.XRANGEIDENTIFIER] = src[t.NUMERICIDENTIFIER] + '|x|X|\\*' -var XRANGEPLAIN = R++ -src[XRANGEPLAIN] = '[v=\\s]*(' + src[XRANGEIDENTIFIER] + ')' + - '(?:\\.(' + src[XRANGEIDENTIFIER] + ')' + - '(?:\\.(' + src[XRANGEIDENTIFIER] + ')' + - '(?:' + src[PRERELEASE] + ')?' + - src[BUILD] + '?' + +tok('XRANGEPLAIN') +src[t.XRANGEPLAIN] = '[v=\\s]*(' + src[t.XRANGEIDENTIFIER] + ')' + + '(?:\\.(' + src[t.XRANGEIDENTIFIER] + ')' + + '(?:\\.(' + src[t.XRANGEIDENTIFIER] + ')' + + '(?:' + src[t.PRERELEASE] + ')?' + + src[t.BUILD] + '?' + ')?)?' -var XRANGEPLAINLOOSE = R++ -src[XRANGEPLAINLOOSE] = '[v=\\s]*(' + src[XRANGEIDENTIFIERLOOSE] + ')' + - '(?:\\.(' + src[XRANGEIDENTIFIERLOOSE] + ')' + - '(?:\\.(' + src[XRANGEIDENTIFIERLOOSE] + ')' + - '(?:' + src[PRERELEASELOOSE] + ')?' + - src[BUILD] + '?' + +tok('XRANGEPLAINLOOSE') +src[t.XRANGEPLAINLOOSE] = '[v=\\s]*(' + src[t.XRANGEIDENTIFIERLOOSE] + ')' + + '(?:\\.(' + src[t.XRANGEIDENTIFIERLOOSE] + ')' + + '(?:\\.(' + src[t.XRANGEIDENTIFIERLOOSE] + ')' + + '(?:' + src[t.PRERELEASELOOSE] + ')?' + + src[t.BUILD] + '?' + ')?)?' -var XRANGE = R++ -src[XRANGE] = '^' + src[GTLT] + '\\s*' + src[XRANGEPLAIN] + '$' -var XRANGELOOSE = R++ -src[XRANGELOOSE] = '^' + src[GTLT] + '\\s*' + src[XRANGEPLAINLOOSE] + '$' +tok('XRANGE') +src[t.XRANGE] = '^' + src[t.GTLT] + '\\s*' + src[t.XRANGEPLAIN] + '$' +tok('XRANGELOOSE') +src[t.XRANGELOOSE] = '^' + src[t.GTLT] + '\\s*' + src[t.XRANGEPLAINLOOSE] + '$' // Coercion. // Extract anything that could conceivably be a part of a valid semver -var COERCE = R++ -src[COERCE] = '(?:^|[^\\d])' + +tok('COERCE') +src[t.COERCE] = '(^|[^\\d])' + '(\\d{1,' + MAX_SAFE_COMPONENT_LENGTH + '})' + '(?:\\.(\\d{1,' + MAX_SAFE_COMPONENT_LENGTH + '}))?' + '(?:\\.(\\d{1,' + MAX_SAFE_COMPONENT_LENGTH + '}))?' + '(?:$|[^\\d])' +tok('COERCERTL') +re[t.COERCERTL] = new RegExp(src[t.COERCE], 'g') +safeRe[t.COERCERTL] = new RegExp(makeSafeRe(src[t.COERCE]), 'g') // Tilde ranges. // Meaning is "reasonably at or greater than" -var LONETILDE = R++ -src[LONETILDE] = '(?:~>?)' +tok('LONETILDE') +src[t.LONETILDE] = '(?:~>?)' -var TILDETRIM = R++ -src[TILDETRIM] = '(\\s*)' + src[LONETILDE] + '\\s+' -re[TILDETRIM] = new RegExp(src[TILDETRIM], 'g') +tok('TILDETRIM') +src[t.TILDETRIM] = '(\\s*)' + src[t.LONETILDE] + '\\s+' +re[t.TILDETRIM] = new RegExp(src[t.TILDETRIM], 'g') +safeRe[t.TILDETRIM] = new RegExp(makeSafeRe(src[t.TILDETRIM]), 'g') var tildeTrimReplace = '$1~' -var TILDE = R++ -src[TILDE] = '^' + src[LONETILDE] + src[XRANGEPLAIN] + '$' -var TILDELOOSE = R++ -src[TILDELOOSE] = '^' + src[LONETILDE] + src[XRANGEPLAINLOOSE] + '$' +tok('TILDE') +src[t.TILDE] = '^' + src[t.LONETILDE] + src[t.XRANGEPLAIN] + '$' +tok('TILDELOOSE') +src[t.TILDELOOSE] = '^' + src[t.LONETILDE] + src[t.XRANGEPLAINLOOSE] + '$' // Caret ranges. 
// Meaning is "at least and backwards compatible with" -var LONECARET = R++ -src[LONECARET] = '(?:\\^)' +tok('LONECARET') +src[t.LONECARET] = '(?:\\^)' -var CARETTRIM = R++ -src[CARETTRIM] = '(\\s*)' + src[LONECARET] + '\\s+' -re[CARETTRIM] = new RegExp(src[CARETTRIM], 'g') +tok('CARETTRIM') +src[t.CARETTRIM] = '(\\s*)' + src[t.LONECARET] + '\\s+' +re[t.CARETTRIM] = new RegExp(src[t.CARETTRIM], 'g') +safeRe[t.CARETTRIM] = new RegExp(makeSafeRe(src[t.CARETTRIM]), 'g') var caretTrimReplace = '$1^' -var CARET = R++ -src[CARET] = '^' + src[LONECARET] + src[XRANGEPLAIN] + '$' -var CARETLOOSE = R++ -src[CARETLOOSE] = '^' + src[LONECARET] + src[XRANGEPLAINLOOSE] + '$' +tok('CARET') +src[t.CARET] = '^' + src[t.LONECARET] + src[t.XRANGEPLAIN] + '$' +tok('CARETLOOSE') +src[t.CARETLOOSE] = '^' + src[t.LONECARET] + src[t.XRANGEPLAINLOOSE] + '$' // A simple gt/lt/eq thing, or just "" to indicate "any version" -var COMPARATORLOOSE = R++ -src[COMPARATORLOOSE] = '^' + src[GTLT] + '\\s*(' + LOOSEPLAIN + ')$|^$' -var COMPARATOR = R++ -src[COMPARATOR] = '^' + src[GTLT] + '\\s*(' + FULLPLAIN + ')$|^$' +tok('COMPARATORLOOSE') +src[t.COMPARATORLOOSE] = '^' + src[t.GTLT] + '\\s*(' + src[t.LOOSEPLAIN] + ')$|^$' +tok('COMPARATOR') +src[t.COMPARATOR] = '^' + src[t.GTLT] + '\\s*(' + src[t.FULLPLAIN] + ')$|^$' // An expression to strip any whitespace between the gtlt and the thing // it modifies, so that `> 1.2.3` ==> `>1.2.3` -var COMPARATORTRIM = R++ -src[COMPARATORTRIM] = '(\\s*)' + src[GTLT] + - '\\s*(' + LOOSEPLAIN + '|' + src[XRANGEPLAIN] + ')' +tok('COMPARATORTRIM') +src[t.COMPARATORTRIM] = '(\\s*)' + src[t.GTLT] + + '\\s*(' + src[t.LOOSEPLAIN] + '|' + src[t.XRANGEPLAIN] + ')' // this one has to use the /g flag -re[COMPARATORTRIM] = new RegExp(src[COMPARATORTRIM], 'g') +re[t.COMPARATORTRIM] = new RegExp(src[t.COMPARATORTRIM], 'g') +safeRe[t.COMPARATORTRIM] = new RegExp(makeSafeRe(src[t.COMPARATORTRIM]), 'g') var comparatorTrimReplace = '$1$2$3' // Something like `1.2.3 - 1.2.4` // Note that these all use the loose form, because they'll be // checked against either the strict or loose comparator form // later. -var HYPHENRANGE = R++ -src[HYPHENRANGE] = '^\\s*(' + src[XRANGEPLAIN] + ')' + +tok('HYPHENRANGE') +src[t.HYPHENRANGE] = '^\\s*(' + src[t.XRANGEPLAIN] + ')' + '\\s+-\\s+' + - '(' + src[XRANGEPLAIN] + ')' + + '(' + src[t.XRANGEPLAIN] + ')' + '\\s*$' -var HYPHENRANGELOOSE = R++ -src[HYPHENRANGELOOSE] = '^\\s*(' + src[XRANGEPLAINLOOSE] + ')' + +tok('HYPHENRANGELOOSE') +src[t.HYPHENRANGELOOSE] = '^\\s*(' + src[t.XRANGEPLAINLOOSE] + ')' + '\\s+-\\s+' + - '(' + src[XRANGEPLAINLOOSE] + ')' + + '(' + src[t.XRANGEPLAINLOOSE] + ')' + '\\s*$' // Star ranges basically just allow anything at all. -var STAR = R++ -src[STAR] = '(<|>)?=?\\s*\\*' +tok('STAR') +src[t.STAR] = '(<|>)?=?\\s*\\*' // Compile to actual regexp objects. // All are flag-free, unless they were created above with a flag. @@ -52519,6 +53675,14 @@ for (var i = 0; i < R; i++) { debug(i, src[i]) if (!re[i]) { re[i] = new RegExp(src[i]) + + // Replace all greedy whitespace to prevent regex dos issues. These regex are + // used internally via the safeRe object since all inputs in this library get + // normalized first to trim and collapse all extra whitespace. The original + // regexes are exported for userland consumption and lower level usage. A + // future breaking change could export the safer regex only with a note that + // all input should have extra whitespace removed. 
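The quantifier rewrite described in the comment above is what removes the ReDoS risk: unbounded '*' and '+' on whitespace, digits, and identifier characters become explicit '{0,N}' / '{1,N}' bounds, so a crafted range string cannot trigger catastrophic backtracking. A standalone sketch of that substitution (boundQuantifiers and the 256 digit cap are illustrative stand-ins for the bundle's makeSafeRe and its MAX_LENGTH limit):

// Bound greedy quantifiers on a few character classes before compiling
// the pattern, so pathological input cannot blow up the regex engine.
const replacements = [
  ['\\s', 1],    // at most one whitespace character
  ['\\d', 256]   // digit runs capped at the assumed maximum version length
];

function boundQuantifiers(source) {
  for (const [token, max] of replacements) {
    source = source
      .split(token + '*').join(token + '{0,' + max + '}')
      .split(token + '+').join(token + '{1,' + max + '}');
  }
  return source;
}

console.log(boundQuantifiers('(\\s*)>=\\s+'));
// -> (\s{0,1})>=\s{1,1}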
+ safeRe[i] = new RegExp(makeSafeRe(src[i])) } } @@ -52543,7 +53707,7 @@ function parse (version, options) { return null } - var r = options.loose ? re[LOOSE] : re[FULL] + var r = options.loose ? safeRe[t.LOOSE] : safeRe[t.FULL] if (!r.test(version)) { return null } @@ -52598,7 +53762,7 @@ function SemVer (version, options) { this.options = options this.loose = !!options.loose - var m = version.trim().match(options.loose ? re[LOOSE] : re[FULL]) + var m = version.trim().match(options.loose ? safeRe[t.LOOSE] : safeRe[t.FULL]) if (!m) { throw new TypeError('Invalid Version: ' + version) @@ -53043,6 +54207,7 @@ function Comparator (comp, options) { return new Comparator(comp, options) } + comp = comp.trim().split(/\s+/).join(' ') debug('comparator', comp, options) this.options = options this.loose = !!options.loose @@ -53059,7 +54224,7 @@ function Comparator (comp, options) { var ANY = {} Comparator.prototype.parse = function (comp) { - var r = this.options.loose ? re[COMPARATORLOOSE] : re[COMPARATOR] + var r = this.options.loose ? safeRe[t.COMPARATORLOOSE] : safeRe[t.COMPARATOR] var m = comp.match(r) if (!m) { @@ -53183,9 +54348,16 @@ function Range (range, options) { this.loose = !!options.loose this.includePrerelease = !!options.includePrerelease - // First, split based on boolean or || + // First reduce all whitespace as much as possible so we do not have to rely + // on potentially slow regexes like \s*. This is then stored and used for + // future error messages as well. this.raw = range - this.set = range.split(/\s*\|\|\s*/).map(function (range) { + .trim() + .split(/\s+/) + .join(' ') + + // First, split based on boolean or || + this.set = this.raw.split('||').map(function (range) { return this.parseRange(range.trim()) }, this).filter(function (c) { // throw out any that are not relevant for whatever reason @@ -53193,7 +54365,7 @@ function Range (range, options) { }) if (!this.set.length) { - throw new TypeError('Invalid SemVer Range: ' + range) + throw new TypeError('Invalid SemVer Range: ' + this.raw) } this.format() @@ -53212,20 +54384,19 @@ Range.prototype.toString = function () { Range.prototype.parseRange = function (range) { var loose = this.options.loose - range = range.trim() // `1.2.3 - 1.2.4` => `>=1.2.3 <=1.2.4` - var hr = loose ? re[HYPHENRANGELOOSE] : re[HYPHENRANGE] + var hr = loose ? safeRe[t.HYPHENRANGELOOSE] : safeRe[t.HYPHENRANGE] range = range.replace(hr, hyphenReplace) debug('hyphen replace', range) // `> 1.2.3 < 1.2.5` => `>1.2.3 <1.2.5` - range = range.replace(re[COMPARATORTRIM], comparatorTrimReplace) - debug('comparator trim', range, re[COMPARATORTRIM]) + range = range.replace(safeRe[t.COMPARATORTRIM], comparatorTrimReplace) + debug('comparator trim', range, safeRe[t.COMPARATORTRIM]) // `~ 1.2.3` => `~1.2.3` - range = range.replace(re[TILDETRIM], tildeTrimReplace) + range = range.replace(safeRe[t.TILDETRIM], tildeTrimReplace) // `^ 1.2.3` => `^1.2.3` - range = range.replace(re[CARETTRIM], caretTrimReplace) + range = range.replace(safeRe[t.CARETTRIM], caretTrimReplace) // normalize spaces range = range.split(/\s+/).join(' ') @@ -53233,7 +54404,7 @@ Range.prototype.parseRange = function (range) { // At this point, the range is completely trimmed and // ready to be split into comparators. - var compRe = loose ? re[COMPARATORLOOSE] : re[COMPARATOR] + var compRe = loose ? 
safeRe[t.COMPARATORLOOSE] : safeRe[t.COMPARATOR] var set = range.split(' ').map(function (comp) { return parseComparator(comp, this.options) }, this).join(' ').split(/\s+/) @@ -53333,7 +54504,7 @@ function replaceTildes (comp, options) { } function replaceTilde (comp, options) { - var r = options.loose ? re[TILDELOOSE] : re[TILDE] + var r = options.loose ? safeRe[t.TILDELOOSE] : safeRe[t.TILDE] return comp.replace(r, function (_, M, m, p, pr) { debug('tilde', comp, _, M, m, p, pr) var ret @@ -53374,7 +54545,7 @@ function replaceCarets (comp, options) { function replaceCaret (comp, options) { debug('caret', comp, options) - var r = options.loose ? re[CARETLOOSE] : re[CARET] + var r = options.loose ? safeRe[t.CARETLOOSE] : safeRe[t.CARET] return comp.replace(r, function (_, M, m, p, pr) { debug('caret', comp, _, M, m, p, pr) var ret @@ -53433,7 +54604,7 @@ function replaceXRanges (comp, options) { function replaceXRange (comp, options) { comp = comp.trim() - var r = options.loose ? re[XRANGELOOSE] : re[XRANGE] + var r = options.loose ? safeRe[t.XRANGELOOSE] : safeRe[t.XRANGE] return comp.replace(r, function (ret, gtlt, M, m, p, pr) { debug('xRange', comp, ret, gtlt, M, m, p, pr) var xM = isX(M) @@ -53445,10 +54616,14 @@ function replaceXRange (comp, options) { gtlt = '' } + // if we're including prereleases in the match, then we need + // to fix this to -0, the lowest possible prerelease value + pr = options.includePrerelease ? '-0' : '' + if (xM) { if (gtlt === '>' || gtlt === '<') { // nothing is allowed - ret = '<0.0.0' + ret = '<0.0.0-0' } else { // nothing is forbidden ret = '*' @@ -53485,11 +54660,12 @@ function replaceXRange (comp, options) { } } - ret = gtlt + M + '.' + m + '.' + p + ret = gtlt + M + '.' + m + '.' + p + pr } else if (xm) { - ret = '>=' + M + '.0.0 <' + (+M + 1) + '.0.0' + ret = '>=' + M + '.0.0' + pr + ' <' + (+M + 1) + '.0.0' + pr } else if (xp) { - ret = '>=' + M + '.' + m + '.0 <' + M + '.' + (+m + 1) + '.0' + ret = '>=' + M + '.' + m + '.0' + pr + + ' <' + M + '.' + (+m + 1) + '.0' + pr } debug('xRange return', ret) @@ -53503,10 +54679,10 @@ function replaceXRange (comp, options) { function replaceStars (comp, options) { debug('replaceStars', comp, options) // Looseness is ignored here. star is always as loose as it gets! - return comp.trim().replace(re[STAR], '') + return comp.trim().replace(safeRe[t.STAR], '') } -// This function is passed to string.replace(re[HYPHENRANGE]) +// This function is passed to string.replace(re[t.HYPHENRANGE]) // M, m, patch, prerelease, build // 1.2 - 3.4.5 => >=1.2.0 <=3.4.5 // 1.2.3 - 3.4 => >=1.2.0 <3.5.0 Any 3.4.x will do @@ -53817,19 +54993,49 @@ function coerce (version, options) { return version } + if (typeof version === 'number') { + version = String(version) + } + if (typeof version !== 'string') { return null } - var match = version.match(re[COERCE]) + options = options || {} - if (match == null) { + var match = null + if (!options.rtl) { + match = version.match(safeRe[t.COERCE]) + } else { + // Find the right-most coercible string that does not share + // a terminus with a more left-ward coercible string. + // Eg, '1.2.3.4' wants to coerce '2.3.4', not '3.4' or '4' + // + // Walk through the string checking with a /g regexp + // Manually set the index so as to pick up overlapping matches. + // Stop when we get a match that ends at the string end, since no + // coercible string can be more right-ward without the same terminus. 
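The right-to-left walk described in the comment above is what the rtl option to coerce relies on; without it, the left-most coercible run wins. A usage sketch against semver's documented API, using the example values given in the comment itself:

const semver = require('semver');

// Default: the left-most coercible substring is taken.
console.log(semver.coerce('1.2.3.4').version);                 // 1.2.3

// rtl: walk from the right and keep the match that reaches the end of the string.
console.log(semver.coerce('1.2.3.4', { rtl: true }).version);  // 2.3.4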
+ var next + while ((next = safeRe[t.COERCERTL].exec(version)) && + (!match || match.index + match[0].length !== version.length) + ) { + if (!match || + next.index + next[0].length !== match.index + match[0].length) { + match = next + } + safeRe[t.COERCERTL].lastIndex = next.index + next[1].length + next[2].length + } + // leave it in a clean state + safeRe[t.COERCERTL].lastIndex = -1 + } + + if (match === null) { return null } - return parse(match[1] + - '.' + (match[2] || '0') + - '.' + (match[3] || '0'), options) + return parse(match[2] + + '.' + (match[3] || '0') + + '.' + (match[4] || '0'), options) } @@ -59126,87 +60332,91 @@ exports.debug = debug; // for test /***/ (function(__unused_webpack_module, exports, __nccwpck_require__) { "use strict"; - -var __createBinding = (this && this.__createBinding) || (Object.create ? (function(o, m, k, k2) { - if (k2 === undefined) k2 = k; - Object.defineProperty(o, k2, { enumerable: true, get: function() { return m[k]; } }); -}) : (function(o, m, k, k2) { - if (k2 === undefined) k2 = k; - o[k2] = m[k]; -})); -var __setModuleDefault = (this && this.__setModuleDefault) || (Object.create ? (function(o, v) { - Object.defineProperty(o, "default", { enumerable: true, value: v }); -}) : function(o, v) { - o["default"] = v; -}); -var __importStar = (this && this.__importStar) || function (mod) { - if (mod && mod.__esModule) return mod; - var result = {}; - if (mod != null) for (var k in mod) if (k !== "default" && Object.prototype.hasOwnProperty.call(mod, k)) __createBinding(result, mod, k); - __setModuleDefault(result, mod); - return result; -}; -var __awaiter = (this && this.__awaiter) || function (thisArg, _arguments, P, generator) { - function adopt(value) { return value instanceof P ? value : new P(function (resolve) { resolve(value); }); } - return new (P || (P = Promise))(function (resolve, reject) { - function fulfilled(value) { try { step(generator.next(value)); } catch (e) { reject(e); } } - function rejected(value) { try { step(generator["throw"](value)); } catch (e) { reject(e); } } - function step(result) { result.done ? resolve(result.value) : adopt(result.value).then(fulfilled, rejected); } - step((generator = generator.apply(thisArg, _arguments || [])).next()); - }); -}; -var __importDefault = (this && this.__importDefault) || function (mod) { - return (mod && mod.__esModule) ? mod : { "default": mod }; -}; -Object.defineProperty(exports, "__esModule", ({ value: true })); -exports.run = void 0; -const core = __importStar(__nccwpck_require__(2186)); -const cache = __importStar(__nccwpck_require__(7799)); -const fs_1 = __importDefault(__nccwpck_require__(7147)); -const constants_1 = __nccwpck_require__(9042); -const cache_utils_1 = __nccwpck_require__(1678); -// Catch and log any unhandled exceptions. These exceptions can leak out of the uploadChunk method in -// @actions/toolkit when a failed upload closes the file descriptor causing any in-process reads to -// throw an uncaught exception. Instead of failing this action, just warn. 
-process.on('uncaughtException', e => { - const warningPrefix = '[warning]'; - core.info(`${warningPrefix}${e.message}`); -}); -function run() { - return __awaiter(this, void 0, void 0, function* () { - try { - const cacheLock = core.getInput('cache'); - yield cachePackages(cacheLock); - } - catch (error) { - core.setFailed(error.message); - } - }); -} -exports.run = run; -const cachePackages = (packageManager) => __awaiter(void 0, void 0, void 0, function* () { - const state = core.getState(constants_1.State.CacheMatchedKey); - const primaryKey = core.getState(constants_1.State.CachePrimaryKey); - const packageManagerInfo = yield cache_utils_1.getPackageManagerInfo(packageManager); - if (!packageManagerInfo) { - core.debug(`Caching for '${packageManager}' is not supported`); - return; - } - const cachePath = yield cache_utils_1.getCacheDirectoryPath(packageManagerInfo, packageManager); - if (!fs_1.default.existsSync(cachePath)) { - throw new Error(`Cache folder path is retrieved for ${packageManager} but doesn't exist on disk: ${cachePath}`); - } - if (primaryKey === state) { - core.info(`Cache hit occurred on the primary key ${primaryKey}, not saving cache.`); - return; - } - const cacheId = yield cache.saveCache([cachePath], primaryKey); - if (cacheId == -1) { - return; - } - core.info(`Cache saved with the key: ${primaryKey}`); -}); -run(); + +var __createBinding = (this && this.__createBinding) || (Object.create ? (function(o, m, k, k2) { + if (k2 === undefined) k2 = k; + Object.defineProperty(o, k2, { enumerable: true, get: function() { return m[k]; } }); +}) : (function(o, m, k, k2) { + if (k2 === undefined) k2 = k; + o[k2] = m[k]; +})); +var __setModuleDefault = (this && this.__setModuleDefault) || (Object.create ? (function(o, v) { + Object.defineProperty(o, "default", { enumerable: true, value: v }); +}) : function(o, v) { + o["default"] = v; +}); +var __importStar = (this && this.__importStar) || function (mod) { + if (mod && mod.__esModule) return mod; + var result = {}; + if (mod != null) for (var k in mod) if (k !== "default" && Object.prototype.hasOwnProperty.call(mod, k)) __createBinding(result, mod, k); + __setModuleDefault(result, mod); + return result; +}; +var __awaiter = (this && this.__awaiter) || function (thisArg, _arguments, P, generator) { + function adopt(value) { return value instanceof P ? value : new P(function (resolve) { resolve(value); }); } + return new (P || (P = Promise))(function (resolve, reject) { + function fulfilled(value) { try { step(generator.next(value)); } catch (e) { reject(e); } } + function rejected(value) { try { step(generator["throw"](value)); } catch (e) { reject(e); } } + function step(result) { result.done ? resolve(result.value) : adopt(result.value).then(fulfilled, rejected); } + step((generator = generator.apply(thisArg, _arguments || [])).next()); + }); +}; +var __importDefault = (this && this.__importDefault) || function (mod) { + return (mod && mod.__esModule) ? mod : { "default": mod }; +}; +Object.defineProperty(exports, "__esModule", ({ value: true })); +exports.run = void 0; +const core = __importStar(__nccwpck_require__(2186)); +const cache = __importStar(__nccwpck_require__(7799)); +const fs_1 = __importDefault(__nccwpck_require__(7147)); +const constants_1 = __nccwpck_require__(9042); +const cache_utils_1 = __nccwpck_require__(1678); +// Catch and log any unhandled exceptions. 
These exceptions can leak out of the uploadChunk method in +// @actions/toolkit when a failed upload closes the file descriptor causing any in-process reads to +// throw an uncaught exception. Instead of failing this action, just warn. +process.on('uncaughtException', e => { + const warningPrefix = '[warning]'; + core.info(`${warningPrefix}${e.message}`); +}); +function run() { + return __awaiter(this, void 0, void 0, function* () { + try { + const cacheLock = core.getInput('cache'); + yield cachePackages(cacheLock); + } + catch (error) { + core.setFailed(error.message); + } + }); +} +exports.run = run; +const cachePackages = (packageManager) => __awaiter(void 0, void 0, void 0, function* () { + const state = core.getState(constants_1.State.CacheMatchedKey); + const primaryKey = core.getState(constants_1.State.CachePrimaryKey); + let cachePaths = JSON.parse(core.getState(constants_1.State.CachePaths) || '[]'); + cachePaths = cachePaths.filter(fs_1.default.existsSync); + const packageManagerInfo = yield cache_utils_1.getPackageManagerInfo(packageManager); + if (!packageManagerInfo) { + core.debug(`Caching for '${packageManager}' is not supported`); + return; + } + if (!cachePaths.length) { + // TODO: core.getInput has a bug - it can return undefined despite its definition (tests only?) + // export declare function getInput(name: string, options?: InputOptions): string; + const cacheDependencyPath = core.getInput('cache-dependency-path') || ''; + throw new Error(`Cache folder paths are not retrieved for ${packageManager} with cache-dependency-path = ${cacheDependencyPath}`); + } + if (primaryKey === state) { + core.info(`Cache hit occurred on the primary key ${primaryKey}, not saving cache.`); + return; + } + const cacheId = yield cache.saveCache(cachePaths, primaryKey); + if (cacheId == -1) { + return; + } + core.info(`Cache saved with the key: ${primaryKey}`); +}); +run(); /***/ }), @@ -59215,123 +60425,254 @@ run(); /***/ (function(__unused_webpack_module, exports, __nccwpck_require__) { "use strict"; - -var __createBinding = (this && this.__createBinding) || (Object.create ? (function(o, m, k, k2) { - if (k2 === undefined) k2 = k; - Object.defineProperty(o, k2, { enumerable: true, get: function() { return m[k]; } }); -}) : (function(o, m, k, k2) { - if (k2 === undefined) k2 = k; - o[k2] = m[k]; -})); -var __setModuleDefault = (this && this.__setModuleDefault) || (Object.create ? (function(o, v) { - Object.defineProperty(o, "default", { enumerable: true, value: v }); -}) : function(o, v) { - o["default"] = v; -}); -var __importStar = (this && this.__importStar) || function (mod) { - if (mod && mod.__esModule) return mod; - var result = {}; - if (mod != null) for (var k in mod) if (k !== "default" && Object.prototype.hasOwnProperty.call(mod, k)) __createBinding(result, mod, k); - __setModuleDefault(result, mod); - return result; -}; -var __awaiter = (this && this.__awaiter) || function (thisArg, _arguments, P, generator) { - function adopt(value) { return value instanceof P ? value : new P(function (resolve) { resolve(value); }); } - return new (P || (P = Promise))(function (resolve, reject) { - function fulfilled(value) { try { step(generator.next(value)); } catch (e) { reject(e); } } - function rejected(value) { try { step(generator["throw"](value)); } catch (e) { reject(e); } } - function step(result) { result.done ? 
resolve(result.value) : adopt(result.value).then(fulfilled, rejected); } - step((generator = generator.apply(thisArg, _arguments || [])).next()); - }); -}; -Object.defineProperty(exports, "__esModule", ({ value: true })); -exports.isCacheFeatureAvailable = exports.isGhes = exports.getCacheDirectoryPath = exports.getPackageManagerInfo = exports.getCommandOutput = exports.supportedPackageManagers = void 0; -const core = __importStar(__nccwpck_require__(2186)); -const exec = __importStar(__nccwpck_require__(1514)); -const cache = __importStar(__nccwpck_require__(7799)); -exports.supportedPackageManagers = { - npm: { - lockFilePatterns: ['package-lock.json', 'npm-shrinkwrap.json', 'yarn.lock'], - getCacheFolderCommand: 'npm config get cache' - }, - pnpm: { - lockFilePatterns: ['pnpm-lock.yaml'], - getCacheFolderCommand: 'pnpm store path --silent' - }, - yarn1: { - lockFilePatterns: ['yarn.lock'], - getCacheFolderCommand: 'yarn cache dir' - }, - yarn2: { - lockFilePatterns: ['yarn.lock'], - getCacheFolderCommand: 'yarn config get cacheFolder' - } -}; -const getCommandOutput = (toolCommand) => __awaiter(void 0, void 0, void 0, function* () { - let { stdout, stderr, exitCode } = yield exec.getExecOutput(toolCommand, undefined, { ignoreReturnCode: true }); - if (exitCode) { - stderr = !stderr.trim() - ? `The '${toolCommand}' command failed with exit code: ${exitCode}` - : stderr; - throw new Error(stderr); - } - return stdout.trim(); -}); -exports.getCommandOutput = getCommandOutput; -const getPackageManagerVersion = (packageManager, command) => __awaiter(void 0, void 0, void 0, function* () { - const stdOut = yield exports.getCommandOutput(`${packageManager} ${command}`); - if (!stdOut) { - throw new Error(`Could not retrieve version of ${packageManager}`); - } - return stdOut; -}); -const getPackageManagerInfo = (packageManager) => __awaiter(void 0, void 0, void 0, function* () { - if (packageManager === 'npm') { - return exports.supportedPackageManagers.npm; - } - else if (packageManager === 'pnpm') { - return exports.supportedPackageManagers.pnpm; - } - else if (packageManager === 'yarn') { - const yarnVersion = yield getPackageManagerVersion('yarn', '--version'); - core.debug(`Consumed yarn version is ${yarnVersion}`); - if (yarnVersion.startsWith('1.')) { - return exports.supportedPackageManagers.yarn1; - } - else { - return exports.supportedPackageManagers.yarn2; - } - } - else { - return null; - } -}); -exports.getPackageManagerInfo = getPackageManagerInfo; -const getCacheDirectoryPath = (packageManagerInfo, packageManager) => __awaiter(void 0, void 0, void 0, function* () { - const stdOut = yield exports.getCommandOutput(packageManagerInfo.getCacheFolderCommand); - if (!stdOut) { - throw new Error(`Could not get cache folder path for ${packageManager}`); - } - core.debug(`${packageManager} path is ${stdOut}`); - return stdOut.trim(); -}); -exports.getCacheDirectoryPath = getCacheDirectoryPath; -function isGhes() { - const ghUrl = new URL(process.env['GITHUB_SERVER_URL'] || 'https://github.com'); - return ghUrl.hostname.toUpperCase() !== 'GITHUB.COM'; -} -exports.isGhes = isGhes; -function isCacheFeatureAvailable() { - if (cache.isFeatureAvailable()) - return true; - if (isGhes()) { - core.warning('Cache action is only supported on GHES version >= 3.5. If you are on version >=3.5 Please check with GHES admin if Actions cache service is enabled or not.'); - return false; - } - core.warning('The runner was not able to contact the cache service. 
Caching will be skipped'); - return false; -} -exports.isCacheFeatureAvailable = isCacheFeatureAvailable; + +var __createBinding = (this && this.__createBinding) || (Object.create ? (function(o, m, k, k2) { + if (k2 === undefined) k2 = k; + Object.defineProperty(o, k2, { enumerable: true, get: function() { return m[k]; } }); +}) : (function(o, m, k, k2) { + if (k2 === undefined) k2 = k; + o[k2] = m[k]; +})); +var __setModuleDefault = (this && this.__setModuleDefault) || (Object.create ? (function(o, v) { + Object.defineProperty(o, "default", { enumerable: true, value: v }); +}) : function(o, v) { + o["default"] = v; +}); +var __importStar = (this && this.__importStar) || function (mod) { + if (mod && mod.__esModule) return mod; + var result = {}; + if (mod != null) for (var k in mod) if (k !== "default" && Object.prototype.hasOwnProperty.call(mod, k)) __createBinding(result, mod, k); + __setModuleDefault(result, mod); + return result; +}; +var __awaiter = (this && this.__awaiter) || function (thisArg, _arguments, P, generator) { + function adopt(value) { return value instanceof P ? value : new P(function (resolve) { resolve(value); }); } + return new (P || (P = Promise))(function (resolve, reject) { + function fulfilled(value) { try { step(generator.next(value)); } catch (e) { reject(e); } } + function rejected(value) { try { step(generator["throw"](value)); } catch (e) { reject(e); } } + function step(result) { result.done ? resolve(result.value) : adopt(result.value).then(fulfilled, rejected); } + step((generator = generator.apply(thisArg, _arguments || [])).next()); + }); +}; +var __importDefault = (this && this.__importDefault) || function (mod) { + return (mod && mod.__esModule) ? mod : { "default": mod }; +}; +Object.defineProperty(exports, "__esModule", ({ value: true })); +exports.isCacheFeatureAvailable = exports.isGhes = exports.repoHasYarnBerryManagedDependencies = exports.getCacheDirectories = exports.resetProjectDirectoriesMemoized = exports.getPackageManagerInfo = exports.getCommandOutputNotEmpty = exports.getCommandOutput = exports.supportedPackageManagers = void 0; +const core = __importStar(__nccwpck_require__(2186)); +const exec = __importStar(__nccwpck_require__(1514)); +const cache = __importStar(__nccwpck_require__(7799)); +const glob = __importStar(__nccwpck_require__(8090)); +const path_1 = __importDefault(__nccwpck_require__(1017)); +const fs_1 = __importDefault(__nccwpck_require__(7147)); +const util_1 = __nccwpck_require__(2629); +exports.supportedPackageManagers = { + npm: { + name: 'npm', + lockFilePatterns: ['package-lock.json', 'npm-shrinkwrap.json', 'yarn.lock'], + getCacheFolderPath: () => exports.getCommandOutputNotEmpty('npm config get cache', 'Could not get npm cache folder path') + }, + pnpm: { + name: 'pnpm', + lockFilePatterns: ['pnpm-lock.yaml'], + getCacheFolderPath: () => exports.getCommandOutputNotEmpty('pnpm store path --silent', 'Could not get pnpm cache folder path') + }, + yarn: { + name: 'yarn', + lockFilePatterns: ['yarn.lock'], + getCacheFolderPath: (projectDir) => __awaiter(void 0, void 0, void 0, function* () { + const yarnVersion = yield exports.getCommandOutputNotEmpty(`yarn --version`, 'Could not retrieve version of yarn', projectDir); + core.debug(`Consumed yarn version is ${yarnVersion} (working dir: "${projectDir || ''}")`); + const stdOut = yarnVersion.startsWith('1.') + ? 
yield exports.getCommandOutput('yarn cache dir', projectDir) + : yield exports.getCommandOutput('yarn config get cacheFolder', projectDir); + if (!stdOut) { + throw new Error(`Could not get yarn cache folder path for ${projectDir}`); + } + return stdOut; + }) + } +}; +const getCommandOutput = (toolCommand, cwd) => __awaiter(void 0, void 0, void 0, function* () { + let { stdout, stderr, exitCode } = yield exec.getExecOutput(toolCommand, undefined, Object.assign({ ignoreReturnCode: true }, (cwd && { cwd }))); + if (exitCode) { + stderr = !stderr.trim() + ? `The '${toolCommand}' command failed with exit code: ${exitCode}` + : stderr; + throw new Error(stderr); + } + return stdout.trim(); +}); +exports.getCommandOutput = getCommandOutput; +const getCommandOutputNotEmpty = (toolCommand, error, cwd) => __awaiter(void 0, void 0, void 0, function* () { + const stdOut = exports.getCommandOutput(toolCommand, cwd); + if (!stdOut) { + throw new Error(error); + } + return stdOut; +}); +exports.getCommandOutputNotEmpty = getCommandOutputNotEmpty; +const getPackageManagerInfo = (packageManager) => __awaiter(void 0, void 0, void 0, function* () { + if (packageManager === 'npm') { + return exports.supportedPackageManagers.npm; + } + else if (packageManager === 'pnpm') { + return exports.supportedPackageManagers.pnpm; + } + else if (packageManager === 'yarn') { + return exports.supportedPackageManagers.yarn; + } + else { + return null; + } +}); +exports.getPackageManagerInfo = getPackageManagerInfo; +/** + * getProjectDirectoriesFromCacheDependencyPath is called twice during `restoreCache` + * - first through `getCacheDirectories` + * - second from `repoHasYarn3ManagedCache` + * + * it contains expensive IO operation and thus should be memoized + */ +let projectDirectoriesMemoized = null; +/** + * unit test must reset memoized variables + */ +const resetProjectDirectoriesMemoized = () => (projectDirectoriesMemoized = null); +exports.resetProjectDirectoriesMemoized = resetProjectDirectoriesMemoized; +/** + * Expands (converts) the string input `cache-dependency-path` to list of directories that + * may be project roots + * @param cacheDependencyPath - either a single string or multiline string with possible glob patterns + * expected to be the result of `core.getInput('cache-dependency-path')` + * @return list of directories and possible + */ +const getProjectDirectoriesFromCacheDependencyPath = (cacheDependencyPath) => __awaiter(void 0, void 0, void 0, function* () { + if (projectDirectoriesMemoized !== null) { + return projectDirectoriesMemoized; + } + const globber = yield glob.create(cacheDependencyPath); + const cacheDependenciesPaths = yield globber.glob(); + const existingDirectories = cacheDependenciesPaths + .map(path_1.default.dirname) + .filter(util_1.unique()) + .map(dirName => fs_1.default.realpathSync(dirName)) + .filter(directory => fs_1.default.lstatSync(directory).isDirectory()); + if (!existingDirectories.length) + core.warning(`No existing directories found containing cache-dependency-path="${cacheDependencyPath}"`); + projectDirectoriesMemoized = existingDirectories; + return existingDirectories; +}); +/** + * Finds the cache directories configured for the repo if cache-dependency-path is not empty + * @param packageManagerInfo - an object having getCacheFolderPath method specific to given PM + * @param cacheDependencyPath - either a single string or multiline string with possible glob patterns + * expected to be the result of `core.getInput('cache-dependency-path')` + * @return list of 
files on which the cache depends + */ +const getCacheDirectoriesFromCacheDependencyPath = (packageManagerInfo, cacheDependencyPath) => __awaiter(void 0, void 0, void 0, function* () { + const projectDirectories = yield getProjectDirectoriesFromCacheDependencyPath(cacheDependencyPath); + const cacheFoldersPaths = yield Promise.all(projectDirectories.map((projectDirectory) => __awaiter(void 0, void 0, void 0, function* () { + const cacheFolderPath = yield packageManagerInfo.getCacheFolderPath(projectDirectory); + core.debug(`${packageManagerInfo.name}'s cache folder "${cacheFolderPath}" configured for the directory "${projectDirectory}"`); + return cacheFolderPath; + }))); + // uniq in order to do not cache the same directories twice + return cacheFoldersPaths.filter(util_1.unique()); +}); +/** + * Finds the cache directories configured for the repo ignoring cache-dependency-path + * @param packageManagerInfo - an object having getCacheFolderPath method specific to given PM + * @return list of files on which the cache depends + */ +const getCacheDirectoriesForRootProject = (packageManagerInfo) => __awaiter(void 0, void 0, void 0, function* () { + const cacheFolderPath = yield packageManagerInfo.getCacheFolderPath(); + core.debug(`${packageManagerInfo.name}'s cache folder "${cacheFolderPath}" configured for the root directory`); + return [cacheFolderPath]; +}); +/** + * A function to find the cache directories configured for the repo + * currently it handles only the case of PM=yarn && cacheDependencyPath is not empty + * @param packageManagerInfo - an object having getCacheFolderPath method specific to given PM + * @param cacheDependencyPath - either a single string or multiline string with possible glob patterns + * expected to be the result of `core.getInput('cache-dependency-path')` + * @return list of files on which the cache depends + */ +const getCacheDirectories = (packageManagerInfo, cacheDependencyPath) => __awaiter(void 0, void 0, void 0, function* () { + // For yarn, if cacheDependencyPath is set, ask information about cache folders in each project + // folder satisfied by cacheDependencyPath https://github.com/actions/setup-node/issues/488 + if (packageManagerInfo.name === 'yarn' && cacheDependencyPath) { + return getCacheDirectoriesFromCacheDependencyPath(packageManagerInfo, cacheDependencyPath); + } + return getCacheDirectoriesForRootProject(packageManagerInfo); +}); +exports.getCacheDirectories = getCacheDirectories; +/** + * A function to check if the directory is a yarn project configured to manage + * obsolete dependencies in the local cache + * @param directory - a path to the folder + * @return - true if the directory's project is yarn managed + * - if there's .yarn/cache folder do not mess with the dependencies kept in the repo, return false + * - global cache is not managed by yarn @see https://yarnpkg.com/features/offline-cache, return false + * - if local cache is not explicitly enabled (not yarn3), return false + * - return true otherwise + */ +const projectHasYarnBerryManagedDependencies = (directory) => __awaiter(void 0, void 0, void 0, function* () { + const workDir = directory || process.env.GITHUB_WORKSPACE || '.'; + core.debug(`check if "${workDir}" has locally managed yarn3 dependencies`); + // if .yarn/cache directory exists the cache is managed by version control system + const yarnCacheFile = path_1.default.join(workDir, '.yarn', 'cache'); + if (fs_1.default.existsSync(yarnCacheFile) && + fs_1.default.lstatSync(yarnCacheFile).isDirectory()) { + 
core.debug(`"${workDir}" has .yarn/cache - dependencies are kept in the repository`); + return Promise.resolve(false); + } + // NOTE: yarn1 returns 'undefined' with return code = 0 + const enableGlobalCache = yield exports.getCommandOutput('yarn config get enableGlobalCache', workDir); + // only local cache is not managed by yarn + const managed = enableGlobalCache.includes('false'); + if (managed) { + core.debug(`"${workDir}" dependencies are managed by yarn 3 locally`); + return true; + } + else { + core.debug(`"${workDir}" dependencies are not managed by yarn 3 locally`); + return false; + } +}); +/** + * A function to report the repo contains Yarn managed projects + * @param packageManagerInfo - used to make sure current package manager is yarn + * @param cacheDependencyPath - either a single string or multiline string with possible glob patterns + * expected to be the result of `core.getInput('cache-dependency-path')` + * @return - true if all project directories configured to be Yarn managed + */ +const repoHasYarnBerryManagedDependencies = (packageManagerInfo, cacheDependencyPath) => __awaiter(void 0, void 0, void 0, function* () { + if (packageManagerInfo.name !== 'yarn') + return false; + const yarnDirs = cacheDependencyPath + ? yield getProjectDirectoriesFromCacheDependencyPath(cacheDependencyPath) + : ['']; + const isManagedList = yield Promise.all(yarnDirs.map(projectHasYarnBerryManagedDependencies)); + return isManagedList.every(Boolean); +}); +exports.repoHasYarnBerryManagedDependencies = repoHasYarnBerryManagedDependencies; +function isGhes() { + const ghUrl = new URL(process.env['GITHUB_SERVER_URL'] || 'https://github.com'); + return ghUrl.hostname.toUpperCase() !== 'GITHUB.COM'; +} +exports.isGhes = isGhes; +function isCacheFeatureAvailable() { + if (cache.isFeatureAvailable()) + return true; + if (isGhes()) { + core.warning('Cache action is only supported on GHES version >= 3.5. If you are on version >=3.5 Please check with GHES admin if Actions cache service is enabled or not.'); + return false; + } + core.warning('The runner was not able to contact the cache service. 
Caching will be skipped'); + return false; +} +exports.isCacheFeatureAvailable = isCacheFeatureAvailable; /***/ }), @@ -59340,24 +60681,135 @@ exports.isCacheFeatureAvailable = isCacheFeatureAvailable; /***/ ((__unused_webpack_module, exports) => { "use strict"; - -Object.defineProperty(exports, "__esModule", ({ value: true })); -exports.Outputs = exports.State = exports.LockType = void 0; -var LockType; -(function (LockType) { - LockType["Npm"] = "npm"; - LockType["Pnpm"] = "pnpm"; - LockType["Yarn"] = "yarn"; -})(LockType = exports.LockType || (exports.LockType = {})); -var State; -(function (State) { - State["CachePrimaryKey"] = "CACHE_KEY"; - State["CacheMatchedKey"] = "CACHE_RESULT"; -})(State = exports.State || (exports.State = {})); -var Outputs; -(function (Outputs) { - Outputs["CacheHit"] = "cache-hit"; -})(Outputs = exports.Outputs || (exports.Outputs = {})); + +Object.defineProperty(exports, "__esModule", ({ value: true })); +exports.Outputs = exports.State = exports.LockType = void 0; +var LockType; +(function (LockType) { + LockType["Npm"] = "npm"; + LockType["Pnpm"] = "pnpm"; + LockType["Yarn"] = "yarn"; +})(LockType = exports.LockType || (exports.LockType = {})); +var State; +(function (State) { + State["CachePrimaryKey"] = "CACHE_KEY"; + State["CacheMatchedKey"] = "CACHE_RESULT"; + State["CachePaths"] = "CACHE_PATHS"; +})(State = exports.State || (exports.State = {})); +var Outputs; +(function (Outputs) { + Outputs["CacheHit"] = "cache-hit"; +})(Outputs = exports.Outputs || (exports.Outputs = {})); + + +/***/ }), + +/***/ 2629: +/***/ (function(__unused_webpack_module, exports, __nccwpck_require__) { + +"use strict"; + +var __createBinding = (this && this.__createBinding) || (Object.create ? (function(o, m, k, k2) { + if (k2 === undefined) k2 = k; + Object.defineProperty(o, k2, { enumerable: true, get: function() { return m[k]; } }); +}) : (function(o, m, k, k2) { + if (k2 === undefined) k2 = k; + o[k2] = m[k]; +})); +var __setModuleDefault = (this && this.__setModuleDefault) || (Object.create ? (function(o, v) { + Object.defineProperty(o, "default", { enumerable: true, value: v }); +}) : function(o, v) { + o["default"] = v; +}); +var __importStar = (this && this.__importStar) || function (mod) { + if (mod && mod.__esModule) return mod; + var result = {}; + if (mod != null) for (var k in mod) if (k !== "default" && Object.prototype.hasOwnProperty.call(mod, k)) __createBinding(result, mod, k); + __setModuleDefault(result, mod); + return result; +}; +var __awaiter = (this && this.__awaiter) || function (thisArg, _arguments, P, generator) { + function adopt(value) { return value instanceof P ? value : new P(function (resolve) { resolve(value); }); } + return new (P || (P = Promise))(function (resolve, reject) { + function fulfilled(value) { try { step(generator.next(value)); } catch (e) { reject(e); } } + function rejected(value) { try { step(generator["throw"](value)); } catch (e) { reject(e); } } + function step(result) { result.done ? 
resolve(result.value) : adopt(result.value).then(fulfilled, rejected); } + step((generator = generator.apply(thisArg, _arguments || [])).next()); + }); +}; +Object.defineProperty(exports, "__esModule", ({ value: true })); +exports.unique = exports.printEnvDetailsAndSetOutput = exports.parseNodeVersionFile = void 0; +const core = __importStar(__nccwpck_require__(2186)); +const exec = __importStar(__nccwpck_require__(1514)); +function parseNodeVersionFile(contents) { + var _a, _b, _c; + let nodeVersion; + // Try parsing the file as an NPM `package.json` file. + try { + nodeVersion = (_a = JSON.parse(contents).volta) === null || _a === void 0 ? void 0 : _a.node; + if (!nodeVersion) + nodeVersion = (_b = JSON.parse(contents).engines) === null || _b === void 0 ? void 0 : _b.node; + } + catch (_d) { + core.info('Node version file is not JSON file'); + } + if (!nodeVersion) { + const found = contents.match(/^(?:node(js)?\s+)?v?(?[^\s]+)$/m); + nodeVersion = (_c = found === null || found === void 0 ? void 0 : found.groups) === null || _c === void 0 ? void 0 : _c.version; + } + // In the case of an unknown format, + // return as is and evaluate the version separately. + if (!nodeVersion) + nodeVersion = contents.trim(); + return nodeVersion; +} +exports.parseNodeVersionFile = parseNodeVersionFile; +function printEnvDetailsAndSetOutput() { + return __awaiter(this, void 0, void 0, function* () { + core.startGroup('Environment details'); + const promises = ['node', 'npm', 'yarn'].map((tool) => __awaiter(this, void 0, void 0, function* () { + const output = yield getToolVersion(tool, ['--version']); + return { tool, output }; + })); + const tools = yield Promise.all(promises); + tools.forEach(({ tool, output }) => { + if (tool === 'node') { + core.setOutput(`${tool}-version`, output); + } + core.info(`${tool}: ${output}`); + }); + core.endGroup(); + }); +} +exports.printEnvDetailsAndSetOutput = printEnvDetailsAndSetOutput; +function getToolVersion(tool, options) { + return __awaiter(this, void 0, void 0, function* () { + try { + const { stdout, stderr, exitCode } = yield exec.getExecOutput(tool, options, { + ignoreReturnCode: true, + silent: true + }); + if (exitCode > 0) { + core.info(`[warning]${stderr}`); + return ''; + } + return stdout.trim(); + } + catch (err) { + return ''; + } + }); +} +const unique = () => { + const encountered = new Set(); + return (value) => { + if (encountered.has(value)) + return false; + encountered.add(value); + return true; + }; +}; +exports.unique = unique; /***/ }), diff --git a/dist/setup/index.js b/dist/setup/index.js index 51aaaa16..00774971 100644 --- a/dist/setup/index.js +++ b/dist/setup/index.js @@ -6,6 +6,29 @@ "use strict"; +var __createBinding = (this && this.__createBinding) || (Object.create ? (function(o, m, k, k2) { + if (k2 === undefined) k2 = k; + var desc = Object.getOwnPropertyDescriptor(m, k); + if (!desc || ("get" in desc ? !m.__esModule : desc.writable || desc.configurable)) { + desc = { enumerable: true, get: function() { return m[k]; } }; + } + Object.defineProperty(o, k2, desc); +}) : (function(o, m, k, k2) { + if (k2 === undefined) k2 = k; + o[k2] = m[k]; +})); +var __setModuleDefault = (this && this.__setModuleDefault) || (Object.create ? 
(function(o, v) { + Object.defineProperty(o, "default", { enumerable: true, value: v }); +}) : function(o, v) { + o["default"] = v; +}); +var __importStar = (this && this.__importStar) || function (mod) { + if (mod && mod.__esModule) return mod; + var result = {}; + if (mod != null) for (var k in mod) if (k !== "default" && Object.prototype.hasOwnProperty.call(mod, k)) __createBinding(result, mod, k); + __setModuleDefault(result, mod); + return result; +}; var __awaiter = (this && this.__awaiter) || function (thisArg, _arguments, P, generator) { function adopt(value) { return value instanceof P ? value : new P(function (resolve) { resolve(value); }); } return new (P || (P = Promise))(function (resolve, reject) { @@ -15,14 +38,8 @@ var __awaiter = (this && this.__awaiter) || function (thisArg, _arguments, P, ge step((generator = generator.apply(thisArg, _arguments || [])).next()); }); }; -var __importStar = (this && this.__importStar) || function (mod) { - if (mod && mod.__esModule) return mod; - var result = {}; - if (mod != null) for (var k in mod) if (Object.hasOwnProperty.call(mod, k)) result[k] = mod[k]; - result["default"] = mod; - return result; -}; Object.defineProperty(exports, "__esModule", ({ value: true })); +exports.saveCache = exports.restoreCache = exports.isFeatureAvailable = exports.ReserveCacheError = exports.ValidationError = void 0; const core = __importStar(__nccwpck_require__(2186)); const path = __importStar(__nccwpck_require__(1017)); const utils = __importStar(__nccwpck_require__(1518)); @@ -74,9 +91,10 @@ exports.isFeatureAvailable = isFeatureAvailable; * @param primaryKey an explicit key for restoring the cache * @param restoreKeys an optional ordered list of keys to use for restoring the cache if no cache hit occurred for key * @param downloadOptions cache download options + * @param enableCrossOsArchive an optional boolean enabled to restore on windows any cache created on any platform * @returns string returns the key for the cache hit, otherwise returns undefined */ -function restoreCache(paths, primaryKey, restoreKeys, options) { +function restoreCache(paths, primaryKey, restoreKeys, options, enableCrossOsArchive = false) { return __awaiter(this, void 0, void 0, function* () { checkPaths(paths); restoreKeys = restoreKeys || []; @@ -94,22 +112,27 @@ function restoreCache(paths, primaryKey, restoreKeys, options) { try { // path are needed to compute version const cacheEntry = yield cacheHttpClient.getCacheEntry(keys, paths, { - compressionMethod + compressionMethod, + enableCrossOsArchive }); if (!(cacheEntry === null || cacheEntry === void 0 ? void 0 : cacheEntry.archiveLocation)) { // Cache not found return undefined; } + if (options === null || options === void 0 ? 
void 0 : options.lookupOnly) { + core.info('Lookup only - skipping download'); + return cacheEntry.cacheKey; + } archivePath = path.join(yield utils.createTempDirectory(), utils.getCacheFileName(compressionMethod)); core.debug(`Archive Path: ${archivePath}`); // Download the cache from the cache entry yield cacheHttpClient.downloadCache(cacheEntry.archiveLocation, archivePath, options); if (core.isDebug()) { - yield tar_1.listTar(archivePath, compressionMethod); + yield (0, tar_1.listTar)(archivePath, compressionMethod); } const archiveFileSize = utils.getArchiveFileSizeInBytes(archivePath); core.info(`Cache Size: ~${Math.round(archiveFileSize / (1024 * 1024))} MB (${archiveFileSize} B)`); - yield tar_1.extractTar(archivePath, compressionMethod); + yield (0, tar_1.extractTar)(archivePath, compressionMethod); core.info('Cache restored successfully'); return cacheEntry.cacheKey; } @@ -141,10 +164,11 @@ exports.restoreCache = restoreCache; * * @param paths a list of file paths to be cached * @param key an explicit key for restoring the cache + * @param enableCrossOsArchive an optional boolean enabled to save cache on windows which could be restored on any platform * @param options cache upload options * @returns number returns cacheId if the cache was saved successfully and throws an error if save fails */ -function saveCache(paths, key, options) { +function saveCache(paths, key, options, enableCrossOsArchive = false) { var _a, _b, _c, _d, _e; return __awaiter(this, void 0, void 0, function* () { checkPaths(paths); @@ -161,9 +185,9 @@ function saveCache(paths, key, options) { const archivePath = path.join(archiveFolder, utils.getCacheFileName(compressionMethod)); core.debug(`Archive Path: ${archivePath}`); try { - yield tar_1.createTar(archiveFolder, cachePaths, compressionMethod); + yield (0, tar_1.createTar)(archiveFolder, cachePaths, compressionMethod); if (core.isDebug()) { - yield tar_1.listTar(archivePath, compressionMethod); + yield (0, tar_1.listTar)(archivePath, compressionMethod); } const fileSizeLimit = 10 * 1024 * 1024 * 1024; // 10GB per repo limit const archiveFileSize = utils.getArchiveFileSizeInBytes(archivePath); @@ -175,6 +199,7 @@ function saveCache(paths, key, options) { core.debug('Reserving Cache'); const reserveCacheResponse = yield cacheHttpClient.reserveCache(key, paths, { compressionMethod, + enableCrossOsArchive, cacheSize: archiveFileSize }); if ((_a = reserveCacheResponse === null || reserveCacheResponse === void 0 ? void 0 : reserveCacheResponse.result) === null || _a === void 0 ? void 0 : _a.cacheId) { @@ -223,6 +248,29 @@ exports.saveCache = saveCache; "use strict"; +var __createBinding = (this && this.__createBinding) || (Object.create ? (function(o, m, k, k2) { + if (k2 === undefined) k2 = k; + var desc = Object.getOwnPropertyDescriptor(m, k); + if (!desc || ("get" in desc ? !m.__esModule : desc.writable || desc.configurable)) { + desc = { enumerable: true, get: function() { return m[k]; } }; + } + Object.defineProperty(o, k2, desc); +}) : (function(o, m, k, k2) { + if (k2 === undefined) k2 = k; + o[k2] = m[k]; +})); +var __setModuleDefault = (this && this.__setModuleDefault) || (Object.create ? 
(function(o, v) { + Object.defineProperty(o, "default", { enumerable: true, value: v }); +}) : function(o, v) { + o["default"] = v; +}); +var __importStar = (this && this.__importStar) || function (mod) { + if (mod && mod.__esModule) return mod; + var result = {}; + if (mod != null) for (var k in mod) if (k !== "default" && Object.prototype.hasOwnProperty.call(mod, k)) __createBinding(result, mod, k); + __setModuleDefault(result, mod); + return result; +}; var __awaiter = (this && this.__awaiter) || function (thisArg, _arguments, P, generator) { function adopt(value) { return value instanceof P ? value : new P(function (resolve) { resolve(value); }); } return new (P || (P = Promise))(function (resolve, reject) { @@ -232,14 +280,8 @@ var __awaiter = (this && this.__awaiter) || function (thisArg, _arguments, P, ge step((generator = generator.apply(thisArg, _arguments || [])).next()); }); }; -var __importStar = (this && this.__importStar) || function (mod) { - if (mod && mod.__esModule) return mod; - var result = {}; - if (mod != null) for (var k in mod) if (Object.hasOwnProperty.call(mod, k)) result[k] = mod[k]; - result["default"] = mod; - return result; -}; Object.defineProperty(exports, "__esModule", ({ value: true })); +exports.saveCache = exports.reserveCache = exports.downloadCache = exports.getCacheEntry = exports.getCacheVersion = void 0; const core = __importStar(__nccwpck_require__(2186)); const http_client_1 = __nccwpck_require__(1825); const auth_1 = __nccwpck_require__(5936); @@ -247,7 +289,6 @@ const crypto = __importStar(__nccwpck_require__(6113)); const fs = __importStar(__nccwpck_require__(7147)); const url_1 = __nccwpck_require__(7310); const utils = __importStar(__nccwpck_require__(1518)); -const constants_1 = __nccwpck_require__(8840); const downloadUtils_1 = __nccwpck_require__(5500); const options_1 = __nccwpck_require__(6215); const requestUtils_1 = __nccwpck_require__(3981); @@ -277,10 +318,17 @@ function createHttpClient() { const bearerCredentialHandler = new auth_1.BearerCredentialHandler(token); return new http_client_1.HttpClient('actions/cache', [bearerCredentialHandler], getRequestOptions()); } -function getCacheVersion(paths, compressionMethod) { - const components = paths.concat(!compressionMethod || compressionMethod === constants_1.CompressionMethod.Gzip - ? [] - : [compressionMethod]); +function getCacheVersion(paths, compressionMethod, enableCrossOsArchive = false) { + const components = paths; + // Add compression method to cache version to restore + // compressed cache as per compression method + if (compressionMethod) { + components.push(compressionMethod); + } + // Only check for windows platforms if enableCrossOsArchive is false + if (process.platform === 'win32' && !enableCrossOsArchive) { + components.push('windows-only'); + } // Add salt to cache version to support breaking changes in cache entry components.push(versionSalt); return crypto @@ -292,18 +340,24 @@ exports.getCacheVersion = getCacheVersion; function getCacheEntry(keys, paths, options) { return __awaiter(this, void 0, void 0, function* () { const httpClient = createHttpClient(); - const version = getCacheVersion(paths, options === null || options === void 0 ? void 0 : options.compressionMethod); + const version = getCacheVersion(paths, options === null || options === void 0 ? void 0 : options.compressionMethod, options === null || options === void 0 ? 
void 0 : options.enableCrossOsArchive); const resource = `cache?keys=${encodeURIComponent(keys.join(','))}&version=${version}`; - const response = yield requestUtils_1.retryTypedResponse('getCacheEntry', () => __awaiter(this, void 0, void 0, function* () { return httpClient.getJson(getCacheApiUrl(resource)); })); + const response = yield (0, requestUtils_1.retryTypedResponse)('getCacheEntry', () => __awaiter(this, void 0, void 0, function* () { return httpClient.getJson(getCacheApiUrl(resource)); })); + // Cache not found if (response.statusCode === 204) { + // List cache for primary key only if cache miss occurs + if (core.isDebug()) { + yield printCachesListForDiagnostics(keys[0], httpClient, version); + } return null; } - if (!requestUtils_1.isSuccessStatusCode(response.statusCode)) { + if (!(0, requestUtils_1.isSuccessStatusCode)(response.statusCode)) { throw new Error(`Cache service responded with ${response.statusCode}`); } const cacheResult = response.result; const cacheDownloadUrl = cacheResult === null || cacheResult === void 0 ? void 0 : cacheResult.archiveLocation; if (!cacheDownloadUrl) { + // Cache achiveLocation not found. This should never happen, and hence bail out. throw new Error('Cache not found.'); } core.setSecret(cacheDownloadUrl); @@ -313,18 +367,34 @@ function getCacheEntry(keys, paths, options) { }); } exports.getCacheEntry = getCacheEntry; +function printCachesListForDiagnostics(key, httpClient, version) { + return __awaiter(this, void 0, void 0, function* () { + const resource = `caches?key=${encodeURIComponent(key)}`; + const response = yield (0, requestUtils_1.retryTypedResponse)('listCache', () => __awaiter(this, void 0, void 0, function* () { return httpClient.getJson(getCacheApiUrl(resource)); })); + if (response.statusCode === 200) { + const cacheListResult = response.result; + const totalCount = cacheListResult === null || cacheListResult === void 0 ? void 0 : cacheListResult.totalCount; + if (totalCount && totalCount > 0) { + core.debug(`No matching cache found for cache key '${key}', version '${version} and scope ${process.env['GITHUB_REF']}. There exist one or more cache(s) with similar key but they have different version or scope. See more info on cache matching here: https://docs.github.com/en/actions/using-workflows/caching-dependencies-to-speed-up-workflows#matching-a-cache-key \nOther caches with similar key:`); + for (const cacheEntry of (cacheListResult === null || cacheListResult === void 0 ? void 0 : cacheListResult.artifactCaches) || []) { + core.debug(`Cache Key: ${cacheEntry === null || cacheEntry === void 0 ? void 0 : cacheEntry.cacheKey}, Cache Version: ${cacheEntry === null || cacheEntry === void 0 ? void 0 : cacheEntry.cacheVersion}, Cache Scope: ${cacheEntry === null || cacheEntry === void 0 ? void 0 : cacheEntry.scope}, Cache Created: ${cacheEntry === null || cacheEntry === void 0 ? void 0 : cacheEntry.creationTime}`); + } + } + } + }); +} function downloadCache(archiveLocation, archivePath, options) { return __awaiter(this, void 0, void 0, function* () { const archiveUrl = new url_1.URL(archiveLocation); - const downloadOptions = options_1.getDownloadOptions(options); + const downloadOptions = (0, options_1.getDownloadOptions)(options); if (downloadOptions.useAzureSdk && archiveUrl.hostname.endsWith('.blob.core.windows.net')) { // Use Azure storage SDK to download caches hosted on Azure to improve speed and reliability. 
- yield downloadUtils_1.downloadCacheStorageSDK(archiveLocation, archivePath, downloadOptions); + yield (0, downloadUtils_1.downloadCacheStorageSDK)(archiveLocation, archivePath, downloadOptions); } else { // Otherwise, download using the Actions http-client. - yield downloadUtils_1.downloadCacheHttpClient(archiveLocation, archivePath); + yield (0, downloadUtils_1.downloadCacheHttpClient)(archiveLocation, archivePath); } }); } @@ -333,13 +403,13 @@ exports.downloadCache = downloadCache; function reserveCache(key, paths, options) { return __awaiter(this, void 0, void 0, function* () { const httpClient = createHttpClient(); - const version = getCacheVersion(paths, options === null || options === void 0 ? void 0 : options.compressionMethod); + const version = getCacheVersion(paths, options === null || options === void 0 ? void 0 : options.compressionMethod, options === null || options === void 0 ? void 0 : options.enableCrossOsArchive); const reserveCacheRequest = { key, version, cacheSize: options === null || options === void 0 ? void 0 : options.cacheSize }; - const response = yield requestUtils_1.retryTypedResponse('reserveCache', () => __awaiter(this, void 0, void 0, function* () { + const response = yield (0, requestUtils_1.retryTypedResponse)('reserveCache', () => __awaiter(this, void 0, void 0, function* () { return httpClient.postJson(getCacheApiUrl('caches'), reserveCacheRequest); })); return response; @@ -363,10 +433,10 @@ function uploadChunk(httpClient, resourceUrl, openStream, start, end) { 'Content-Type': 'application/octet-stream', 'Content-Range': getContentRange(start, end) }; - const uploadChunkResponse = yield requestUtils_1.retryHttpClientResponse(`uploadChunk (start: ${start}, end: ${end})`, () => __awaiter(this, void 0, void 0, function* () { + const uploadChunkResponse = yield (0, requestUtils_1.retryHttpClientResponse)(`uploadChunk (start: ${start}, end: ${end})`, () => __awaiter(this, void 0, void 0, function* () { return httpClient.sendStream('PATCH', resourceUrl, openStream(), additionalHeaders); })); - if (!requestUtils_1.isSuccessStatusCode(uploadChunkResponse.message.statusCode)) { + if (!(0, requestUtils_1.isSuccessStatusCode)(uploadChunkResponse.message.statusCode)) { throw new Error(`Cache service responded with ${uploadChunkResponse.message.statusCode} during upload chunk.`); } }); @@ -377,7 +447,7 @@ function uploadFile(httpClient, cacheId, archivePath, options) { const fileSize = utils.getArchiveFileSizeInBytes(archivePath); const resourceUrl = getCacheApiUrl(`caches/${cacheId.toString()}`); const fd = fs.openSync(archivePath, 'r'); - const uploadOptions = options_1.getUploadOptions(options); + const uploadOptions = (0, options_1.getUploadOptions)(options); const concurrency = utils.assertDefined('uploadConcurrency', uploadOptions.uploadConcurrency); const maxChunkSize = utils.assertDefined('uploadChunkSize', uploadOptions.uploadChunkSize); const parallelUploads = [...new Array(concurrency).keys()]; @@ -412,7 +482,7 @@ function uploadFile(httpClient, cacheId, archivePath, options) { function commitCache(httpClient, cacheId, filesize) { return __awaiter(this, void 0, void 0, function* () { const commitCacheRequest = { size: filesize }; - return yield requestUtils_1.retryTypedResponse('commitCache', () => __awaiter(this, void 0, void 0, function* () { + return yield (0, requestUtils_1.retryTypedResponse)('commitCache', () => __awaiter(this, void 0, void 0, function* () { return httpClient.postJson(getCacheApiUrl(`caches/${cacheId.toString()}`), 
commitCacheRequest); })); }); @@ -427,7 +497,7 @@ function saveCache(cacheId, archivePath, options) { const cacheSize = utils.getArchiveFileSizeInBytes(archivePath); core.info(`Cache Size: ~${Math.round(cacheSize / (1024 * 1024))} MB (${cacheSize} B)`); const commitCacheResponse = yield commitCache(httpClient, cacheId, cacheSize); - if (!requestUtils_1.isSuccessStatusCode(commitCacheResponse.statusCode)) { + if (!(0, requestUtils_1.isSuccessStatusCode)(commitCacheResponse.statusCode)) { throw new Error(`Cache service responded with ${commitCacheResponse.statusCode} during commit cache.`); } core.info('Cache saved successfully'); @@ -443,6 +513,29 @@ exports.saveCache = saveCache; "use strict"; +var __createBinding = (this && this.__createBinding) || (Object.create ? (function(o, m, k, k2) { + if (k2 === undefined) k2 = k; + var desc = Object.getOwnPropertyDescriptor(m, k); + if (!desc || ("get" in desc ? !m.__esModule : desc.writable || desc.configurable)) { + desc = { enumerable: true, get: function() { return m[k]; } }; + } + Object.defineProperty(o, k2, desc); +}) : (function(o, m, k, k2) { + if (k2 === undefined) k2 = k; + o[k2] = m[k]; +})); +var __setModuleDefault = (this && this.__setModuleDefault) || (Object.create ? (function(o, v) { + Object.defineProperty(o, "default", { enumerable: true, value: v }); +}) : function(o, v) { + o["default"] = v; +}); +var __importStar = (this && this.__importStar) || function (mod) { + if (mod && mod.__esModule) return mod; + var result = {}; + if (mod != null) for (var k in mod) if (k !== "default" && Object.prototype.hasOwnProperty.call(mod, k)) __createBinding(result, mod, k); + __setModuleDefault(result, mod); + return result; +}; var __awaiter = (this && this.__awaiter) || function (thisArg, _arguments, P, generator) { function adopt(value) { return value instanceof P ? 
value : new P(function (resolve) { resolve(value); }); } return new (P || (P = Promise))(function (resolve, reject) { @@ -459,14 +552,8 @@ var __asyncValues = (this && this.__asyncValues) || function (o) { function verb(n) { i[n] = o[n] && function (v) { return new Promise(function (resolve, reject) { v = o[n](v), settle(resolve, reject, v.done, v.value); }); }; } function settle(resolve, reject, d, v) { Promise.resolve(v).then(function(v) { resolve({ value: v, done: d }); }, reject); } }; -var __importStar = (this && this.__importStar) || function (mod) { - if (mod && mod.__esModule) return mod; - var result = {}; - if (mod != null) for (var k in mod) if (Object.hasOwnProperty.call(mod, k)) result[k] = mod[k]; - result["default"] = mod; - return result; -}; Object.defineProperty(exports, "__esModule", ({ value: true })); +exports.isGhes = exports.assertDefined = exports.getGnuTarPathOnWindows = exports.getCacheFileName = exports.getCompressionMethod = exports.unlinkFile = exports.resolvePaths = exports.getArchiveFileSizeInBytes = exports.createTempDirectory = void 0; const core = __importStar(__nccwpck_require__(2186)); const exec = __importStar(__nccwpck_require__(1514)); const glob = __importStar(__nccwpck_require__(1597)); @@ -498,7 +585,7 @@ function createTempDirectory() { } tempDirectory = path.join(baseLocation, 'actions', 'temp'); } - const dest = path.join(tempDirectory, uuid_1.v4()); + const dest = path.join(tempDirectory, (0, uuid_1.v4)()); yield io.mkdirP(dest); return dest; }); @@ -551,12 +638,13 @@ function unlinkFile(filePath) { }); } exports.unlinkFile = unlinkFile; -function getVersion(app) { +function getVersion(app, additionalArgs = []) { return __awaiter(this, void 0, void 0, function* () { - core.debug(`Checking ${app} --version`); let versionOutput = ''; + additionalArgs.push('--version'); + core.debug(`Checking ${app} ${additionalArgs.join(' ')}`); try { - yield exec.exec(`${app} --version`, [], { + yield exec.exec(`${app}`, additionalArgs, { ignoreReturnCode: true, silent: true, listeners: { @@ -576,23 +664,14 @@ function getVersion(app) { // Use zstandard if possible to maximize cache performance function getCompressionMethod() { return __awaiter(this, void 0, void 0, function* () { - if (process.platform === 'win32' && !(yield isGnuTarInstalled())) { - // Disable zstd due to bug https://github.com/actions/cache/issues/301 - return constants_1.CompressionMethod.Gzip; - } - const versionOutput = yield getVersion('zstd'); + const versionOutput = yield getVersion('zstd', ['--quiet']); const version = semver.clean(versionOutput); - if (!versionOutput.toLowerCase().includes('zstd command line interface')) { - // zstd is not installed + core.debug(`zstd version: ${version}`); + if (versionOutput === '') { return constants_1.CompressionMethod.Gzip; } - else if (!version || semver.lt(version, 'v1.3.2')) { - // zstd is installed but using a version earlier than v1.3.2 - // v1.3.2 is required to use the `--long` options in zstd - return constants_1.CompressionMethod.ZstdWithoutLong; - } else { - return constants_1.CompressionMethod.Zstd; + return constants_1.CompressionMethod.ZstdWithoutLong; } }); } @@ -603,13 +682,16 @@ function getCacheFileName(compressionMethod) { : constants_1.CacheFilename.Zstd; } exports.getCacheFileName = getCacheFileName; -function isGnuTarInstalled() { +function getGnuTarPathOnWindows() { return __awaiter(this, void 0, void 0, function* () { + if (fs.existsSync(constants_1.GnuTarPathOnWindows)) { + return constants_1.GnuTarPathOnWindows; + } const 
versionOutput = yield getVersion('tar'); - return versionOutput.toLowerCase().includes('gnu tar'); + return versionOutput.toLowerCase().includes('gnu tar') ? io.which('tar') : ''; }); } -exports.isGnuTarInstalled = isGnuTarInstalled; +exports.getGnuTarPathOnWindows = getGnuTarPathOnWindows; function assertDefined(name, value) { if (value === undefined) { throw Error(`Expected ${name} but value was undefiend`); @@ -632,6 +714,7 @@ exports.isGhes = isGhes; "use strict"; Object.defineProperty(exports, "__esModule", ({ value: true })); +exports.ManifestFilename = exports.TarFilename = exports.SystemTarPathOnWindows = exports.GnuTarPathOnWindows = exports.SocketTimeout = exports.DefaultRetryDelay = exports.DefaultRetryAttempts = exports.ArchiveToolType = exports.CompressionMethod = exports.CacheFilename = void 0; var CacheFilename; (function (CacheFilename) { CacheFilename["Gzip"] = "cache.tgz"; @@ -645,6 +728,11 @@ var CompressionMethod; CompressionMethod["ZstdWithoutLong"] = "zstd-without-long"; CompressionMethod["Zstd"] = "zstd"; })(CompressionMethod = exports.CompressionMethod || (exports.CompressionMethod = {})); +var ArchiveToolType; +(function (ArchiveToolType) { + ArchiveToolType["GNU"] = "gnu"; + ArchiveToolType["BSD"] = "bsd"; +})(ArchiveToolType = exports.ArchiveToolType || (exports.ArchiveToolType = {})); // The default number of retry attempts. exports.DefaultRetryAttempts = 2; // The default delay in milliseconds between retry attempts. @@ -653,6 +741,12 @@ exports.DefaultRetryDelay = 5000; // over the socket during this period, the socket is destroyed and the download // is aborted. exports.SocketTimeout = 5000; +// The default path of GNUtar on hosted Windows runners +exports.GnuTarPathOnWindows = `${process.env['PROGRAMFILES']}\\Git\\usr\\bin\\tar.exe`; +// The default path of BSDtar on hosted Windows runners +exports.SystemTarPathOnWindows = `${process.env['SYSTEMDRIVE']}\\Windows\\System32\\tar.exe`; +exports.TarFilename = 'cache.tar'; +exports.ManifestFilename = 'manifest.txt'; //# sourceMappingURL=constants.js.map /***/ }), @@ -662,6 +756,29 @@ exports.SocketTimeout = 5000; "use strict"; +var __createBinding = (this && this.__createBinding) || (Object.create ? (function(o, m, k, k2) { + if (k2 === undefined) k2 = k; + var desc = Object.getOwnPropertyDescriptor(m, k); + if (!desc || ("get" in desc ? !m.__esModule : desc.writable || desc.configurable)) { + desc = { enumerable: true, get: function() { return m[k]; } }; + } + Object.defineProperty(o, k2, desc); +}) : (function(o, m, k, k2) { + if (k2 === undefined) k2 = k; + o[k2] = m[k]; +})); +var __setModuleDefault = (this && this.__setModuleDefault) || (Object.create ? (function(o, v) { + Object.defineProperty(o, "default", { enumerable: true, value: v }); +}) : function(o, v) { + o["default"] = v; +}); +var __importStar = (this && this.__importStar) || function (mod) { + if (mod && mod.__esModule) return mod; + var result = {}; + if (mod != null) for (var k in mod) if (k !== "default" && Object.prototype.hasOwnProperty.call(mod, k)) __createBinding(result, mod, k); + __setModuleDefault(result, mod); + return result; +}; var __awaiter = (this && this.__awaiter) || function (thisArg, _arguments, P, generator) { function adopt(value) { return value instanceof P ? 
value : new P(function (resolve) { resolve(value); }); } return new (P || (P = Promise))(function (resolve, reject) { @@ -671,14 +788,8 @@ var __awaiter = (this && this.__awaiter) || function (thisArg, _arguments, P, ge step((generator = generator.apply(thisArg, _arguments || [])).next()); }); }; -var __importStar = (this && this.__importStar) || function (mod) { - if (mod && mod.__esModule) return mod; - var result = {}; - if (mod != null) for (var k in mod) if (Object.hasOwnProperty.call(mod, k)) result[k] = mod[k]; - result["default"] = mod; - return result; -}; Object.defineProperty(exports, "__esModule", ({ value: true })); +exports.downloadCacheStorageSDK = exports.downloadCacheHttpClient = exports.DownloadProgress = void 0; const core = __importStar(__nccwpck_require__(2186)); const http_client_1 = __nccwpck_require__(1825); const storage_blob_1 = __nccwpck_require__(4100); @@ -813,7 +924,7 @@ function downloadCacheHttpClient(archiveLocation, archivePath) { return __awaiter(this, void 0, void 0, function* () { const writeStream = fs.createWriteStream(archivePath); const httpClient = new http_client_1.HttpClient('actions/cache'); - const downloadResponse = yield requestUtils_1.retryHttpClientResponse('downloadCache', () => __awaiter(this, void 0, void 0, function* () { return httpClient.get(archiveLocation); })); + const downloadResponse = yield (0, requestUtils_1.retryHttpClientResponse)('downloadCache', () => __awaiter(this, void 0, void 0, function* () { return httpClient.get(archiveLocation); })); // Abort download if no traffic received over the socket. downloadResponse.message.socket.setTimeout(constants_1.SocketTimeout, () => { downloadResponse.message.destroy(); @@ -868,7 +979,8 @@ function downloadCacheStorageSDK(archiveLocation, archivePath, options) { // If the file exceeds the buffer maximum length (~1 GB on 32-bit systems and ~2 GB // on 64-bit systems), split the download into multiple segments // ~2 GB = 2147483647, beyond this, we start getting out of range error. So, capping it accordingly. - const maxSegmentSize = Math.min(2147483647, buffer.constants.MAX_LENGTH); + // Updated segment size to 128MB = 134217728 bytes, to complete a segment faster and fail fast + const maxSegmentSize = Math.min(134217728, buffer.constants.MAX_LENGTH); const downloadProgress = new DownloadProgress(contentLength); const fd = fs.openSync(archivePath, 'w'); try { @@ -920,6 +1032,29 @@ const promiseWithTimeout = (timeoutMs, promise) => __awaiter(void 0, void 0, voi "use strict"; +var __createBinding = (this && this.__createBinding) || (Object.create ? (function(o, m, k, k2) { + if (k2 === undefined) k2 = k; + var desc = Object.getOwnPropertyDescriptor(m, k); + if (!desc || ("get" in desc ? !m.__esModule : desc.writable || desc.configurable)) { + desc = { enumerable: true, get: function() { return m[k]; } }; + } + Object.defineProperty(o, k2, desc); +}) : (function(o, m, k, k2) { + if (k2 === undefined) k2 = k; + o[k2] = m[k]; +})); +var __setModuleDefault = (this && this.__setModuleDefault) || (Object.create ? 
(function(o, v) { + Object.defineProperty(o, "default", { enumerable: true, value: v }); +}) : function(o, v) { + o["default"] = v; +}); +var __importStar = (this && this.__importStar) || function (mod) { + if (mod && mod.__esModule) return mod; + var result = {}; + if (mod != null) for (var k in mod) if (k !== "default" && Object.prototype.hasOwnProperty.call(mod, k)) __createBinding(result, mod, k); + __setModuleDefault(result, mod); + return result; +}; var __awaiter = (this && this.__awaiter) || function (thisArg, _arguments, P, generator) { function adopt(value) { return value instanceof P ? value : new P(function (resolve) { resolve(value); }); } return new (P || (P = Promise))(function (resolve, reject) { @@ -929,14 +1064,8 @@ var __awaiter = (this && this.__awaiter) || function (thisArg, _arguments, P, ge step((generator = generator.apply(thisArg, _arguments || [])).next()); }); }; -var __importStar = (this && this.__importStar) || function (mod) { - if (mod && mod.__esModule) return mod; - var result = {}; - if (mod != null) for (var k in mod) if (Object.hasOwnProperty.call(mod, k)) result[k] = mod[k]; - result["default"] = mod; - return result; -}; Object.defineProperty(exports, "__esModule", ({ value: true })); +exports.retryHttpClientResponse = exports.retryTypedResponse = exports.retry = exports.isRetryableStatusCode = exports.isServerErrorStatusCode = exports.isSuccessStatusCode = void 0; const core = __importStar(__nccwpck_require__(2186)); const http_client_1 = __nccwpck_require__(1825); const constants_1 = __nccwpck_require__(8840); @@ -1047,6 +1176,29 @@ exports.retryHttpClientResponse = retryHttpClientResponse; "use strict"; +var __createBinding = (this && this.__createBinding) || (Object.create ? (function(o, m, k, k2) { + if (k2 === undefined) k2 = k; + var desc = Object.getOwnPropertyDescriptor(m, k); + if (!desc || ("get" in desc ? !m.__esModule : desc.writable || desc.configurable)) { + desc = { enumerable: true, get: function() { return m[k]; } }; + } + Object.defineProperty(o, k2, desc); +}) : (function(o, m, k, k2) { + if (k2 === undefined) k2 = k; + o[k2] = m[k]; +})); +var __setModuleDefault = (this && this.__setModuleDefault) || (Object.create ? (function(o, v) { + Object.defineProperty(o, "default", { enumerable: true, value: v }); +}) : function(o, v) { + o["default"] = v; +}); +var __importStar = (this && this.__importStar) || function (mod) { + if (mod && mod.__esModule) return mod; + var result = {}; + if (mod != null) for (var k in mod) if (k !== "default" && Object.prototype.hasOwnProperty.call(mod, k)) __createBinding(result, mod, k); + __setModuleDefault(result, mod); + return result; +}; var __awaiter = (this && this.__awaiter) || function (thisArg, _arguments, P, generator) { function adopt(value) { return value instanceof P ? 
value : new P(function (resolve) { resolve(value); }); } return new (P || (P = Promise))(function (resolve, reject) { @@ -1056,14 +1208,8 @@ var __awaiter = (this && this.__awaiter) || function (thisArg, _arguments, P, ge step((generator = generator.apply(thisArg, _arguments || [])).next()); }); }; -var __importStar = (this && this.__importStar) || function (mod) { - if (mod && mod.__esModule) return mod; - var result = {}; - if (mod != null) for (var k in mod) if (Object.hasOwnProperty.call(mod, k)) result[k] = mod[k]; - result["default"] = mod; - return result; -}; Object.defineProperty(exports, "__esModule", ({ value: true })); +exports.createTar = exports.extractTar = exports.listTar = void 0; const exec_1 = __nccwpck_require__(1514); const io = __importStar(__nccwpck_require__(7436)); const fs_1 = __nccwpck_require__(7147); @@ -1071,21 +1217,19 @@ const path = __importStar(__nccwpck_require__(1017)); const utils = __importStar(__nccwpck_require__(1518)); const constants_1 = __nccwpck_require__(8840); const IS_WINDOWS = process.platform === 'win32'; -function getTarPath(args, compressionMethod) { +// Returns tar path and type: BSD or GNU +function getTarPath() { return __awaiter(this, void 0, void 0, function* () { switch (process.platform) { case 'win32': { - const systemTar = `${process.env['windir']}\\System32\\tar.exe`; - if (compressionMethod !== constants_1.CompressionMethod.Gzip) { - // We only use zstandard compression on windows when gnu tar is installed due to - // a bug with compressing large files with bsdtar + zstd - args.push('--force-local'); + const gnuTar = yield utils.getGnuTarPathOnWindows(); + const systemTar = constants_1.SystemTarPathOnWindows; + if (gnuTar) { + // Use GNUtar as default on windows + return { path: gnuTar, type: constants_1.ArchiveToolType.GNU }; } - else if (fs_1.existsSync(systemTar)) { - return systemTar; - } - else if (yield utils.isGnuTarInstalled()) { - args.push('--force-local'); + else if ((0, fs_1.existsSync)(systemTar)) { + return { path: systemTar, type: constants_1.ArchiveToolType.BSD }; } break; } @@ -1093,25 +1237,92 @@ function getTarPath(args, compressionMethod) { const gnuTar = yield io.which('gtar', false); if (gnuTar) { // fix permission denied errors when extracting BSD tar archive with GNU tar - https://github.com/actions/cache/issues/527 - args.push('--delay-directory-restore'); - return gnuTar; + return { path: gnuTar, type: constants_1.ArchiveToolType.GNU }; + } + else { + return { + path: yield io.which('tar', true), + type: constants_1.ArchiveToolType.BSD + }; } - break; } default: break; } - return yield io.which('tar', true); + // Default assumption is GNU tar is present in path + return { + path: yield io.which('tar', true), + type: constants_1.ArchiveToolType.GNU + }; }); } -function execTar(args, compressionMethod, cwd) { +// Return arguments for tar as per tarPath, compressionMethod, method type and os +function getTarArgs(tarPath, compressionMethod, type, archivePath = '') { return __awaiter(this, void 0, void 0, function* () { - try { - yield exec_1.exec(`"${yield getTarPath(args, compressionMethod)}"`, args, { cwd }); + const args = [`"${tarPath.path}"`]; + const cacheFileName = utils.getCacheFileName(compressionMethod); + const tarFile = 'cache.tar'; + const workingDirectory = getWorkingDirectory(); + // Speficic args for BSD tar on windows for workaround + const BSD_TAR_ZSTD = tarPath.type === constants_1.ArchiveToolType.BSD && + compressionMethod !== constants_1.CompressionMethod.Gzip && + IS_WINDOWS; + // 
Method specific args + switch (type) { + case 'create': + args.push('--posix', '-cf', BSD_TAR_ZSTD + ? tarFile + : cacheFileName.replace(new RegExp(`\\${path.sep}`, 'g'), '/'), '--exclude', BSD_TAR_ZSTD + ? tarFile + : cacheFileName.replace(new RegExp(`\\${path.sep}`, 'g'), '/'), '-P', '-C', workingDirectory.replace(new RegExp(`\\${path.sep}`, 'g'), '/'), '--files-from', constants_1.ManifestFilename); + break; + case 'extract': + args.push('-xf', BSD_TAR_ZSTD + ? tarFile + : archivePath.replace(new RegExp(`\\${path.sep}`, 'g'), '/'), '-P', '-C', workingDirectory.replace(new RegExp(`\\${path.sep}`, 'g'), '/')); + break; + case 'list': + args.push('-tf', BSD_TAR_ZSTD + ? tarFile + : archivePath.replace(new RegExp(`\\${path.sep}`, 'g'), '/'), '-P'); + break; } - catch (error) { - throw new Error(`Tar failed with error: ${error === null || error === void 0 ? void 0 : error.message}`); + // Platform specific args + if (tarPath.type === constants_1.ArchiveToolType.GNU) { + switch (process.platform) { + case 'win32': + args.push('--force-local'); + break; + case 'darwin': + args.push('--delay-directory-restore'); + break; + } } + return args; + }); +} +// Returns commands to run tar and compression program +function getCommands(compressionMethod, type, archivePath = '') { + return __awaiter(this, void 0, void 0, function* () { + let args; + const tarPath = yield getTarPath(); + const tarArgs = yield getTarArgs(tarPath, compressionMethod, type, archivePath); + const compressionArgs = type !== 'create' + ? yield getDecompressionProgram(tarPath, compressionMethod, archivePath) + : yield getCompressionProgram(tarPath, compressionMethod); + const BSD_TAR_ZSTD = tarPath.type === constants_1.ArchiveToolType.BSD && + compressionMethod !== constants_1.CompressionMethod.Gzip && + IS_WINDOWS; + if (BSD_TAR_ZSTD && type !== 'create') { + args = [[...compressionArgs].join(' '), [...tarArgs].join(' ')]; + } + else { + args = [[...tarArgs].join(' '), [...compressionArgs].join(' ')]; + } + if (BSD_TAR_ZSTD) { + return args; + } + return [args.join(' ')]; }); } function getWorkingDirectory() { @@ -1119,91 +1330,119 @@ function getWorkingDirectory() { return (_a = process.env['GITHUB_WORKSPACE']) !== null && _a !== void 0 ? _a : process.cwd(); } // Common function for extractTar and listTar to get the compression method -function getCompressionProgram(compressionMethod) { - // -d: Decompress. - // unzstd is equivalent to 'zstd -d' - // --long=#: Enables long distance matching with # bits. Maximum is 30 (1GB) on 32-bit OS and 31 (2GB) on 64-bit. - // Using 30 here because we also support 32-bit self-hosted runners. - switch (compressionMethod) { - case constants_1.CompressionMethod.Zstd: - return [ - '--use-compress-program', - IS_WINDOWS ? 'zstd -d --long=30' : 'unzstd --long=30' - ]; - case constants_1.CompressionMethod.ZstdWithoutLong: - return ['--use-compress-program', IS_WINDOWS ? 'zstd -d' : 'unzstd']; - default: - return ['-z']; - } +function getDecompressionProgram(tarPath, compressionMethod, archivePath) { + return __awaiter(this, void 0, void 0, function* () { + // -d: Decompress. + // unzstd is equivalent to 'zstd -d' + // --long=#: Enables long distance matching with # bits. Maximum is 30 (1GB) on 32-bit OS and 31 (2GB) on 64-bit. + // Using 30 here because we also support 32-bit self-hosted runners. 
+ const BSD_TAR_ZSTD = tarPath.type === constants_1.ArchiveToolType.BSD && + compressionMethod !== constants_1.CompressionMethod.Gzip && + IS_WINDOWS; + switch (compressionMethod) { + case constants_1.CompressionMethod.Zstd: + return BSD_TAR_ZSTD + ? [ + 'zstd -d --long=30 --force -o', + constants_1.TarFilename, + archivePath.replace(new RegExp(`\\${path.sep}`, 'g'), '/') + ] + : [ + '--use-compress-program', + IS_WINDOWS ? '"zstd -d --long=30"' : 'unzstd --long=30' + ]; + case constants_1.CompressionMethod.ZstdWithoutLong: + return BSD_TAR_ZSTD + ? [ + 'zstd -d --force -o', + constants_1.TarFilename, + archivePath.replace(new RegExp(`\\${path.sep}`, 'g'), '/') + ] + : ['--use-compress-program', IS_WINDOWS ? '"zstd -d"' : 'unzstd']; + default: + return ['-z']; + } + }); } +// Used for creating the archive +// -T#: Compress using # working thread. If # is 0, attempt to detect and use the number of physical CPU cores. +// zstdmt is equivalent to 'zstd -T0' +// --long=#: Enables long distance matching with # bits. Maximum is 30 (1GB) on 32-bit OS and 31 (2GB) on 64-bit. +// Using 30 here because we also support 32-bit self-hosted runners. +// Long range mode is added to zstd in v1.3.2 release, so we will not use --long in older version of zstd. +function getCompressionProgram(tarPath, compressionMethod) { + return __awaiter(this, void 0, void 0, function* () { + const cacheFileName = utils.getCacheFileName(compressionMethod); + const BSD_TAR_ZSTD = tarPath.type === constants_1.ArchiveToolType.BSD && + compressionMethod !== constants_1.CompressionMethod.Gzip && + IS_WINDOWS; + switch (compressionMethod) { + case constants_1.CompressionMethod.Zstd: + return BSD_TAR_ZSTD + ? [ + 'zstd -T0 --long=30 --force -o', + cacheFileName.replace(new RegExp(`\\${path.sep}`, 'g'), '/'), + constants_1.TarFilename + ] + : [ + '--use-compress-program', + IS_WINDOWS ? '"zstd -T0 --long=30"' : 'zstdmt --long=30' + ]; + case constants_1.CompressionMethod.ZstdWithoutLong: + return BSD_TAR_ZSTD + ? [ + 'zstd -T0 --force -o', + cacheFileName.replace(new RegExp(`\\${path.sep}`, 'g'), '/'), + constants_1.TarFilename + ] + : ['--use-compress-program', IS_WINDOWS ? '"zstd -T0"' : 'zstdmt']; + default: + return ['-z']; + } + }); +} +// Executes all commands as separate processes +function execCommands(commands, cwd) { + return __awaiter(this, void 0, void 0, function* () { + for (const command of commands) { + try { + yield (0, exec_1.exec)(command, undefined, { + cwd, + env: Object.assign(Object.assign({}, process.env), { MSYS: 'winsymlinks:nativestrict' }) + }); + } + catch (error) { + throw new Error(`${command.split(' ')[0]} failed with error: ${error === null || error === void 0 ? 
void 0 : error.message}`); + } + } + }); +} +// List the contents of a tar function listTar(archivePath, compressionMethod) { return __awaiter(this, void 0, void 0, function* () { - const args = [ - ...getCompressionProgram(compressionMethod), - '-tf', - archivePath.replace(new RegExp(`\\${path.sep}`, 'g'), '/'), - '-P' - ]; - yield execTar(args, compressionMethod); + const commands = yield getCommands(compressionMethod, 'list', archivePath); + yield execCommands(commands); }); } exports.listTar = listTar; +// Extract a tar function extractTar(archivePath, compressionMethod) { return __awaiter(this, void 0, void 0, function* () { // Create directory to extract tar into const workingDirectory = getWorkingDirectory(); yield io.mkdirP(workingDirectory); - const args = [ - ...getCompressionProgram(compressionMethod), - '-xf', - archivePath.replace(new RegExp(`\\${path.sep}`, 'g'), '/'), - '-P', - '-C', - workingDirectory.replace(new RegExp(`\\${path.sep}`, 'g'), '/') - ]; - yield execTar(args, compressionMethod); + const commands = yield getCommands(compressionMethod, 'extract', archivePath); + yield execCommands(commands); }); } exports.extractTar = extractTar; +// Create a tar function createTar(archiveFolder, sourceDirectories, compressionMethod) { return __awaiter(this, void 0, void 0, function* () { // Write source directories to manifest.txt to avoid command length limits - const manifestFilename = 'manifest.txt'; - const cacheFileName = utils.getCacheFileName(compressionMethod); - fs_1.writeFileSync(path.join(archiveFolder, manifestFilename), sourceDirectories.join('\n')); - const workingDirectory = getWorkingDirectory(); - // -T#: Compress using # working thread. If # is 0, attempt to detect and use the number of physical CPU cores. - // zstdmt is equivalent to 'zstd -T0' - // --long=#: Enables long distance matching with # bits. Maximum is 30 (1GB) on 32-bit OS and 31 (2GB) on 64-bit. - // Using 30 here because we also support 32-bit self-hosted runners. - // Long range mode is added to zstd in v1.3.2 release, so we will not use --long in older version of zstd. - function getCompressionProgram() { - switch (compressionMethod) { - case constants_1.CompressionMethod.Zstd: - return [ - '--use-compress-program', - IS_WINDOWS ? 'zstd -T0 --long=30' : 'zstdmt --long=30' - ]; - case constants_1.CompressionMethod.ZstdWithoutLong: - return ['--use-compress-program', IS_WINDOWS ? 'zstd -T0' : 'zstdmt']; - default: - return ['-z']; - } - } - const args = [ - '--posix', - ...getCompressionProgram(), - '-cf', - cacheFileName.replace(new RegExp(`\\${path.sep}`, 'g'), '/'), - '--exclude', - cacheFileName.replace(new RegExp(`\\${path.sep}`, 'g'), '/'), - '-P', - '-C', - workingDirectory.replace(new RegExp(`\\${path.sep}`, 'g'), '/'), - '--files-from', - manifestFilename - ]; - yield execTar(args, compressionMethod, archiveFolder); + (0, fs_1.writeFileSync)(path.join(archiveFolder, constants_1.ManifestFilename), sourceDirectories.join('\n')); + const commands = yield getCommands(compressionMethod, 'create'); + yield execCommands(commands, archiveFolder); }); } exports.createTar = createTar; @@ -1216,14 +1455,31 @@ exports.createTar = createTar; "use strict"; +var __createBinding = (this && this.__createBinding) || (Object.create ? (function(o, m, k, k2) { + if (k2 === undefined) k2 = k; + var desc = Object.getOwnPropertyDescriptor(m, k); + if (!desc || ("get" in desc ? 
!m.__esModule : desc.writable || desc.configurable)) { + desc = { enumerable: true, get: function() { return m[k]; } }; + } + Object.defineProperty(o, k2, desc); +}) : (function(o, m, k, k2) { + if (k2 === undefined) k2 = k; + o[k2] = m[k]; +})); +var __setModuleDefault = (this && this.__setModuleDefault) || (Object.create ? (function(o, v) { + Object.defineProperty(o, "default", { enumerable: true, value: v }); +}) : function(o, v) { + o["default"] = v; +}); var __importStar = (this && this.__importStar) || function (mod) { if (mod && mod.__esModule) return mod; var result = {}; - if (mod != null) for (var k in mod) if (Object.hasOwnProperty.call(mod, k)) result[k] = mod[k]; - result["default"] = mod; + if (mod != null) for (var k in mod) if (k !== "default" && Object.prototype.hasOwnProperty.call(mod, k)) __createBinding(result, mod, k); + __setModuleDefault(result, mod); return result; }; Object.defineProperty(exports, "__esModule", ({ value: true })); +exports.getDownloadOptions = exports.getUploadOptions = void 0; const core = __importStar(__nccwpck_require__(2186)); /** * Returns a copy of the upload options with defaults filled in. @@ -1258,7 +1514,8 @@ function getDownloadOptions(copy) { useAzureSdk: true, downloadConcurrency: 8, timeoutInMs: 30000, - segmentTimeoutInMs: 3600000 + segmentTimeoutInMs: 600000, + lookupOnly: false }; if (copy) { if (typeof copy.useAzureSdk === 'boolean') { @@ -1273,6 +1530,9 @@ function getDownloadOptions(copy) { if (typeof copy.segmentTimeoutInMs === 'number') { result.segmentTimeoutInMs = copy.segmentTimeoutInMs; } + if (typeof copy.lookupOnly === 'boolean') { + result.lookupOnly = copy.lookupOnly; + } } const segmentDownloadTimeoutMins = process.env['SEGMENT_DOWNLOAD_TIMEOUT_MINS']; if (segmentDownloadTimeoutMins && @@ -1285,6 +1545,7 @@ function getDownloadOptions(copy) { core.debug(`Request timeout (ms): ${result.timeoutInMs}`); core.debug(`Cache segment download timeout mins env var: ${process.env['SEGMENT_DOWNLOAD_TIMEOUT_MINS']}`); core.debug(`Segment download timeout (ms): ${result.segmentTimeoutInMs}`); + core.debug(`Lookup only: ${result.lookupOnly}`); return result; } exports.getDownloadOptions = getDownloadOptions; @@ -9630,19 +9891,18 @@ function _getGlobal(key, defaultValue) { /***/ }), /***/ 2557: -/***/ ((__unused_webpack_module, exports, __nccwpck_require__) => { +/***/ ((__unused_webpack_module, exports) => { "use strict"; Object.defineProperty(exports, "__esModule", ({ value: true })); -var tslib = __nccwpck_require__(9268); - // Copyright (c) Microsoft Corporation. // Licensed under the MIT license. -var listenersMap = new WeakMap(); -var abortedMap = new WeakMap(); +/// +const listenersMap = new WeakMap(); +const abortedMap = new WeakMap(); /** * An aborter instance implements AbortSignal interface, can abort HTTP requests. * @@ -9656,8 +9916,8 @@ var abortedMap = new WeakMap(); * await doAsyncWork(AbortSignal.none); * ``` */ -var AbortSignal = /** @class */ (function () { - function AbortSignal() { +class AbortSignal { + constructor() { /** * onabort event listener. */ @@ -9665,74 +9925,65 @@ var AbortSignal = /** @class */ (function () { listenersMap.set(this, []); abortedMap.set(this, false); } - Object.defineProperty(AbortSignal.prototype, "aborted", { - /** - * Status of whether aborted or not. 
- * - * @readonly - */ - get: function () { - if (!abortedMap.has(this)) { - throw new TypeError("Expected `this` to be an instance of AbortSignal."); - } - return abortedMap.get(this); - }, - enumerable: false, - configurable: true - }); - Object.defineProperty(AbortSignal, "none", { - /** - * Creates a new AbortSignal instance that will never be aborted. - * - * @readonly - */ - get: function () { - return new AbortSignal(); - }, - enumerable: false, - configurable: true - }); + /** + * Status of whether aborted or not. + * + * @readonly + */ + get aborted() { + if (!abortedMap.has(this)) { + throw new TypeError("Expected `this` to be an instance of AbortSignal."); + } + return abortedMap.get(this); + } + /** + * Creates a new AbortSignal instance that will never be aborted. + * + * @readonly + */ + static get none() { + return new AbortSignal(); + } /** * Added new "abort" event listener, only support "abort" event. * * @param _type - Only support "abort" event * @param listener - The listener to be added */ - AbortSignal.prototype.addEventListener = function ( + addEventListener( // tslint:disable-next-line:variable-name _type, listener) { if (!listenersMap.has(this)) { throw new TypeError("Expected `this` to be an instance of AbortSignal."); } - var listeners = listenersMap.get(this); + const listeners = listenersMap.get(this); listeners.push(listener); - }; + } /** * Remove "abort" event listener, only support "abort" event. * * @param _type - Only support "abort" event * @param listener - The listener to be removed */ - AbortSignal.prototype.removeEventListener = function ( + removeEventListener( // tslint:disable-next-line:variable-name _type, listener) { if (!listenersMap.has(this)) { throw new TypeError("Expected `this` to be an instance of AbortSignal."); } - var listeners = listenersMap.get(this); - var index = listeners.indexOf(listener); + const listeners = listenersMap.get(this); + const index = listeners.indexOf(listener); if (index > -1) { listeners.splice(index, 1); } - }; + } /** * Dispatches a synthetic event to the AbortSignal. */ - AbortSignal.prototype.dispatchEvent = function (_event) { + dispatchEvent(_event) { throw new Error("This is a stub dispatchEvent implementation that should not be used. It only exists for type-checking purposes."); - }; - return AbortSignal; -}()); + } +} /** * Helper to trigger an abort event immediately, the onabort and all abort event listeners will be triggered. * Will try to trigger abort event for all linked AbortSignal nodes. @@ -9750,12 +10001,12 @@ function abortSignal(signal) { if (signal.onabort) { signal.onabort.call(signal); } - var listeners = listenersMap.get(signal); + const listeners = listenersMap.get(signal); if (listeners) { // Create a copy of listeners so mutations to the array // (e.g. via removeListener calls) don't affect the listeners // we invoke. 
- listeners.slice().forEach(function (listener) { + listeners.slice().forEach((listener) => { listener.call(signal, { type: "abort" }); }); } @@ -9781,15 +10032,12 @@ function abortSignal(signal) { * } * ``` */ -var AbortError = /** @class */ (function (_super) { - tslib.__extends(AbortError, _super); - function AbortError(message) { - var _this = _super.call(this, message) || this; - _this.name = "AbortError"; - return _this; +class AbortError extends Error { + constructor(message) { + super(message); + this.name = "AbortError"; } - return AbortError; -}(Error)); +} /** * An AbortController provides an AbortSignal and the associated controls to signal * that an asynchronous operation should be aborted. @@ -9824,10 +10072,9 @@ var AbortError = /** @class */ (function (_super) { * await doAsyncWork(aborter.withTimeout(25 * 1000)); * ``` */ -var AbortController = /** @class */ (function () { +class AbortController { // eslint-disable-next-line @typescript-eslint/explicit-module-boundary-types - function AbortController(parentSignals) { - var _this = this; + constructor(parentSignals) { this._signal = new AbortSignal(); if (!parentSignals) { return; @@ -9837,8 +10084,7 @@ var AbortController = /** @class */ (function () { // eslint-disable-next-line prefer-rest-params parentSignals = arguments; } - for (var _i = 0, parentSignals_1 = parentSignals; _i < parentSignals_1.length; _i++) { - var parentSignal = parentSignals_1[_i]; + for (const parentSignal of parentSignals) { // if the parent signal has already had abort() called, // then call abort on this signal as well. if (parentSignal.aborted) { @@ -9846,47 +10092,42 @@ var AbortController = /** @class */ (function () { } else { // when the parent signal aborts, this signal should as well. - parentSignal.addEventListener("abort", function () { - _this.abort(); + parentSignal.addEventListener("abort", () => { + this.abort(); }); } } } - Object.defineProperty(AbortController.prototype, "signal", { - /** - * The AbortSignal associated with this controller that will signal aborted - * when the abort method is called on this controller. - * - * @readonly - */ - get: function () { - return this._signal; - }, - enumerable: false, - configurable: true - }); + /** + * The AbortSignal associated with this controller that will signal aborted + * when the abort method is called on this controller. + * + * @readonly + */ + get signal() { + return this._signal; + } /** * Signal that any operations passed this controller's associated abort signal * to cancel any remaining work and throw an `AbortError`. */ - AbortController.prototype.abort = function () { + abort() { abortSignal(this._signal); - }; + } /** * Creates a new AbortSignal instance that will abort after the provided ms. * @param ms - Elapsed time in milliseconds to trigger an abort. */ - AbortController.timeout = function (ms) { - var signal = new AbortSignal(); - var timer = setTimeout(abortSignal, ms, signal); + static timeout(ms) { + const signal = new AbortSignal(); + const timer = setTimeout(abortSignal, ms, signal); // Prevent the active Timer from keeping the Node.js event loop active. if (typeof timer.unref === "function") { timer.unref(); } return signal; - }; - return AbortController; -}()); + } +} exports.AbortController = AbortController; exports.AbortError = AbortError; @@ -9894,319 +10135,6 @@ exports.AbortSignal = AbortSignal; //# sourceMappingURL=index.js.map -/***/ }), - -/***/ 9268: -/***/ ((module) => { - -/*! 
***************************************************************************** -Copyright (c) Microsoft Corporation. - -Permission to use, copy, modify, and/or distribute this software for any -purpose with or without fee is hereby granted. - -THE SOFTWARE IS PROVIDED "AS IS" AND THE AUTHOR DISCLAIMS ALL WARRANTIES WITH -REGARD TO THIS SOFTWARE INCLUDING ALL IMPLIED WARRANTIES OF MERCHANTABILITY -AND FITNESS. IN NO EVENT SHALL THE AUTHOR BE LIABLE FOR ANY SPECIAL, DIRECT, -INDIRECT, OR CONSEQUENTIAL DAMAGES OR ANY DAMAGES WHATSOEVER RESULTING FROM -LOSS OF USE, DATA OR PROFITS, WHETHER IN AN ACTION OF CONTRACT, NEGLIGENCE OR -OTHER TORTIOUS ACTION, ARISING OUT OF OR IN CONNECTION WITH THE USE OR -PERFORMANCE OF THIS SOFTWARE. -***************************************************************************** */ -/* global global, define, System, Reflect, Promise */ -var __extends; -var __assign; -var __rest; -var __decorate; -var __param; -var __metadata; -var __awaiter; -var __generator; -var __exportStar; -var __values; -var __read; -var __spread; -var __spreadArrays; -var __spreadArray; -var __await; -var __asyncGenerator; -var __asyncDelegator; -var __asyncValues; -var __makeTemplateObject; -var __importStar; -var __importDefault; -var __classPrivateFieldGet; -var __classPrivateFieldSet; -var __createBinding; -(function (factory) { - var root = typeof global === "object" ? global : typeof self === "object" ? self : typeof this === "object" ? this : {}; - if (typeof define === "function" && define.amd) { - define("tslib", ["exports"], function (exports) { factory(createExporter(root, createExporter(exports))); }); - } - else if ( true && typeof module.exports === "object") { - factory(createExporter(root, createExporter(module.exports))); - } - else { - factory(createExporter(root)); - } - function createExporter(exports, previous) { - if (exports !== root) { - if (typeof Object.create === "function") { - Object.defineProperty(exports, "__esModule", { value: true }); - } - else { - exports.__esModule = true; - } - } - return function (id, v) { return exports[id] = previous ? previous(id, v) : v; }; - } -}) -(function (exporter) { - var extendStatics = Object.setPrototypeOf || - ({ __proto__: [] } instanceof Array && function (d, b) { d.__proto__ = b; }) || - function (d, b) { for (var p in b) if (Object.prototype.hasOwnProperty.call(b, p)) d[p] = b[p]; }; - - __extends = function (d, b) { - if (typeof b !== "function" && b !== null) - throw new TypeError("Class extends value " + String(b) + " is not a constructor or null"); - extendStatics(d, b); - function __() { this.constructor = d; } - d.prototype = b === null ? Object.create(b) : (__.prototype = b.prototype, new __()); - }; - - __assign = Object.assign || function (t) { - for (var s, i = 1, n = arguments.length; i < n; i++) { - s = arguments[i]; - for (var p in s) if (Object.prototype.hasOwnProperty.call(s, p)) t[p] = s[p]; - } - return t; - }; - - __rest = function (s, e) { - var t = {}; - for (var p in s) if (Object.prototype.hasOwnProperty.call(s, p) && e.indexOf(p) < 0) - t[p] = s[p]; - if (s != null && typeof Object.getOwnPropertySymbols === "function") - for (var i = 0, p = Object.getOwnPropertySymbols(s); i < p.length; i++) { - if (e.indexOf(p[i]) < 0 && Object.prototype.propertyIsEnumerable.call(s, p[i])) - t[p[i]] = s[p[i]]; - } - return t; - }; - - __decorate = function (decorators, target, key, desc) { - var c = arguments.length, r = c < 3 ? target : desc === null ? 
desc = Object.getOwnPropertyDescriptor(target, key) : desc, d; - if (typeof Reflect === "object" && typeof Reflect.decorate === "function") r = Reflect.decorate(decorators, target, key, desc); - else for (var i = decorators.length - 1; i >= 0; i--) if (d = decorators[i]) r = (c < 3 ? d(r) : c > 3 ? d(target, key, r) : d(target, key)) || r; - return c > 3 && r && Object.defineProperty(target, key, r), r; - }; - - __param = function (paramIndex, decorator) { - return function (target, key) { decorator(target, key, paramIndex); } - }; - - __metadata = function (metadataKey, metadataValue) { - if (typeof Reflect === "object" && typeof Reflect.metadata === "function") return Reflect.metadata(metadataKey, metadataValue); - }; - - __awaiter = function (thisArg, _arguments, P, generator) { - function adopt(value) { return value instanceof P ? value : new P(function (resolve) { resolve(value); }); } - return new (P || (P = Promise))(function (resolve, reject) { - function fulfilled(value) { try { step(generator.next(value)); } catch (e) { reject(e); } } - function rejected(value) { try { step(generator["throw"](value)); } catch (e) { reject(e); } } - function step(result) { result.done ? resolve(result.value) : adopt(result.value).then(fulfilled, rejected); } - step((generator = generator.apply(thisArg, _arguments || [])).next()); - }); - }; - - __generator = function (thisArg, body) { - var _ = { label: 0, sent: function() { if (t[0] & 1) throw t[1]; return t[1]; }, trys: [], ops: [] }, f, y, t, g; - return g = { next: verb(0), "throw": verb(1), "return": verb(2) }, typeof Symbol === "function" && (g[Symbol.iterator] = function() { return this; }), g; - function verb(n) { return function (v) { return step([n, v]); }; } - function step(op) { - if (f) throw new TypeError("Generator is already executing."); - while (_) try { - if (f = 1, y && (t = op[0] & 2 ? y["return"] : op[0] ? y["throw"] || ((t = y["return"]) && t.call(y), 0) : y.next) && !(t = t.call(y, op[1])).done) return t; - if (y = 0, t) op = [op[0] & 2, t.value]; - switch (op[0]) { - case 0: case 1: t = op; break; - case 4: _.label++; return { value: op[1], done: false }; - case 5: _.label++; y = op[1]; op = [0]; continue; - case 7: op = _.ops.pop(); _.trys.pop(); continue; - default: - if (!(t = _.trys, t = t.length > 0 && t[t.length - 1]) && (op[0] === 6 || op[0] === 2)) { _ = 0; continue; } - if (op[0] === 3 && (!t || (op[1] > t[0] && op[1] < t[3]))) { _.label = op[1]; break; } - if (op[0] === 6 && _.label < t[1]) { _.label = t[1]; t = op; break; } - if (t && _.label < t[2]) { _.label = t[2]; _.ops.push(op); break; } - if (t[2]) _.ops.pop(); - _.trys.pop(); continue; - } - op = body.call(thisArg, _); - } catch (e) { op = [6, e]; y = 0; } finally { f = t = 0; } - if (op[0] & 5) throw op[1]; return { value: op[0] ? op[1] : void 0, done: true }; - } - }; - - __exportStar = function(m, o) { - for (var p in m) if (p !== "default" && !Object.prototype.hasOwnProperty.call(o, p)) __createBinding(o, m, p); - }; - - __createBinding = Object.create ? 
(function(o, m, k, k2) { - if (k2 === undefined) k2 = k; - Object.defineProperty(o, k2, { enumerable: true, get: function() { return m[k]; } }); - }) : (function(o, m, k, k2) { - if (k2 === undefined) k2 = k; - o[k2] = m[k]; - }); - - __values = function (o) { - var s = typeof Symbol === "function" && Symbol.iterator, m = s && o[s], i = 0; - if (m) return m.call(o); - if (o && typeof o.length === "number") return { - next: function () { - if (o && i >= o.length) o = void 0; - return { value: o && o[i++], done: !o }; - } - }; - throw new TypeError(s ? "Object is not iterable." : "Symbol.iterator is not defined."); - }; - - __read = function (o, n) { - var m = typeof Symbol === "function" && o[Symbol.iterator]; - if (!m) return o; - var i = m.call(o), r, ar = [], e; - try { - while ((n === void 0 || n-- > 0) && !(r = i.next()).done) ar.push(r.value); - } - catch (error) { e = { error: error }; } - finally { - try { - if (r && !r.done && (m = i["return"])) m.call(i); - } - finally { if (e) throw e.error; } - } - return ar; - }; - - /** @deprecated */ - __spread = function () { - for (var ar = [], i = 0; i < arguments.length; i++) - ar = ar.concat(__read(arguments[i])); - return ar; - }; - - /** @deprecated */ - __spreadArrays = function () { - for (var s = 0, i = 0, il = arguments.length; i < il; i++) s += arguments[i].length; - for (var r = Array(s), k = 0, i = 0; i < il; i++) - for (var a = arguments[i], j = 0, jl = a.length; j < jl; j++, k++) - r[k] = a[j]; - return r; - }; - - __spreadArray = function (to, from, pack) { - if (pack || arguments.length === 2) for (var i = 0, l = from.length, ar; i < l; i++) { - if (ar || !(i in from)) { - if (!ar) ar = Array.prototype.slice.call(from, 0, i); - ar[i] = from[i]; - } - } - return to.concat(ar || Array.prototype.slice.call(from)); - }; - - __await = function (v) { - return this instanceof __await ? (this.v = v, this) : new __await(v); - }; - - __asyncGenerator = function (thisArg, _arguments, generator) { - if (!Symbol.asyncIterator) throw new TypeError("Symbol.asyncIterator is not defined."); - var g = generator.apply(thisArg, _arguments || []), i, q = []; - return i = {}, verb("next"), verb("throw"), verb("return"), i[Symbol.asyncIterator] = function () { return this; }, i; - function verb(n) { if (g[n]) i[n] = function (v) { return new Promise(function (a, b) { q.push([n, v, a, b]) > 1 || resume(n, v); }); }; } - function resume(n, v) { try { step(g[n](v)); } catch (e) { settle(q[0][3], e); } } - function step(r) { r.value instanceof __await ? Promise.resolve(r.value.v).then(fulfill, reject) : settle(q[0][2], r); } - function fulfill(value) { resume("next", value); } - function reject(value) { resume("throw", value); } - function settle(f, v) { if (f(v), q.shift(), q.length) resume(q[0][0], q[0][1]); } - }; - - __asyncDelegator = function (o) { - var i, p; - return i = {}, verb("next"), verb("throw", function (e) { throw e; }), verb("return"), i[Symbol.iterator] = function () { return this; }, i; - function verb(n, f) { i[n] = o[n] ? function (v) { return (p = !p) ? { value: __await(o[n](v)), done: n === "return" } : f ? f(v) : v; } : f; } - }; - - __asyncValues = function (o) { - if (!Symbol.asyncIterator) throw new TypeError("Symbol.asyncIterator is not defined."); - var m = o[Symbol.asyncIterator], i; - return m ? m.call(o) : (o = typeof __values === "function" ? 
__values(o) : o[Symbol.iterator](), i = {}, verb("next"), verb("throw"), verb("return"), i[Symbol.asyncIterator] = function () { return this; }, i); - function verb(n) { i[n] = o[n] && function (v) { return new Promise(function (resolve, reject) { v = o[n](v), settle(resolve, reject, v.done, v.value); }); }; } - function settle(resolve, reject, d, v) { Promise.resolve(v).then(function(v) { resolve({ value: v, done: d }); }, reject); } - }; - - __makeTemplateObject = function (cooked, raw) { - if (Object.defineProperty) { Object.defineProperty(cooked, "raw", { value: raw }); } else { cooked.raw = raw; } - return cooked; - }; - - var __setModuleDefault = Object.create ? (function(o, v) { - Object.defineProperty(o, "default", { enumerable: true, value: v }); - }) : function(o, v) { - o["default"] = v; - }; - - __importStar = function (mod) { - if (mod && mod.__esModule) return mod; - var result = {}; - if (mod != null) for (var k in mod) if (k !== "default" && Object.prototype.hasOwnProperty.call(mod, k)) __createBinding(result, mod, k); - __setModuleDefault(result, mod); - return result; - }; - - __importDefault = function (mod) { - return (mod && mod.__esModule) ? mod : { "default": mod }; - }; - - __classPrivateFieldGet = function (receiver, state, kind, f) { - if (kind === "a" && !f) throw new TypeError("Private accessor was defined without a getter"); - if (typeof state === "function" ? receiver !== state || !f : !state.has(receiver)) throw new TypeError("Cannot read private member from an object whose class did not declare it"); - return kind === "m" ? f : kind === "a" ? f.call(receiver) : f ? f.value : state.get(receiver); - }; - - __classPrivateFieldSet = function (receiver, state, value, kind, f) { - if (kind === "m") throw new TypeError("Private method is not writable"); - if (kind === "a" && !f) throw new TypeError("Private accessor was defined without a setter"); - if (typeof state === "function" ? receiver !== state || !f : !state.has(receiver)) throw new TypeError("Cannot write private member to an object whose class did not declare it"); - return (kind === "a" ? f.call(receiver, value) : f ? f.value = value : state.set(receiver, value)), value; - }; - - exporter("__extends", __extends); - exporter("__assign", __assign); - exporter("__rest", __rest); - exporter("__decorate", __decorate); - exporter("__param", __param); - exporter("__metadata", __metadata); - exporter("__awaiter", __awaiter); - exporter("__generator", __generator); - exporter("__exportStar", __exportStar); - exporter("__createBinding", __createBinding); - exporter("__values", __values); - exporter("__read", __read); - exporter("__spread", __spread); - exporter("__spreadArrays", __spreadArrays); - exporter("__spreadArray", __spreadArray); - exporter("__await", __await); - exporter("__asyncGenerator", __asyncGenerator); - exporter("__asyncDelegator", __asyncDelegator); - exporter("__asyncValues", __asyncValues); - exporter("__makeTemplateObject", __makeTemplateObject); - exporter("__importStar", __importStar); - exporter("__importDefault", __importDefault); - exporter("__classPrivateFieldGet", __classPrivateFieldGet); - exporter("__classPrivateFieldSet", __classPrivateFieldSet); -}); - - /***/ }), /***/ 2356: @@ -63581,78 +63509,111 @@ var MAX_SAFE_INTEGER = Number.MAX_SAFE_INTEGER || // Max safe segment length for coercion. 
var MAX_SAFE_COMPONENT_LENGTH = 16 +var MAX_SAFE_BUILD_LENGTH = MAX_LENGTH - 6 + // The actual regexps go on exports.re var re = exports.re = [] +var safeRe = exports.safeRe = [] var src = exports.src = [] +var t = exports.tokens = {} var R = 0 +function tok (n) { + t[n] = R++ +} + +var LETTERDASHNUMBER = '[a-zA-Z0-9-]' + +// Replace some greedy regex tokens to prevent regex dos issues. These regex are +// used internally via the safeRe object since all inputs in this library get +// normalized first to trim and collapse all extra whitespace. The original +// regexes are exported for userland consumption and lower level usage. A +// future breaking change could export the safer regex only with a note that +// all input should have extra whitespace removed. +var safeRegexReplacements = [ + ['\\s', 1], + ['\\d', MAX_LENGTH], + [LETTERDASHNUMBER, MAX_SAFE_BUILD_LENGTH], +] + +function makeSafeRe (value) { + for (var i = 0; i < safeRegexReplacements.length; i++) { + var token = safeRegexReplacements[i][0] + var max = safeRegexReplacements[i][1] + value = value + .split(token + '*').join(token + '{0,' + max + '}') + .split(token + '+').join(token + '{1,' + max + '}') + } + return value +} + // The following Regular Expressions can be used for tokenizing, // validating, and parsing SemVer version strings. // ## Numeric Identifier // A single `0`, or a non-zero digit followed by zero or more digits. -var NUMERICIDENTIFIER = R++ -src[NUMERICIDENTIFIER] = '0|[1-9]\\d*' -var NUMERICIDENTIFIERLOOSE = R++ -src[NUMERICIDENTIFIERLOOSE] = '[0-9]+' +tok('NUMERICIDENTIFIER') +src[t.NUMERICIDENTIFIER] = '0|[1-9]\\d*' +tok('NUMERICIDENTIFIERLOOSE') +src[t.NUMERICIDENTIFIERLOOSE] = '\\d+' // ## Non-numeric Identifier // Zero or more digits, followed by a letter or hyphen, and then zero or // more letters, digits, or hyphens. -var NONNUMERICIDENTIFIER = R++ -src[NONNUMERICIDENTIFIER] = '\\d*[a-zA-Z-][a-zA-Z0-9-]*' +tok('NONNUMERICIDENTIFIER') +src[t.NONNUMERICIDENTIFIER] = '\\d*[a-zA-Z-]' + LETTERDASHNUMBER + '*' // ## Main Version // Three dot-separated numeric identifiers. -var MAINVERSION = R++ -src[MAINVERSION] = '(' + src[NUMERICIDENTIFIER] + ')\\.' + - '(' + src[NUMERICIDENTIFIER] + ')\\.' + - '(' + src[NUMERICIDENTIFIER] + ')' +tok('MAINVERSION') +src[t.MAINVERSION] = '(' + src[t.NUMERICIDENTIFIER] + ')\\.' + + '(' + src[t.NUMERICIDENTIFIER] + ')\\.' + + '(' + src[t.NUMERICIDENTIFIER] + ')' -var MAINVERSIONLOOSE = R++ -src[MAINVERSIONLOOSE] = '(' + src[NUMERICIDENTIFIERLOOSE] + ')\\.' + - '(' + src[NUMERICIDENTIFIERLOOSE] + ')\\.' + - '(' + src[NUMERICIDENTIFIERLOOSE] + ')' +tok('MAINVERSIONLOOSE') +src[t.MAINVERSIONLOOSE] = '(' + src[t.NUMERICIDENTIFIERLOOSE] + ')\\.' + + '(' + src[t.NUMERICIDENTIFIERLOOSE] + ')\\.' + + '(' + src[t.NUMERICIDENTIFIERLOOSE] + ')' // ## Pre-release Version Identifier // A numeric identifier, or a non-numeric identifier. 
-var PRERELEASEIDENTIFIER = R++ -src[PRERELEASEIDENTIFIER] = '(?:' + src[NUMERICIDENTIFIER] + - '|' + src[NONNUMERICIDENTIFIER] + ')' +tok('PRERELEASEIDENTIFIER') +src[t.PRERELEASEIDENTIFIER] = '(?:' + src[t.NUMERICIDENTIFIER] + + '|' + src[t.NONNUMERICIDENTIFIER] + ')' -var PRERELEASEIDENTIFIERLOOSE = R++ -src[PRERELEASEIDENTIFIERLOOSE] = '(?:' + src[NUMERICIDENTIFIERLOOSE] + - '|' + src[NONNUMERICIDENTIFIER] + ')' +tok('PRERELEASEIDENTIFIERLOOSE') +src[t.PRERELEASEIDENTIFIERLOOSE] = '(?:' + src[t.NUMERICIDENTIFIERLOOSE] + + '|' + src[t.NONNUMERICIDENTIFIER] + ')' // ## Pre-release Version // Hyphen, followed by one or more dot-separated pre-release version // identifiers. -var PRERELEASE = R++ -src[PRERELEASE] = '(?:-(' + src[PRERELEASEIDENTIFIER] + - '(?:\\.' + src[PRERELEASEIDENTIFIER] + ')*))' +tok('PRERELEASE') +src[t.PRERELEASE] = '(?:-(' + src[t.PRERELEASEIDENTIFIER] + + '(?:\\.' + src[t.PRERELEASEIDENTIFIER] + ')*))' -var PRERELEASELOOSE = R++ -src[PRERELEASELOOSE] = '(?:-?(' + src[PRERELEASEIDENTIFIERLOOSE] + - '(?:\\.' + src[PRERELEASEIDENTIFIERLOOSE] + ')*))' +tok('PRERELEASELOOSE') +src[t.PRERELEASELOOSE] = '(?:-?(' + src[t.PRERELEASEIDENTIFIERLOOSE] + + '(?:\\.' + src[t.PRERELEASEIDENTIFIERLOOSE] + ')*))' // ## Build Metadata Identifier // Any combination of digits, letters, or hyphens. -var BUILDIDENTIFIER = R++ -src[BUILDIDENTIFIER] = '[0-9A-Za-z-]+' +tok('BUILDIDENTIFIER') +src[t.BUILDIDENTIFIER] = LETTERDASHNUMBER + '+' // ## Build Metadata // Plus sign, followed by one or more period-separated build metadata // identifiers. -var BUILD = R++ -src[BUILD] = '(?:\\+(' + src[BUILDIDENTIFIER] + - '(?:\\.' + src[BUILDIDENTIFIER] + ')*))' +tok('BUILD') +src[t.BUILD] = '(?:\\+(' + src[t.BUILDIDENTIFIER] + + '(?:\\.' + src[t.BUILDIDENTIFIER] + ')*))' // ## Full Version String // A main version, followed optionally by a pre-release version and @@ -63663,129 +63624,137 @@ src[BUILD] = '(?:\\+(' + src[BUILDIDENTIFIER] + // capturing group, because it should not ever be used in version // comparison. -var FULL = R++ -var FULLPLAIN = 'v?' + src[MAINVERSION] + - src[PRERELEASE] + '?' + - src[BUILD] + '?' +tok('FULL') +tok('FULLPLAIN') +src[t.FULLPLAIN] = 'v?' + src[t.MAINVERSION] + + src[t.PRERELEASE] + '?' + + src[t.BUILD] + '?' -src[FULL] = '^' + FULLPLAIN + '$' +src[t.FULL] = '^' + src[t.FULLPLAIN] + '$' // like full, but allows v1.2.3 and =1.2.3, which people do sometimes. // also, 1.0.0alpha1 (prerelease without the hyphen) which is pretty // common in the npm registry. -var LOOSEPLAIN = '[v=\\s]*' + src[MAINVERSIONLOOSE] + - src[PRERELEASELOOSE] + '?' + - src[BUILD] + '?' +tok('LOOSEPLAIN') +src[t.LOOSEPLAIN] = '[v=\\s]*' + src[t.MAINVERSIONLOOSE] + + src[t.PRERELEASELOOSE] + '?' + + src[t.BUILD] + '?' -var LOOSE = R++ -src[LOOSE] = '^' + LOOSEPLAIN + '$' +tok('LOOSE') +src[t.LOOSE] = '^' + src[t.LOOSEPLAIN] + '$' -var GTLT = R++ -src[GTLT] = '((?:<|>)?=?)' +tok('GTLT') +src[t.GTLT] = '((?:<|>)?=?)' // Something like "2.*" or "1.2.x". // Note that "x.x" is a valid xRange identifer, meaning "any version" // Only the first item is strictly required. 
-var XRANGEIDENTIFIERLOOSE = R++ -src[XRANGEIDENTIFIERLOOSE] = src[NUMERICIDENTIFIERLOOSE] + '|x|X|\\*' -var XRANGEIDENTIFIER = R++ -src[XRANGEIDENTIFIER] = src[NUMERICIDENTIFIER] + '|x|X|\\*' +tok('XRANGEIDENTIFIERLOOSE') +src[t.XRANGEIDENTIFIERLOOSE] = src[t.NUMERICIDENTIFIERLOOSE] + '|x|X|\\*' +tok('XRANGEIDENTIFIER') +src[t.XRANGEIDENTIFIER] = src[t.NUMERICIDENTIFIER] + '|x|X|\\*' -var XRANGEPLAIN = R++ -src[XRANGEPLAIN] = '[v=\\s]*(' + src[XRANGEIDENTIFIER] + ')' + - '(?:\\.(' + src[XRANGEIDENTIFIER] + ')' + - '(?:\\.(' + src[XRANGEIDENTIFIER] + ')' + - '(?:' + src[PRERELEASE] + ')?' + - src[BUILD] + '?' + +tok('XRANGEPLAIN') +src[t.XRANGEPLAIN] = '[v=\\s]*(' + src[t.XRANGEIDENTIFIER] + ')' + + '(?:\\.(' + src[t.XRANGEIDENTIFIER] + ')' + + '(?:\\.(' + src[t.XRANGEIDENTIFIER] + ')' + + '(?:' + src[t.PRERELEASE] + ')?' + + src[t.BUILD] + '?' + ')?)?' -var XRANGEPLAINLOOSE = R++ -src[XRANGEPLAINLOOSE] = '[v=\\s]*(' + src[XRANGEIDENTIFIERLOOSE] + ')' + - '(?:\\.(' + src[XRANGEIDENTIFIERLOOSE] + ')' + - '(?:\\.(' + src[XRANGEIDENTIFIERLOOSE] + ')' + - '(?:' + src[PRERELEASELOOSE] + ')?' + - src[BUILD] + '?' + +tok('XRANGEPLAINLOOSE') +src[t.XRANGEPLAINLOOSE] = '[v=\\s]*(' + src[t.XRANGEIDENTIFIERLOOSE] + ')' + + '(?:\\.(' + src[t.XRANGEIDENTIFIERLOOSE] + ')' + + '(?:\\.(' + src[t.XRANGEIDENTIFIERLOOSE] + ')' + + '(?:' + src[t.PRERELEASELOOSE] + ')?' + + src[t.BUILD] + '?' + ')?)?' -var XRANGE = R++ -src[XRANGE] = '^' + src[GTLT] + '\\s*' + src[XRANGEPLAIN] + '$' -var XRANGELOOSE = R++ -src[XRANGELOOSE] = '^' + src[GTLT] + '\\s*' + src[XRANGEPLAINLOOSE] + '$' +tok('XRANGE') +src[t.XRANGE] = '^' + src[t.GTLT] + '\\s*' + src[t.XRANGEPLAIN] + '$' +tok('XRANGELOOSE') +src[t.XRANGELOOSE] = '^' + src[t.GTLT] + '\\s*' + src[t.XRANGEPLAINLOOSE] + '$' // Coercion. // Extract anything that could conceivably be a part of a valid semver -var COERCE = R++ -src[COERCE] = '(?:^|[^\\d])' + +tok('COERCE') +src[t.COERCE] = '(^|[^\\d])' + '(\\d{1,' + MAX_SAFE_COMPONENT_LENGTH + '})' + '(?:\\.(\\d{1,' + MAX_SAFE_COMPONENT_LENGTH + '}))?' + '(?:\\.(\\d{1,' + MAX_SAFE_COMPONENT_LENGTH + '}))?' + '(?:$|[^\\d])' +tok('COERCERTL') +re[t.COERCERTL] = new RegExp(src[t.COERCE], 'g') +safeRe[t.COERCERTL] = new RegExp(makeSafeRe(src[t.COERCE]), 'g') // Tilde ranges. // Meaning is "reasonably at or greater than" -var LONETILDE = R++ -src[LONETILDE] = '(?:~>?)' +tok('LONETILDE') +src[t.LONETILDE] = '(?:~>?)' -var TILDETRIM = R++ -src[TILDETRIM] = '(\\s*)' + src[LONETILDE] + '\\s+' -re[TILDETRIM] = new RegExp(src[TILDETRIM], 'g') +tok('TILDETRIM') +src[t.TILDETRIM] = '(\\s*)' + src[t.LONETILDE] + '\\s+' +re[t.TILDETRIM] = new RegExp(src[t.TILDETRIM], 'g') +safeRe[t.TILDETRIM] = new RegExp(makeSafeRe(src[t.TILDETRIM]), 'g') var tildeTrimReplace = '$1~' -var TILDE = R++ -src[TILDE] = '^' + src[LONETILDE] + src[XRANGEPLAIN] + '$' -var TILDELOOSE = R++ -src[TILDELOOSE] = '^' + src[LONETILDE] + src[XRANGEPLAINLOOSE] + '$' +tok('TILDE') +src[t.TILDE] = '^' + src[t.LONETILDE] + src[t.XRANGEPLAIN] + '$' +tok('TILDELOOSE') +src[t.TILDELOOSE] = '^' + src[t.LONETILDE] + src[t.XRANGEPLAINLOOSE] + '$' // Caret ranges. 
// Meaning is "at least and backwards compatible with" -var LONECARET = R++ -src[LONECARET] = '(?:\\^)' +tok('LONECARET') +src[t.LONECARET] = '(?:\\^)' -var CARETTRIM = R++ -src[CARETTRIM] = '(\\s*)' + src[LONECARET] + '\\s+' -re[CARETTRIM] = new RegExp(src[CARETTRIM], 'g') +tok('CARETTRIM') +src[t.CARETTRIM] = '(\\s*)' + src[t.LONECARET] + '\\s+' +re[t.CARETTRIM] = new RegExp(src[t.CARETTRIM], 'g') +safeRe[t.CARETTRIM] = new RegExp(makeSafeRe(src[t.CARETTRIM]), 'g') var caretTrimReplace = '$1^' -var CARET = R++ -src[CARET] = '^' + src[LONECARET] + src[XRANGEPLAIN] + '$' -var CARETLOOSE = R++ -src[CARETLOOSE] = '^' + src[LONECARET] + src[XRANGEPLAINLOOSE] + '$' +tok('CARET') +src[t.CARET] = '^' + src[t.LONECARET] + src[t.XRANGEPLAIN] + '$' +tok('CARETLOOSE') +src[t.CARETLOOSE] = '^' + src[t.LONECARET] + src[t.XRANGEPLAINLOOSE] + '$' // A simple gt/lt/eq thing, or just "" to indicate "any version" -var COMPARATORLOOSE = R++ -src[COMPARATORLOOSE] = '^' + src[GTLT] + '\\s*(' + LOOSEPLAIN + ')$|^$' -var COMPARATOR = R++ -src[COMPARATOR] = '^' + src[GTLT] + '\\s*(' + FULLPLAIN + ')$|^$' +tok('COMPARATORLOOSE') +src[t.COMPARATORLOOSE] = '^' + src[t.GTLT] + '\\s*(' + src[t.LOOSEPLAIN] + ')$|^$' +tok('COMPARATOR') +src[t.COMPARATOR] = '^' + src[t.GTLT] + '\\s*(' + src[t.FULLPLAIN] + ')$|^$' // An expression to strip any whitespace between the gtlt and the thing // it modifies, so that `> 1.2.3` ==> `>1.2.3` -var COMPARATORTRIM = R++ -src[COMPARATORTRIM] = '(\\s*)' + src[GTLT] + - '\\s*(' + LOOSEPLAIN + '|' + src[XRANGEPLAIN] + ')' +tok('COMPARATORTRIM') +src[t.COMPARATORTRIM] = '(\\s*)' + src[t.GTLT] + + '\\s*(' + src[t.LOOSEPLAIN] + '|' + src[t.XRANGEPLAIN] + ')' // this one has to use the /g flag -re[COMPARATORTRIM] = new RegExp(src[COMPARATORTRIM], 'g') +re[t.COMPARATORTRIM] = new RegExp(src[t.COMPARATORTRIM], 'g') +safeRe[t.COMPARATORTRIM] = new RegExp(makeSafeRe(src[t.COMPARATORTRIM]), 'g') var comparatorTrimReplace = '$1$2$3' // Something like `1.2.3 - 1.2.4` // Note that these all use the loose form, because they'll be // checked against either the strict or loose comparator form // later. -var HYPHENRANGE = R++ -src[HYPHENRANGE] = '^\\s*(' + src[XRANGEPLAIN] + ')' + +tok('HYPHENRANGE') +src[t.HYPHENRANGE] = '^\\s*(' + src[t.XRANGEPLAIN] + ')' + '\\s+-\\s+' + - '(' + src[XRANGEPLAIN] + ')' + + '(' + src[t.XRANGEPLAIN] + ')' + '\\s*$' -var HYPHENRANGELOOSE = R++ -src[HYPHENRANGELOOSE] = '^\\s*(' + src[XRANGEPLAINLOOSE] + ')' + +tok('HYPHENRANGELOOSE') +src[t.HYPHENRANGELOOSE] = '^\\s*(' + src[t.XRANGEPLAINLOOSE] + ')' + '\\s+-\\s+' + - '(' + src[XRANGEPLAINLOOSE] + ')' + + '(' + src[t.XRANGEPLAINLOOSE] + ')' + '\\s*$' // Star ranges basically just allow anything at all. -var STAR = R++ -src[STAR] = '(<|>)?=?\\s*\\*' +tok('STAR') +src[t.STAR] = '(<|>)?=?\\s*\\*' // Compile to actual regexp objects. // All are flag-free, unless they were created above with a flag. @@ -63793,6 +63762,14 @@ for (var i = 0; i < R; i++) { debug(i, src[i]) if (!re[i]) { re[i] = new RegExp(src[i]) + + // Replace all greedy whitespace to prevent regex dos issues. These regex are + // used internally via the safeRe object since all inputs in this library get + // normalized first to trim and collapse all extra whitespace. The original + // regexes are exported for userland consumption and lower level usage. A + // future breaking change could export the safer regex only with a note that + // all input should have extra whitespace removed. 
+ safeRe[i] = new RegExp(makeSafeRe(src[i])) } } @@ -63817,7 +63794,7 @@ function parse (version, options) { return null } - var r = options.loose ? re[LOOSE] : re[FULL] + var r = options.loose ? safeRe[t.LOOSE] : safeRe[t.FULL] if (!r.test(version)) { return null } @@ -63872,7 +63849,7 @@ function SemVer (version, options) { this.options = options this.loose = !!options.loose - var m = version.trim().match(options.loose ? re[LOOSE] : re[FULL]) + var m = version.trim().match(options.loose ? safeRe[t.LOOSE] : safeRe[t.FULL]) if (!m) { throw new TypeError('Invalid Version: ' + version) @@ -64317,6 +64294,7 @@ function Comparator (comp, options) { return new Comparator(comp, options) } + comp = comp.trim().split(/\s+/).join(' ') debug('comparator', comp, options) this.options = options this.loose = !!options.loose @@ -64333,7 +64311,7 @@ function Comparator (comp, options) { var ANY = {} Comparator.prototype.parse = function (comp) { - var r = this.options.loose ? re[COMPARATORLOOSE] : re[COMPARATOR] + var r = this.options.loose ? safeRe[t.COMPARATORLOOSE] : safeRe[t.COMPARATOR] var m = comp.match(r) if (!m) { @@ -64457,9 +64435,16 @@ function Range (range, options) { this.loose = !!options.loose this.includePrerelease = !!options.includePrerelease - // First, split based on boolean or || + // First reduce all whitespace as much as possible so we do not have to rely + // on potentially slow regexes like \s*. This is then stored and used for + // future error messages as well. this.raw = range - this.set = range.split(/\s*\|\|\s*/).map(function (range) { + .trim() + .split(/\s+/) + .join(' ') + + // First, split based on boolean or || + this.set = this.raw.split('||').map(function (range) { return this.parseRange(range.trim()) }, this).filter(function (c) { // throw out any that are not relevant for whatever reason @@ -64467,7 +64452,7 @@ function Range (range, options) { }) if (!this.set.length) { - throw new TypeError('Invalid SemVer Range: ' + range) + throw new TypeError('Invalid SemVer Range: ' + this.raw) } this.format() @@ -64486,20 +64471,19 @@ Range.prototype.toString = function () { Range.prototype.parseRange = function (range) { var loose = this.options.loose - range = range.trim() // `1.2.3 - 1.2.4` => `>=1.2.3 <=1.2.4` - var hr = loose ? re[HYPHENRANGELOOSE] : re[HYPHENRANGE] + var hr = loose ? safeRe[t.HYPHENRANGELOOSE] : safeRe[t.HYPHENRANGE] range = range.replace(hr, hyphenReplace) debug('hyphen replace', range) // `> 1.2.3 < 1.2.5` => `>1.2.3 <1.2.5` - range = range.replace(re[COMPARATORTRIM], comparatorTrimReplace) - debug('comparator trim', range, re[COMPARATORTRIM]) + range = range.replace(safeRe[t.COMPARATORTRIM], comparatorTrimReplace) + debug('comparator trim', range, safeRe[t.COMPARATORTRIM]) // `~ 1.2.3` => `~1.2.3` - range = range.replace(re[TILDETRIM], tildeTrimReplace) + range = range.replace(safeRe[t.TILDETRIM], tildeTrimReplace) // `^ 1.2.3` => `^1.2.3` - range = range.replace(re[CARETTRIM], caretTrimReplace) + range = range.replace(safeRe[t.CARETTRIM], caretTrimReplace) // normalize spaces range = range.split(/\s+/).join(' ') @@ -64507,7 +64491,7 @@ Range.prototype.parseRange = function (range) { // At this point, the range is completely trimmed and // ready to be split into comparators. - var compRe = loose ? re[COMPARATORLOOSE] : re[COMPARATOR] + var compRe = loose ? 
safeRe[t.COMPARATORLOOSE] : safeRe[t.COMPARATOR] var set = range.split(' ').map(function (comp) { return parseComparator(comp, this.options) }, this).join(' ').split(/\s+/) @@ -64607,7 +64591,7 @@ function replaceTildes (comp, options) { } function replaceTilde (comp, options) { - var r = options.loose ? re[TILDELOOSE] : re[TILDE] + var r = options.loose ? safeRe[t.TILDELOOSE] : safeRe[t.TILDE] return comp.replace(r, function (_, M, m, p, pr) { debug('tilde', comp, _, M, m, p, pr) var ret @@ -64648,7 +64632,7 @@ function replaceCarets (comp, options) { function replaceCaret (comp, options) { debug('caret', comp, options) - var r = options.loose ? re[CARETLOOSE] : re[CARET] + var r = options.loose ? safeRe[t.CARETLOOSE] : safeRe[t.CARET] return comp.replace(r, function (_, M, m, p, pr) { debug('caret', comp, _, M, m, p, pr) var ret @@ -64707,7 +64691,7 @@ function replaceXRanges (comp, options) { function replaceXRange (comp, options) { comp = comp.trim() - var r = options.loose ? re[XRANGELOOSE] : re[XRANGE] + var r = options.loose ? safeRe[t.XRANGELOOSE] : safeRe[t.XRANGE] return comp.replace(r, function (ret, gtlt, M, m, p, pr) { debug('xRange', comp, ret, gtlt, M, m, p, pr) var xM = isX(M) @@ -64719,10 +64703,14 @@ function replaceXRange (comp, options) { gtlt = '' } + // if we're including prereleases in the match, then we need + // to fix this to -0, the lowest possible prerelease value + pr = options.includePrerelease ? '-0' : '' + if (xM) { if (gtlt === '>' || gtlt === '<') { // nothing is allowed - ret = '<0.0.0' + ret = '<0.0.0-0' } else { // nothing is forbidden ret = '*' @@ -64759,11 +64747,12 @@ function replaceXRange (comp, options) { } } - ret = gtlt + M + '.' + m + '.' + p + ret = gtlt + M + '.' + m + '.' + p + pr } else if (xm) { - ret = '>=' + M + '.0.0 <' + (+M + 1) + '.0.0' + ret = '>=' + M + '.0.0' + pr + ' <' + (+M + 1) + '.0.0' + pr } else if (xp) { - ret = '>=' + M + '.' + m + '.0 <' + M + '.' + (+m + 1) + '.0' + ret = '>=' + M + '.' + m + '.0' + pr + + ' <' + M + '.' + (+m + 1) + '.0' + pr } debug('xRange return', ret) @@ -64777,10 +64766,10 @@ function replaceXRange (comp, options) { function replaceStars (comp, options) { debug('replaceStars', comp, options) // Looseness is ignored here. star is always as loose as it gets! - return comp.trim().replace(re[STAR], '') + return comp.trim().replace(safeRe[t.STAR], '') } -// This function is passed to string.replace(re[HYPHENRANGE]) +// This function is passed to string.replace(re[t.HYPHENRANGE]) // M, m, patch, prerelease, build // 1.2 - 3.4.5 => >=1.2.0 <=3.4.5 // 1.2.3 - 3.4 => >=1.2.0 <3.5.0 Any 3.4.x will do @@ -65091,19 +65080,49 @@ function coerce (version, options) { return version } + if (typeof version === 'number') { + version = String(version) + } + if (typeof version !== 'string') { return null } - var match = version.match(re[COERCE]) + options = options || {} - if (match == null) { + var match = null + if (!options.rtl) { + match = version.match(safeRe[t.COERCE]) + } else { + // Find the right-most coercible string that does not share + // a terminus with a more left-ward coercible string. + // Eg, '1.2.3.4' wants to coerce '2.3.4', not '3.4' or '4' + // + // Walk through the string checking with a /g regexp + // Manually set the index so as to pick up overlapping matches. + // Stop when we get a match that ends at the string end, since no + // coercible string can be more right-ward without the same terminus. 
+ var next + while ((next = safeRe[t.COERCERTL].exec(version)) && + (!match || match.index + match[0].length !== version.length) + ) { + if (!match || + next.index + next[0].length !== match.index + match[0].length) { + match = next + } + safeRe[t.COERCERTL].lastIndex = next.index + next[1].length + next[2].length + } + // leave it in a clean state + safeRe[t.COERCERTL].lastIndex = -1 + } + + if (match === null) { return null } - return parse(match[1] + - '.' + (match[2] || '0') + - '.' + (match[3] || '0'), options) + return parse(match[2] + + '.' + (match[3] || '0') + + '.' + (match[4] || '0'), options) } @@ -71022,73 +71041,73 @@ function wrappy (fn, cb) { /***/ (function(__unused_webpack_module, exports, __nccwpck_require__) { "use strict"; - -var __createBinding = (this && this.__createBinding) || (Object.create ? (function(o, m, k, k2) { - if (k2 === undefined) k2 = k; - Object.defineProperty(o, k2, { enumerable: true, get: function() { return m[k]; } }); -}) : (function(o, m, k, k2) { - if (k2 === undefined) k2 = k; - o[k2] = m[k]; -})); -var __setModuleDefault = (this && this.__setModuleDefault) || (Object.create ? (function(o, v) { - Object.defineProperty(o, "default", { enumerable: true, value: v }); -}) : function(o, v) { - o["default"] = v; -}); -var __importStar = (this && this.__importStar) || function (mod) { - if (mod && mod.__esModule) return mod; - var result = {}; - if (mod != null) for (var k in mod) if (k !== "default" && Object.prototype.hasOwnProperty.call(mod, k)) __createBinding(result, mod, k); - __setModuleDefault(result, mod); - return result; -}; -Object.defineProperty(exports, "__esModule", ({ value: true })); -exports.configAuthentication = void 0; -const fs = __importStar(__nccwpck_require__(7147)); -const os = __importStar(__nccwpck_require__(2037)); -const path = __importStar(__nccwpck_require__(1017)); -const core = __importStar(__nccwpck_require__(2186)); -const github = __importStar(__nccwpck_require__(5438)); -function configAuthentication(registryUrl, alwaysAuth) { - const npmrc = path.resolve(process.env['RUNNER_TEMP'] || process.cwd(), '.npmrc'); - if (!registryUrl.endsWith('/')) { - registryUrl += '/'; - } - writeRegistryToFile(registryUrl, npmrc, alwaysAuth); -} -exports.configAuthentication = configAuthentication; -function writeRegistryToFile(registryUrl, fileLocation, alwaysAuth) { - let scope = core.getInput('scope'); - if (!scope && registryUrl.indexOf('npm.pkg.github.com') > -1) { - scope = github.context.repo.owner; - } - if (scope && scope[0] != '@') { - scope = '@' + scope; - } - if (scope) { - scope = scope.toLowerCase() + ':'; - } - core.debug(`Setting auth in ${fileLocation}`); - let newContents = ''; - if (fs.existsSync(fileLocation)) { - const curContents = fs.readFileSync(fileLocation, 'utf8'); - curContents.split(os.EOL).forEach((line) => { - // Add current contents unless they are setting the registry - if (!line.toLowerCase().startsWith(`${scope}registry`)) { - newContents += line + os.EOL; - } - }); - } - // Remove http: or https: from front of registry. 
- const authString = registryUrl.replace(/(^\w+:|^)/, '') + ':_authToken=${NODE_AUTH_TOKEN}'; - const registryString = `${scope}registry=${registryUrl}`; - const alwaysAuthString = `always-auth=${alwaysAuth}`; - newContents += `${authString}${os.EOL}${registryString}${os.EOL}${alwaysAuthString}`; - fs.writeFileSync(fileLocation, newContents); - core.exportVariable('NPM_CONFIG_USERCONFIG', fileLocation); - // Export empty node_auth_token if didn't exist so npm doesn't complain about not being able to find it - core.exportVariable('NODE_AUTH_TOKEN', process.env.NODE_AUTH_TOKEN || 'XXXXX-XXXXX-XXXXX-XXXXX'); -} + +var __createBinding = (this && this.__createBinding) || (Object.create ? (function(o, m, k, k2) { + if (k2 === undefined) k2 = k; + Object.defineProperty(o, k2, { enumerable: true, get: function() { return m[k]; } }); +}) : (function(o, m, k, k2) { + if (k2 === undefined) k2 = k; + o[k2] = m[k]; +})); +var __setModuleDefault = (this && this.__setModuleDefault) || (Object.create ? (function(o, v) { + Object.defineProperty(o, "default", { enumerable: true, value: v }); +}) : function(o, v) { + o["default"] = v; +}); +var __importStar = (this && this.__importStar) || function (mod) { + if (mod && mod.__esModule) return mod; + var result = {}; + if (mod != null) for (var k in mod) if (k !== "default" && Object.prototype.hasOwnProperty.call(mod, k)) __createBinding(result, mod, k); + __setModuleDefault(result, mod); + return result; +}; +Object.defineProperty(exports, "__esModule", ({ value: true })); +exports.configAuthentication = void 0; +const fs = __importStar(__nccwpck_require__(7147)); +const os = __importStar(__nccwpck_require__(2037)); +const path = __importStar(__nccwpck_require__(1017)); +const core = __importStar(__nccwpck_require__(2186)); +const github = __importStar(__nccwpck_require__(5438)); +function configAuthentication(registryUrl, alwaysAuth) { + const npmrc = path.resolve(process.env['RUNNER_TEMP'] || process.cwd(), '.npmrc'); + if (!registryUrl.endsWith('/')) { + registryUrl += '/'; + } + writeRegistryToFile(registryUrl, npmrc, alwaysAuth); +} +exports.configAuthentication = configAuthentication; +function writeRegistryToFile(registryUrl, fileLocation, alwaysAuth) { + let scope = core.getInput('scope'); + if (!scope && registryUrl.indexOf('npm.pkg.github.com') > -1) { + scope = github.context.repo.owner; + } + if (scope && scope[0] != '@') { + scope = '@' + scope; + } + if (scope) { + scope = scope.toLowerCase() + ':'; + } + core.debug(`Setting auth in ${fileLocation}`); + let newContents = ''; + if (fs.existsSync(fileLocation)) { + const curContents = fs.readFileSync(fileLocation, 'utf8'); + curContents.split(os.EOL).forEach((line) => { + // Add current contents unless they are setting the registry + if (!line.toLowerCase().startsWith(`${scope}registry`)) { + newContents += line + os.EOL; + } + }); + } + // Remove http: or https: from front of registry. 
+ const authString = registryUrl.replace(/(^\w+:|^)/, '') + ':_authToken=${NODE_AUTH_TOKEN}'; + const registryString = `${scope}registry=${registryUrl}`; + const alwaysAuthString = `always-auth=${alwaysAuth}`; + newContents += `${authString}${os.EOL}${registryString}${os.EOL}${alwaysAuthString}`; + fs.writeFileSync(fileLocation, newContents); + core.exportVariable('NPM_CONFIG_USERCONFIG', fileLocation); + // Export empty node_auth_token if didn't exist so npm doesn't complain about not being able to find it + core.exportVariable('NODE_AUTH_TOKEN', process.env.NODE_AUTH_TOKEN || 'XXXXX-XXXXX-XXXXX-XXXXX'); +} /***/ }), @@ -71097,84 +71116,94 @@ function writeRegistryToFile(registryUrl, fileLocation, alwaysAuth) { /***/ (function(__unused_webpack_module, exports, __nccwpck_require__) { "use strict"; - -var __createBinding = (this && this.__createBinding) || (Object.create ? (function(o, m, k, k2) { - if (k2 === undefined) k2 = k; - Object.defineProperty(o, k2, { enumerable: true, get: function() { return m[k]; } }); -}) : (function(o, m, k, k2) { - if (k2 === undefined) k2 = k; - o[k2] = m[k]; -})); -var __setModuleDefault = (this && this.__setModuleDefault) || (Object.create ? (function(o, v) { - Object.defineProperty(o, "default", { enumerable: true, value: v }); -}) : function(o, v) { - o["default"] = v; -}); -var __importStar = (this && this.__importStar) || function (mod) { - if (mod && mod.__esModule) return mod; - var result = {}; - if (mod != null) for (var k in mod) if (k !== "default" && Object.prototype.hasOwnProperty.call(mod, k)) __createBinding(result, mod, k); - __setModuleDefault(result, mod); - return result; -}; -var __awaiter = (this && this.__awaiter) || function (thisArg, _arguments, P, generator) { - function adopt(value) { return value instanceof P ? value : new P(function (resolve) { resolve(value); }); } - return new (P || (P = Promise))(function (resolve, reject) { - function fulfilled(value) { try { step(generator.next(value)); } catch (e) { reject(e); } } - function rejected(value) { try { step(generator["throw"](value)); } catch (e) { reject(e); } } - function step(result) { result.done ? resolve(result.value) : adopt(result.value).then(fulfilled, rejected); } - step((generator = generator.apply(thisArg, _arguments || [])).next()); - }); -}; -var __importDefault = (this && this.__importDefault) || function (mod) { - return (mod && mod.__esModule) ? mod : { "default": mod }; -}; -Object.defineProperty(exports, "__esModule", ({ value: true })); -exports.restoreCache = void 0; -const cache = __importStar(__nccwpck_require__(7799)); -const core = __importStar(__nccwpck_require__(2186)); -const glob = __importStar(__nccwpck_require__(8090)); -const path_1 = __importDefault(__nccwpck_require__(1017)); -const fs_1 = __importDefault(__nccwpck_require__(7147)); -const constants_1 = __nccwpck_require__(9042); -const cache_utils_1 = __nccwpck_require__(1678); -const restoreCache = (packageManager, cacheDependencyPath) => __awaiter(void 0, void 0, void 0, function* () { - const packageManagerInfo = yield cache_utils_1.getPackageManagerInfo(packageManager); - if (!packageManagerInfo) { - throw new Error(`Caching for '${packageManager}' is not supported`); - } - const platform = process.env.RUNNER_OS; - const cachePath = yield cache_utils_1.getCacheDirectoryPath(packageManagerInfo, packageManager); - const lockFilePath = cacheDependencyPath - ? 
cacheDependencyPath - : findLockFile(packageManagerInfo); - const fileHash = yield glob.hashFiles(lockFilePath); - if (!fileHash) { - throw new Error('Some specified paths were not resolved, unable to cache dependencies.'); - } - const primaryKey = `node-cache-${platform}-${packageManager}-${fileHash}`; - core.debug(`primary key is ${primaryKey}`); - core.saveState(constants_1.State.CachePrimaryKey, primaryKey); - const cacheKey = yield cache.restoreCache([cachePath], primaryKey); - core.setOutput('cache-hit', Boolean(cacheKey)); - if (!cacheKey) { - core.info(`${packageManager} cache is not found`); - return; - } - core.saveState(constants_1.State.CacheMatchedKey, cacheKey); - core.info(`Cache restored from key: ${cacheKey}`); -}); -exports.restoreCache = restoreCache; -const findLockFile = (packageManager) => { - const lockFiles = packageManager.lockFilePatterns; - const workspace = process.env.GITHUB_WORKSPACE; - const rootContent = fs_1.default.readdirSync(workspace); - const lockFile = lockFiles.find(item => rootContent.includes(item)); - if (!lockFile) { - throw new Error(`Dependencies lock file is not found in ${workspace}. Supported file patterns: ${lockFiles.toString()}`); - } - return path_1.default.join(workspace, lockFile); -}; + +var __createBinding = (this && this.__createBinding) || (Object.create ? (function(o, m, k, k2) { + if (k2 === undefined) k2 = k; + Object.defineProperty(o, k2, { enumerable: true, get: function() { return m[k]; } }); +}) : (function(o, m, k, k2) { + if (k2 === undefined) k2 = k; + o[k2] = m[k]; +})); +var __setModuleDefault = (this && this.__setModuleDefault) || (Object.create ? (function(o, v) { + Object.defineProperty(o, "default", { enumerable: true, value: v }); +}) : function(o, v) { + o["default"] = v; +}); +var __importStar = (this && this.__importStar) || function (mod) { + if (mod && mod.__esModule) return mod; + var result = {}; + if (mod != null) for (var k in mod) if (k !== "default" && Object.prototype.hasOwnProperty.call(mod, k)) __createBinding(result, mod, k); + __setModuleDefault(result, mod); + return result; +}; +var __awaiter = (this && this.__awaiter) || function (thisArg, _arguments, P, generator) { + function adopt(value) { return value instanceof P ? value : new P(function (resolve) { resolve(value); }); } + return new (P || (P = Promise))(function (resolve, reject) { + function fulfilled(value) { try { step(generator.next(value)); } catch (e) { reject(e); } } + function rejected(value) { try { step(generator["throw"](value)); } catch (e) { reject(e); } } + function step(result) { result.done ? resolve(result.value) : adopt(result.value).then(fulfilled, rejected); } + step((generator = generator.apply(thisArg, _arguments || [])).next()); + }); +}; +var __importDefault = (this && this.__importDefault) || function (mod) { + return (mod && mod.__esModule) ? 
mod : { "default": mod }; +}; +Object.defineProperty(exports, "__esModule", ({ value: true })); +exports.restoreCache = void 0; +const cache = __importStar(__nccwpck_require__(7799)); +const core = __importStar(__nccwpck_require__(2186)); +const glob = __importStar(__nccwpck_require__(8090)); +const path_1 = __importDefault(__nccwpck_require__(1017)); +const fs_1 = __importDefault(__nccwpck_require__(7147)); +const constants_1 = __nccwpck_require__(9042); +const cache_utils_1 = __nccwpck_require__(1678); +const restoreCache = (packageManager, cacheDependencyPath) => __awaiter(void 0, void 0, void 0, function* () { + const packageManagerInfo = yield cache_utils_1.getPackageManagerInfo(packageManager); + if (!packageManagerInfo) { + throw new Error(`Caching for '${packageManager}' is not supported`); + } + const platform = process.env.RUNNER_OS; + const cachePaths = yield cache_utils_1.getCacheDirectories(packageManagerInfo, cacheDependencyPath); + core.saveState(constants_1.State.CachePaths, cachePaths); + const lockFilePath = cacheDependencyPath + ? cacheDependencyPath + : findLockFile(packageManagerInfo); + const fileHash = yield glob.hashFiles(lockFilePath); + if (!fileHash) { + throw new Error('Some specified paths were not resolved, unable to cache dependencies.'); + } + const keyPrefix = `node-cache-${platform}-${packageManager}`; + const primaryKey = `${keyPrefix}-${fileHash}`; + core.debug(`primary key is ${primaryKey}`); + core.saveState(constants_1.State.CachePrimaryKey, primaryKey); + const isManagedByYarnBerry = yield cache_utils_1.repoHasYarnBerryManagedDependencies(packageManagerInfo, cacheDependencyPath); + let cacheKey; + if (isManagedByYarnBerry) { + core.info('All dependencies are managed locally by yarn3, the previous cache can be used'); + cacheKey = yield cache.restoreCache(cachePaths, primaryKey, [keyPrefix]); + } + else { + cacheKey = yield cache.restoreCache(cachePaths, primaryKey); + } + core.setOutput('cache-hit', Boolean(cacheKey)); + if (!cacheKey) { + core.info(`${packageManager} cache is not found`); + return; + } + core.saveState(constants_1.State.CacheMatchedKey, cacheKey); + core.info(`Cache restored from key: ${cacheKey}`); +}); +exports.restoreCache = restoreCache; +const findLockFile = (packageManager) => { + const lockFiles = packageManager.lockFilePatterns; + const workspace = process.env.GITHUB_WORKSPACE; + const rootContent = fs_1.default.readdirSync(workspace); + const lockFile = lockFiles.find(item => rootContent.includes(item)); + if (!lockFile) { + throw new Error(`Dependencies lock file is not found in ${workspace}. Supported file patterns: ${lockFiles.toString()}`); + } + return path_1.default.join(workspace, lockFile); +}; /***/ }), @@ -71183,123 +71212,254 @@ const findLockFile = (packageManager) => { /***/ (function(__unused_webpack_module, exports, __nccwpck_require__) { "use strict"; - -var __createBinding = (this && this.__createBinding) || (Object.create ? (function(o, m, k, k2) { - if (k2 === undefined) k2 = k; - Object.defineProperty(o, k2, { enumerable: true, get: function() { return m[k]; } }); -}) : (function(o, m, k, k2) { - if (k2 === undefined) k2 = k; - o[k2] = m[k]; -})); -var __setModuleDefault = (this && this.__setModuleDefault) || (Object.create ? 
(function(o, v) { - Object.defineProperty(o, "default", { enumerable: true, value: v }); -}) : function(o, v) { - o["default"] = v; -}); -var __importStar = (this && this.__importStar) || function (mod) { - if (mod && mod.__esModule) return mod; - var result = {}; - if (mod != null) for (var k in mod) if (k !== "default" && Object.prototype.hasOwnProperty.call(mod, k)) __createBinding(result, mod, k); - __setModuleDefault(result, mod); - return result; -}; -var __awaiter = (this && this.__awaiter) || function (thisArg, _arguments, P, generator) { - function adopt(value) { return value instanceof P ? value : new P(function (resolve) { resolve(value); }); } - return new (P || (P = Promise))(function (resolve, reject) { - function fulfilled(value) { try { step(generator.next(value)); } catch (e) { reject(e); } } - function rejected(value) { try { step(generator["throw"](value)); } catch (e) { reject(e); } } - function step(result) { result.done ? resolve(result.value) : adopt(result.value).then(fulfilled, rejected); } - step((generator = generator.apply(thisArg, _arguments || [])).next()); - }); -}; -Object.defineProperty(exports, "__esModule", ({ value: true })); -exports.isCacheFeatureAvailable = exports.isGhes = exports.getCacheDirectoryPath = exports.getPackageManagerInfo = exports.getCommandOutput = exports.supportedPackageManagers = void 0; -const core = __importStar(__nccwpck_require__(2186)); -const exec = __importStar(__nccwpck_require__(1514)); -const cache = __importStar(__nccwpck_require__(7799)); -exports.supportedPackageManagers = { - npm: { - lockFilePatterns: ['package-lock.json', 'npm-shrinkwrap.json', 'yarn.lock'], - getCacheFolderCommand: 'npm config get cache' - }, - pnpm: { - lockFilePatterns: ['pnpm-lock.yaml'], - getCacheFolderCommand: 'pnpm store path --silent' - }, - yarn1: { - lockFilePatterns: ['yarn.lock'], - getCacheFolderCommand: 'yarn cache dir' - }, - yarn2: { - lockFilePatterns: ['yarn.lock'], - getCacheFolderCommand: 'yarn config get cacheFolder' - } -}; -const getCommandOutput = (toolCommand) => __awaiter(void 0, void 0, void 0, function* () { - let { stdout, stderr, exitCode } = yield exec.getExecOutput(toolCommand, undefined, { ignoreReturnCode: true }); - if (exitCode) { - stderr = !stderr.trim() - ? 
`The '${toolCommand}' command failed with exit code: ${exitCode}` - : stderr; - throw new Error(stderr); - } - return stdout.trim(); -}); -exports.getCommandOutput = getCommandOutput; -const getPackageManagerVersion = (packageManager, command) => __awaiter(void 0, void 0, void 0, function* () { - const stdOut = yield exports.getCommandOutput(`${packageManager} ${command}`); - if (!stdOut) { - throw new Error(`Could not retrieve version of ${packageManager}`); - } - return stdOut; -}); -const getPackageManagerInfo = (packageManager) => __awaiter(void 0, void 0, void 0, function* () { - if (packageManager === 'npm') { - return exports.supportedPackageManagers.npm; - } - else if (packageManager === 'pnpm') { - return exports.supportedPackageManagers.pnpm; - } - else if (packageManager === 'yarn') { - const yarnVersion = yield getPackageManagerVersion('yarn', '--version'); - core.debug(`Consumed yarn version is ${yarnVersion}`); - if (yarnVersion.startsWith('1.')) { - return exports.supportedPackageManagers.yarn1; - } - else { - return exports.supportedPackageManagers.yarn2; - } - } - else { - return null; - } -}); -exports.getPackageManagerInfo = getPackageManagerInfo; -const getCacheDirectoryPath = (packageManagerInfo, packageManager) => __awaiter(void 0, void 0, void 0, function* () { - const stdOut = yield exports.getCommandOutput(packageManagerInfo.getCacheFolderCommand); - if (!stdOut) { - throw new Error(`Could not get cache folder path for ${packageManager}`); - } - core.debug(`${packageManager} path is ${stdOut}`); - return stdOut.trim(); -}); -exports.getCacheDirectoryPath = getCacheDirectoryPath; -function isGhes() { - const ghUrl = new URL(process.env['GITHUB_SERVER_URL'] || 'https://github.com'); - return ghUrl.hostname.toUpperCase() !== 'GITHUB.COM'; -} -exports.isGhes = isGhes; -function isCacheFeatureAvailable() { - if (cache.isFeatureAvailable()) - return true; - if (isGhes()) { - core.warning('Cache action is only supported on GHES version >= 3.5. If you are on version >=3.5 Please check with GHES admin if Actions cache service is enabled or not.'); - return false; - } - core.warning('The runner was not able to contact the cache service. Caching will be skipped'); - return false; -} -exports.isCacheFeatureAvailable = isCacheFeatureAvailable; + +var __createBinding = (this && this.__createBinding) || (Object.create ? (function(o, m, k, k2) { + if (k2 === undefined) k2 = k; + Object.defineProperty(o, k2, { enumerable: true, get: function() { return m[k]; } }); +}) : (function(o, m, k, k2) { + if (k2 === undefined) k2 = k; + o[k2] = m[k]; +})); +var __setModuleDefault = (this && this.__setModuleDefault) || (Object.create ? (function(o, v) { + Object.defineProperty(o, "default", { enumerable: true, value: v }); +}) : function(o, v) { + o["default"] = v; +}); +var __importStar = (this && this.__importStar) || function (mod) { + if (mod && mod.__esModule) return mod; + var result = {}; + if (mod != null) for (var k in mod) if (k !== "default" && Object.prototype.hasOwnProperty.call(mod, k)) __createBinding(result, mod, k); + __setModuleDefault(result, mod); + return result; +}; +var __awaiter = (this && this.__awaiter) || function (thisArg, _arguments, P, generator) { + function adopt(value) { return value instanceof P ? 
value : new P(function (resolve) { resolve(value); }); } + return new (P || (P = Promise))(function (resolve, reject) { + function fulfilled(value) { try { step(generator.next(value)); } catch (e) { reject(e); } } + function rejected(value) { try { step(generator["throw"](value)); } catch (e) { reject(e); } } + function step(result) { result.done ? resolve(result.value) : adopt(result.value).then(fulfilled, rejected); } + step((generator = generator.apply(thisArg, _arguments || [])).next()); + }); +}; +var __importDefault = (this && this.__importDefault) || function (mod) { + return (mod && mod.__esModule) ? mod : { "default": mod }; +}; +Object.defineProperty(exports, "__esModule", ({ value: true })); +exports.isCacheFeatureAvailable = exports.isGhes = exports.repoHasYarnBerryManagedDependencies = exports.getCacheDirectories = exports.resetProjectDirectoriesMemoized = exports.getPackageManagerInfo = exports.getCommandOutputNotEmpty = exports.getCommandOutput = exports.supportedPackageManagers = void 0; +const core = __importStar(__nccwpck_require__(2186)); +const exec = __importStar(__nccwpck_require__(1514)); +const cache = __importStar(__nccwpck_require__(7799)); +const glob = __importStar(__nccwpck_require__(8090)); +const path_1 = __importDefault(__nccwpck_require__(1017)); +const fs_1 = __importDefault(__nccwpck_require__(7147)); +const util_1 = __nccwpck_require__(2629); +exports.supportedPackageManagers = { + npm: { + name: 'npm', + lockFilePatterns: ['package-lock.json', 'npm-shrinkwrap.json', 'yarn.lock'], + getCacheFolderPath: () => exports.getCommandOutputNotEmpty('npm config get cache', 'Could not get npm cache folder path') + }, + pnpm: { + name: 'pnpm', + lockFilePatterns: ['pnpm-lock.yaml'], + getCacheFolderPath: () => exports.getCommandOutputNotEmpty('pnpm store path --silent', 'Could not get pnpm cache folder path') + }, + yarn: { + name: 'yarn', + lockFilePatterns: ['yarn.lock'], + getCacheFolderPath: (projectDir) => __awaiter(void 0, void 0, void 0, function* () { + const yarnVersion = yield exports.getCommandOutputNotEmpty(`yarn --version`, 'Could not retrieve version of yarn', projectDir); + core.debug(`Consumed yarn version is ${yarnVersion} (working dir: "${projectDir || ''}")`); + const stdOut = yarnVersion.startsWith('1.') + ? yield exports.getCommandOutput('yarn cache dir', projectDir) + : yield exports.getCommandOutput('yarn config get cacheFolder', projectDir); + if (!stdOut) { + throw new Error(`Could not get yarn cache folder path for ${projectDir}`); + } + return stdOut; + }) + } +}; +const getCommandOutput = (toolCommand, cwd) => __awaiter(void 0, void 0, void 0, function* () { + let { stdout, stderr, exitCode } = yield exec.getExecOutput(toolCommand, undefined, Object.assign({ ignoreReturnCode: true }, (cwd && { cwd }))); + if (exitCode) { + stderr = !stderr.trim() + ? 
`The '${toolCommand}' command failed with exit code: ${exitCode}` + : stderr; + throw new Error(stderr); + } + return stdout.trim(); +}); +exports.getCommandOutput = getCommandOutput; +const getCommandOutputNotEmpty = (toolCommand, error, cwd) => __awaiter(void 0, void 0, void 0, function* () { + const stdOut = exports.getCommandOutput(toolCommand, cwd); + if (!stdOut) { + throw new Error(error); + } + return stdOut; +}); +exports.getCommandOutputNotEmpty = getCommandOutputNotEmpty; +const getPackageManagerInfo = (packageManager) => __awaiter(void 0, void 0, void 0, function* () { + if (packageManager === 'npm') { + return exports.supportedPackageManagers.npm; + } + else if (packageManager === 'pnpm') { + return exports.supportedPackageManagers.pnpm; + } + else if (packageManager === 'yarn') { + return exports.supportedPackageManagers.yarn; + } + else { + return null; + } +}); +exports.getPackageManagerInfo = getPackageManagerInfo; +/** + * getProjectDirectoriesFromCacheDependencyPath is called twice during `restoreCache` + * - first through `getCacheDirectories` + * - second from `repoHasYarn3ManagedCache` + * + * it contains expensive IO operation and thus should be memoized + */ +let projectDirectoriesMemoized = null; +/** + * unit test must reset memoized variables + */ +const resetProjectDirectoriesMemoized = () => (projectDirectoriesMemoized = null); +exports.resetProjectDirectoriesMemoized = resetProjectDirectoriesMemoized; +/** + * Expands (converts) the string input `cache-dependency-path` to list of directories that + * may be project roots + * @param cacheDependencyPath - either a single string or multiline string with possible glob patterns + * expected to be the result of `core.getInput('cache-dependency-path')` + * @return list of directories and possible + */ +const getProjectDirectoriesFromCacheDependencyPath = (cacheDependencyPath) => __awaiter(void 0, void 0, void 0, function* () { + if (projectDirectoriesMemoized !== null) { + return projectDirectoriesMemoized; + } + const globber = yield glob.create(cacheDependencyPath); + const cacheDependenciesPaths = yield globber.glob(); + const existingDirectories = cacheDependenciesPaths + .map(path_1.default.dirname) + .filter(util_1.unique()) + .map(dirName => fs_1.default.realpathSync(dirName)) + .filter(directory => fs_1.default.lstatSync(directory).isDirectory()); + if (!existingDirectories.length) + core.warning(`No existing directories found containing cache-dependency-path="${cacheDependencyPath}"`); + projectDirectoriesMemoized = existingDirectories; + return existingDirectories; +}); +/** + * Finds the cache directories configured for the repo if cache-dependency-path is not empty + * @param packageManagerInfo - an object having getCacheFolderPath method specific to given PM + * @param cacheDependencyPath - either a single string or multiline string with possible glob patterns + * expected to be the result of `core.getInput('cache-dependency-path')` + * @return list of files on which the cache depends + */ +const getCacheDirectoriesFromCacheDependencyPath = (packageManagerInfo, cacheDependencyPath) => __awaiter(void 0, void 0, void 0, function* () { + const projectDirectories = yield getProjectDirectoriesFromCacheDependencyPath(cacheDependencyPath); + const cacheFoldersPaths = yield Promise.all(projectDirectories.map((projectDirectory) => __awaiter(void 0, void 0, void 0, function* () { + const cacheFolderPath = yield packageManagerInfo.getCacheFolderPath(projectDirectory); + core.debug(`${packageManagerInfo.name}'s 
cache folder "${cacheFolderPath}" configured for the directory "${projectDirectory}"`); + return cacheFolderPath; + }))); + // uniq in order to do not cache the same directories twice + return cacheFoldersPaths.filter(util_1.unique()); +}); +/** + * Finds the cache directories configured for the repo ignoring cache-dependency-path + * @param packageManagerInfo - an object having getCacheFolderPath method specific to given PM + * @return list of files on which the cache depends + */ +const getCacheDirectoriesForRootProject = (packageManagerInfo) => __awaiter(void 0, void 0, void 0, function* () { + const cacheFolderPath = yield packageManagerInfo.getCacheFolderPath(); + core.debug(`${packageManagerInfo.name}'s cache folder "${cacheFolderPath}" configured for the root directory`); + return [cacheFolderPath]; +}); +/** + * A function to find the cache directories configured for the repo + * currently it handles only the case of PM=yarn && cacheDependencyPath is not empty + * @param packageManagerInfo - an object having getCacheFolderPath method specific to given PM + * @param cacheDependencyPath - either a single string or multiline string with possible glob patterns + * expected to be the result of `core.getInput('cache-dependency-path')` + * @return list of files on which the cache depends + */ +const getCacheDirectories = (packageManagerInfo, cacheDependencyPath) => __awaiter(void 0, void 0, void 0, function* () { + // For yarn, if cacheDependencyPath is set, ask information about cache folders in each project + // folder satisfied by cacheDependencyPath https://github.com/actions/setup-node/issues/488 + if (packageManagerInfo.name === 'yarn' && cacheDependencyPath) { + return getCacheDirectoriesFromCacheDependencyPath(packageManagerInfo, cacheDependencyPath); + } + return getCacheDirectoriesForRootProject(packageManagerInfo); +}); +exports.getCacheDirectories = getCacheDirectories; +/** + * A function to check if the directory is a yarn project configured to manage + * obsolete dependencies in the local cache + * @param directory - a path to the folder + * @return - true if the directory's project is yarn managed + * - if there's .yarn/cache folder do not mess with the dependencies kept in the repo, return false + * - global cache is not managed by yarn @see https://yarnpkg.com/features/offline-cache, return false + * - if local cache is not explicitly enabled (not yarn3), return false + * - return true otherwise + */ +const projectHasYarnBerryManagedDependencies = (directory) => __awaiter(void 0, void 0, void 0, function* () { + const workDir = directory || process.env.GITHUB_WORKSPACE || '.'; + core.debug(`check if "${workDir}" has locally managed yarn3 dependencies`); + // if .yarn/cache directory exists the cache is managed by version control system + const yarnCacheFile = path_1.default.join(workDir, '.yarn', 'cache'); + if (fs_1.default.existsSync(yarnCacheFile) && + fs_1.default.lstatSync(yarnCacheFile).isDirectory()) { + core.debug(`"${workDir}" has .yarn/cache - dependencies are kept in the repository`); + return Promise.resolve(false); + } + // NOTE: yarn1 returns 'undefined' with return code = 0 + const enableGlobalCache = yield exports.getCommandOutput('yarn config get enableGlobalCache', workDir); + // only local cache is not managed by yarn + const managed = enableGlobalCache.includes('false'); + if (managed) { + core.debug(`"${workDir}" dependencies are managed by yarn 3 locally`); + return true; + } + else { + core.debug(`"${workDir}" dependencies are not managed by yarn 3 
locally`); + return false; + } +}); +/** + * A function to report the repo contains Yarn managed projects + * @param packageManagerInfo - used to make sure current package manager is yarn + * @param cacheDependencyPath - either a single string or multiline string with possible glob patterns + * expected to be the result of `core.getInput('cache-dependency-path')` + * @return - true if all project directories configured to be Yarn managed + */ +const repoHasYarnBerryManagedDependencies = (packageManagerInfo, cacheDependencyPath) => __awaiter(void 0, void 0, void 0, function* () { + if (packageManagerInfo.name !== 'yarn') + return false; + const yarnDirs = cacheDependencyPath + ? yield getProjectDirectoriesFromCacheDependencyPath(cacheDependencyPath) + : ['']; + const isManagedList = yield Promise.all(yarnDirs.map(projectHasYarnBerryManagedDependencies)); + return isManagedList.every(Boolean); +}); +exports.repoHasYarnBerryManagedDependencies = repoHasYarnBerryManagedDependencies; +function isGhes() { + const ghUrl = new URL(process.env['GITHUB_SERVER_URL'] || 'https://github.com'); + return ghUrl.hostname.toUpperCase() !== 'GITHUB.COM'; +} +exports.isGhes = isGhes; +function isCacheFeatureAvailable() { + if (cache.isFeatureAvailable()) + return true; + if (isGhes()) { + core.warning('Cache action is only supported on GHES version >= 3.5. If you are on version >=3.5 Please check with GHES admin if Actions cache service is enabled or not.'); + return false; + } + core.warning('The runner was not able to contact the cache service. Caching will be skipped'); + return false; +} +exports.isCacheFeatureAvailable = isCacheFeatureAvailable; /***/ }), @@ -71308,24 +71468,25 @@ exports.isCacheFeatureAvailable = isCacheFeatureAvailable; /***/ ((__unused_webpack_module, exports) => { "use strict"; - -Object.defineProperty(exports, "__esModule", ({ value: true })); -exports.Outputs = exports.State = exports.LockType = void 0; -var LockType; -(function (LockType) { - LockType["Npm"] = "npm"; - LockType["Pnpm"] = "pnpm"; - LockType["Yarn"] = "yarn"; -})(LockType = exports.LockType || (exports.LockType = {})); -var State; -(function (State) { - State["CachePrimaryKey"] = "CACHE_KEY"; - State["CacheMatchedKey"] = "CACHE_RESULT"; -})(State = exports.State || (exports.State = {})); -var Outputs; -(function (Outputs) { - Outputs["CacheHit"] = "cache-hit"; -})(Outputs = exports.Outputs || (exports.Outputs = {})); + +Object.defineProperty(exports, "__esModule", ({ value: true })); +exports.Outputs = exports.State = exports.LockType = void 0; +var LockType; +(function (LockType) { + LockType["Npm"] = "npm"; + LockType["Pnpm"] = "pnpm"; + LockType["Yarn"] = "yarn"; +})(LockType = exports.LockType || (exports.LockType = {})); +var State; +(function (State) { + State["CachePrimaryKey"] = "CACHE_KEY"; + State["CacheMatchedKey"] = "CACHE_RESULT"; + State["CachePaths"] = "CACHE_PATHS"; +})(State = exports.State || (exports.State = {})); +var Outputs; +(function (Outputs) { + Outputs["CacheHit"] = "cache-hit"; +})(Outputs = exports.Outputs || (exports.Outputs = {})); /***/ }), @@ -71334,73 +71495,73 @@ var Outputs; /***/ (function(__unused_webpack_module, exports, __nccwpck_require__) { "use strict"; - -var __createBinding = (this && this.__createBinding) || (Object.create ? 
(function(o, m, k, k2) { - if (k2 === undefined) k2 = k; - Object.defineProperty(o, k2, { enumerable: true, get: function() { return m[k]; } }); -}) : (function(o, m, k, k2) { - if (k2 === undefined) k2 = k; - o[k2] = m[k]; -})); -var __setModuleDefault = (this && this.__setModuleDefault) || (Object.create ? (function(o, v) { - Object.defineProperty(o, "default", { enumerable: true, value: v }); -}) : function(o, v) { - o["default"] = v; -}); -var __importStar = (this && this.__importStar) || function (mod) { - if (mod && mod.__esModule) return mod; - var result = {}; - if (mod != null) for (var k in mod) if (k !== "default" && Object.prototype.hasOwnProperty.call(mod, k)) __createBinding(result, mod, k); - __setModuleDefault(result, mod); - return result; -}; -var __importDefault = (this && this.__importDefault) || function (mod) { - return (mod && mod.__esModule) ? mod : { "default": mod }; -}; -Object.defineProperty(exports, "__esModule", ({ value: true })); -const tc = __importStar(__nccwpck_require__(7784)); -const semver_1 = __importDefault(__nccwpck_require__(5911)); -const base_distribution_1 = __importDefault(__nccwpck_require__(7)); -class BasePrereleaseNodejs extends base_distribution_1.default { - constructor(nodeInfo) { - super(nodeInfo); - } - findVersionInHostedToolCacheDirectory() { - let toolPath = ''; - const localVersionPaths = tc - .findAllVersions('node', this.nodeInfo.arch) - .filter(i => { - const prerelease = semver_1.default.prerelease(i); - if (!prerelease) { - return false; - } - return prerelease[0].includes(this.distribution); - }); - localVersionPaths.sort(semver_1.default.rcompare); - const localVersion = this.evaluateVersions(localVersionPaths); - if (localVersion) { - toolPath = tc.find('node', localVersion, this.nodeInfo.arch); - } - return toolPath; - } - validRange(versionSpec) { - let range; - const [raw, prerelease] = this.splitVersionSpec(versionSpec); - const isValidVersion = semver_1.default.valid(raw); - const rawVersion = (isValidVersion ? raw : semver_1.default.coerce(raw)); - if (prerelease !== this.distribution) { - range = versionSpec; - } - else { - range = `${semver_1.default.validRange(`^${rawVersion}-${this.distribution}`)}-0`; - } - return { range, options: { includePrerelease: !isValidVersion } }; - } - splitVersionSpec(versionSpec) { - return versionSpec.split(/-(.*)/s); - } -} -exports["default"] = BasePrereleaseNodejs; + +var __createBinding = (this && this.__createBinding) || (Object.create ? (function(o, m, k, k2) { + if (k2 === undefined) k2 = k; + Object.defineProperty(o, k2, { enumerable: true, get: function() { return m[k]; } }); +}) : (function(o, m, k, k2) { + if (k2 === undefined) k2 = k; + o[k2] = m[k]; +})); +var __setModuleDefault = (this && this.__setModuleDefault) || (Object.create ? (function(o, v) { + Object.defineProperty(o, "default", { enumerable: true, value: v }); +}) : function(o, v) { + o["default"] = v; +}); +var __importStar = (this && this.__importStar) || function (mod) { + if (mod && mod.__esModule) return mod; + var result = {}; + if (mod != null) for (var k in mod) if (k !== "default" && Object.prototype.hasOwnProperty.call(mod, k)) __createBinding(result, mod, k); + __setModuleDefault(result, mod); + return result; +}; +var __importDefault = (this && this.__importDefault) || function (mod) { + return (mod && mod.__esModule) ? 
mod : { "default": mod }; +}; +Object.defineProperty(exports, "__esModule", ({ value: true })); +const tc = __importStar(__nccwpck_require__(7784)); +const semver_1 = __importDefault(__nccwpck_require__(5911)); +const base_distribution_1 = __importDefault(__nccwpck_require__(7)); +class BasePrereleaseNodejs extends base_distribution_1.default { + constructor(nodeInfo) { + super(nodeInfo); + } + findVersionInHostedToolCacheDirectory() { + let toolPath = ''; + const localVersionPaths = tc + .findAllVersions('node', this.nodeInfo.arch) + .filter(i => { + const prerelease = semver_1.default.prerelease(i); + if (!prerelease) { + return false; + } + return prerelease[0].includes(this.distribution); + }); + localVersionPaths.sort(semver_1.default.rcompare); + const localVersion = this.evaluateVersions(localVersionPaths); + if (localVersion) { + toolPath = tc.find('node', localVersion, this.nodeInfo.arch); + } + return toolPath; + } + validRange(versionSpec) { + let range; + const [raw, prerelease] = this.splitVersionSpec(versionSpec); + const isValidVersion = semver_1.default.valid(raw); + const rawVersion = (isValidVersion ? raw : semver_1.default.coerce(raw)); + if (prerelease !== this.distribution) { + range = versionSpec; + } + else { + range = `${semver_1.default.validRange(`^${rawVersion}-${this.distribution}`)}-0`; + } + return { range, options: { includePrerelease: !isValidVersion } }; + } + splitVersionSpec(versionSpec) { + return versionSpec.split(/-(.*)/s); + } +} +exports["default"] = BasePrereleaseNodejs; /***/ }), @@ -71409,275 +71570,275 @@ exports["default"] = BasePrereleaseNodejs; /***/ (function(__unused_webpack_module, exports, __nccwpck_require__) { "use strict"; - -var __createBinding = (this && this.__createBinding) || (Object.create ? (function(o, m, k, k2) { - if (k2 === undefined) k2 = k; - Object.defineProperty(o, k2, { enumerable: true, get: function() { return m[k]; } }); -}) : (function(o, m, k, k2) { - if (k2 === undefined) k2 = k; - o[k2] = m[k]; -})); -var __setModuleDefault = (this && this.__setModuleDefault) || (Object.create ? (function(o, v) { - Object.defineProperty(o, "default", { enumerable: true, value: v }); -}) : function(o, v) { - o["default"] = v; -}); -var __importStar = (this && this.__importStar) || function (mod) { - if (mod && mod.__esModule) return mod; - var result = {}; - if (mod != null) for (var k in mod) if (k !== "default" && Object.prototype.hasOwnProperty.call(mod, k)) __createBinding(result, mod, k); - __setModuleDefault(result, mod); - return result; -}; -var __awaiter = (this && this.__awaiter) || function (thisArg, _arguments, P, generator) { - function adopt(value) { return value instanceof P ? value : new P(function (resolve) { resolve(value); }); } - return new (P || (P = Promise))(function (resolve, reject) { - function fulfilled(value) { try { step(generator.next(value)); } catch (e) { reject(e); } } - function rejected(value) { try { step(generator["throw"](value)); } catch (e) { reject(e); } } - function step(result) { result.done ? resolve(result.value) : adopt(result.value).then(fulfilled, rejected); } - step((generator = generator.apply(thisArg, _arguments || [])).next()); - }); -}; -var __importDefault = (this && this.__importDefault) || function (mod) { - return (mod && mod.__esModule) ? 
mod : { "default": mod }; -}; -Object.defineProperty(exports, "__esModule", ({ value: true })); -const tc = __importStar(__nccwpck_require__(7784)); -const hc = __importStar(__nccwpck_require__(9925)); -const core = __importStar(__nccwpck_require__(2186)); -const io = __importStar(__nccwpck_require__(7436)); -const semver_1 = __importDefault(__nccwpck_require__(5911)); -const assert = __importStar(__nccwpck_require__(9491)); -const path = __importStar(__nccwpck_require__(1017)); -const os_1 = __importDefault(__nccwpck_require__(2037)); -const fs_1 = __importDefault(__nccwpck_require__(7147)); -class BaseDistribution { - constructor(nodeInfo) { - this.nodeInfo = nodeInfo; - this.osPlat = os_1.default.platform(); - this.httpClient = new hc.HttpClient('setup-node', [], { - allowRetries: true, - maxRetries: 3 - }); - } - setupNodeJs() { - return __awaiter(this, void 0, void 0, function* () { - let nodeJsVersions; - if (this.nodeInfo.checkLatest) { - const evaluatedVersion = yield this.findVersionInDist(nodeJsVersions); - this.nodeInfo.versionSpec = evaluatedVersion; - } - let toolPath = this.findVersionInHostedToolCacheDirectory(); - if (toolPath) { - core.info(`Found in cache @ ${toolPath}`); - } - else { - const evaluatedVersion = yield this.findVersionInDist(nodeJsVersions); - const toolName = this.getNodejsDistInfo(evaluatedVersion); - toolPath = yield this.downloadNodejs(toolName); - } - if (this.osPlat != 'win32') { - toolPath = path.join(toolPath, 'bin'); - } - core.addPath(toolPath); - }); - } - findVersionInDist(nodeJsVersions) { - return __awaiter(this, void 0, void 0, function* () { - if (!nodeJsVersions) { - nodeJsVersions = yield this.getNodeJsVersions(); - } - const versions = this.filterVersions(nodeJsVersions); - const evaluatedVersion = this.evaluateVersions(versions); - if (!evaluatedVersion) { - throw new Error(`Unable to find Node version '${this.nodeInfo.versionSpec}' for platform ${this.osPlat} and architecture ${this.nodeInfo.arch}.`); - } - return evaluatedVersion; - }); - } - evaluateVersions(versions) { - let version = ''; - const { range, options } = this.validRange(this.nodeInfo.versionSpec); - core.debug(`evaluating ${versions.length} versions`); - for (const potential of versions) { - const satisfied = semver_1.default.satisfies(potential, range, options); - if (satisfied) { - version = potential; - break; - } - } - if (version) { - core.debug(`matched: ${version}`); - } - else { - core.debug('match not found'); - } - return version; - } - findVersionInHostedToolCacheDirectory() { - return tc.find('node', this.nodeInfo.versionSpec, this.nodeInfo.arch); - } - getNodeJsVersions() { - return __awaiter(this, void 0, void 0, function* () { - const initialUrl = this.getDistributionUrl(); - const dataUrl = `${initialUrl}/index.json`; - const response = yield this.httpClient.getJson(dataUrl); - return response.result || []; - }); - } - getNodejsDistInfo(version) { - const osArch = this.translateArchToDistUrl(this.nodeInfo.arch); - version = semver_1.default.clean(version) || ''; - const fileName = this.osPlat == 'win32' - ? `node-v${version}-win-${osArch}` - : `node-v${version}-${this.osPlat}-${osArch}`; - const urlFileName = this.osPlat == 'win32' ? 
`${fileName}.7z` : `${fileName}.tar.gz`; - const initialUrl = this.getDistributionUrl(); - const url = `${initialUrl}/v${version}/${urlFileName}`; - return { - downloadUrl: url, - resolvedVersion: version, - arch: osArch, - fileName: fileName - }; - } - downloadNodejs(info) { - return __awaiter(this, void 0, void 0, function* () { - let downloadPath = ''; - core.info(`Acquiring ${info.resolvedVersion} - ${info.arch} from ${info.downloadUrl}`); - try { - downloadPath = yield tc.downloadTool(info.downloadUrl); - } - catch (err) { - if (err instanceof tc.HTTPError && - err.httpStatusCode == 404 && - this.osPlat == 'win32') { - return yield this.acquireWindowsNodeFromFallbackLocation(info.resolvedVersion, info.arch); - } - throw err; - } - const toolPath = yield this.extractArchive(downloadPath, info); - core.info('Done'); - return toolPath; - }); - } - validRange(versionSpec) { - var _a; - let options; - const c = semver_1.default.clean(versionSpec) || ''; - const valid = (_a = semver_1.default.valid(c)) !== null && _a !== void 0 ? _a : versionSpec; - return { range: valid, options }; - } - acquireWindowsNodeFromFallbackLocation(version, arch = os_1.default.arch()) { - return __awaiter(this, void 0, void 0, function* () { - const initialUrl = this.getDistributionUrl(); - const osArch = this.translateArchToDistUrl(arch); - // Create temporary folder to download in to - const tempDownloadFolder = 'temp_' + Math.floor(Math.random() * 2000000000); - const tempDirectory = process.env['RUNNER_TEMP'] || ''; - assert.ok(tempDirectory, 'Expected RUNNER_TEMP to be defined'); - const tempDir = path.join(tempDirectory, tempDownloadFolder); - yield io.mkdirP(tempDir); - let exeUrl; - let libUrl; - try { - exeUrl = `${initialUrl}/v${version}/win-${osArch}/node.exe`; - libUrl = `${initialUrl}/v${version}/win-${osArch}/node.lib`; - core.info(`Downloading only node binary from ${exeUrl}`); - const exePath = yield tc.downloadTool(exeUrl); - yield io.cp(exePath, path.join(tempDir, 'node.exe')); - const libPath = yield tc.downloadTool(libUrl); - yield io.cp(libPath, path.join(tempDir, 'node.lib')); - } - catch (err) { - if (err instanceof tc.HTTPError && err.httpStatusCode == 404) { - exeUrl = `${initialUrl}/v${version}/node.exe`; - libUrl = `${initialUrl}/v${version}/node.lib`; - const exePath = yield tc.downloadTool(exeUrl); - yield io.cp(exePath, path.join(tempDir, 'node.exe')); - const libPath = yield tc.downloadTool(libUrl); - yield io.cp(libPath, path.join(tempDir, 'node.lib')); - } - else { - throw err; - } - } - const toolPath = yield tc.cacheDir(tempDir, 'node', version, arch); - return toolPath; - }); - } - extractArchive(downloadPath, info) { - return __awaiter(this, void 0, void 0, function* () { - // - // Extract - // - core.info('Extracting ...'); - let extPath; - info = info || {}; // satisfy compiler, never null when reaches here - if (this.osPlat == 'win32') { - const _7zPath = path.join(__dirname, '../..', 'externals', '7zr.exe'); - extPath = yield tc.extract7z(downloadPath, undefined, _7zPath); - // 7z extracts to folder matching file name - const nestedPath = path.join(extPath, path.basename(info.fileName, '.7z')); - if (fs_1.default.existsSync(nestedPath)) { - extPath = nestedPath; - } - } - else { - extPath = yield tc.extractTar(downloadPath, undefined, [ - 'xz', - '--strip', - '1' - ]); - } - // - // Install into the local tool cache - node extracts with a root folder that matches the fileName downloaded - // - core.info('Adding to the cache ...'); - const toolPath = yield 
tc.cacheDir(extPath, 'node', info.resolvedVersion, info.arch); - return toolPath; - }); - } - getDistFileName() { - const osArch = this.translateArchToDistUrl(this.nodeInfo.arch); - // node offers a json list of versions - let dataFileName; - switch (this.osPlat) { - case 'linux': - dataFileName = `linux-${osArch}`; - break; - case 'darwin': - dataFileName = `osx-${osArch}-tar`; - break; - case 'win32': - dataFileName = `win-${osArch}-exe`; - break; - default: - throw new Error(`Unexpected OS '${this.osPlat}'`); - } - return dataFileName; - } - filterVersions(nodeJsVersions) { - const versions = []; - const dataFileName = this.getDistFileName(); - nodeJsVersions.forEach((nodeVersion) => { - // ensure this version supports your os and platform - if (nodeVersion.files.indexOf(dataFileName) >= 0) { - versions.push(nodeVersion.version); - } - }); - return versions.sort(semver_1.default.rcompare); - } - translateArchToDistUrl(arch) { - switch (arch) { - case 'arm': - return 'armv7l'; - default: - return arch; - } - } -} -exports["default"] = BaseDistribution; + +var __createBinding = (this && this.__createBinding) || (Object.create ? (function(o, m, k, k2) { + if (k2 === undefined) k2 = k; + Object.defineProperty(o, k2, { enumerable: true, get: function() { return m[k]; } }); +}) : (function(o, m, k, k2) { + if (k2 === undefined) k2 = k; + o[k2] = m[k]; +})); +var __setModuleDefault = (this && this.__setModuleDefault) || (Object.create ? (function(o, v) { + Object.defineProperty(o, "default", { enumerable: true, value: v }); +}) : function(o, v) { + o["default"] = v; +}); +var __importStar = (this && this.__importStar) || function (mod) { + if (mod && mod.__esModule) return mod; + var result = {}; + if (mod != null) for (var k in mod) if (k !== "default" && Object.prototype.hasOwnProperty.call(mod, k)) __createBinding(result, mod, k); + __setModuleDefault(result, mod); + return result; +}; +var __awaiter = (this && this.__awaiter) || function (thisArg, _arguments, P, generator) { + function adopt(value) { return value instanceof P ? value : new P(function (resolve) { resolve(value); }); } + return new (P || (P = Promise))(function (resolve, reject) { + function fulfilled(value) { try { step(generator.next(value)); } catch (e) { reject(e); } } + function rejected(value) { try { step(generator["throw"](value)); } catch (e) { reject(e); } } + function step(result) { result.done ? resolve(result.value) : adopt(result.value).then(fulfilled, rejected); } + step((generator = generator.apply(thisArg, _arguments || [])).next()); + }); +}; +var __importDefault = (this && this.__importDefault) || function (mod) { + return (mod && mod.__esModule) ? 
mod : { "default": mod }; +}; +Object.defineProperty(exports, "__esModule", ({ value: true })); +const tc = __importStar(__nccwpck_require__(7784)); +const hc = __importStar(__nccwpck_require__(9925)); +const core = __importStar(__nccwpck_require__(2186)); +const io = __importStar(__nccwpck_require__(7436)); +const semver_1 = __importDefault(__nccwpck_require__(5911)); +const assert = __importStar(__nccwpck_require__(9491)); +const path = __importStar(__nccwpck_require__(1017)); +const os_1 = __importDefault(__nccwpck_require__(2037)); +const fs_1 = __importDefault(__nccwpck_require__(7147)); +class BaseDistribution { + constructor(nodeInfo) { + this.nodeInfo = nodeInfo; + this.osPlat = os_1.default.platform(); + this.httpClient = new hc.HttpClient('setup-node', [], { + allowRetries: true, + maxRetries: 3 + }); + } + setupNodeJs() { + return __awaiter(this, void 0, void 0, function* () { + let nodeJsVersions; + if (this.nodeInfo.checkLatest) { + const evaluatedVersion = yield this.findVersionInDist(nodeJsVersions); + this.nodeInfo.versionSpec = evaluatedVersion; + } + let toolPath = this.findVersionInHostedToolCacheDirectory(); + if (toolPath) { + core.info(`Found in cache @ ${toolPath}`); + } + else { + const evaluatedVersion = yield this.findVersionInDist(nodeJsVersions); + const toolName = this.getNodejsDistInfo(evaluatedVersion); + toolPath = yield this.downloadNodejs(toolName); + } + if (this.osPlat != 'win32') { + toolPath = path.join(toolPath, 'bin'); + } + core.addPath(toolPath); + }); + } + findVersionInDist(nodeJsVersions) { + return __awaiter(this, void 0, void 0, function* () { + if (!nodeJsVersions) { + nodeJsVersions = yield this.getNodeJsVersions(); + } + const versions = this.filterVersions(nodeJsVersions); + const evaluatedVersion = this.evaluateVersions(versions); + if (!evaluatedVersion) { + throw new Error(`Unable to find Node version '${this.nodeInfo.versionSpec}' for platform ${this.osPlat} and architecture ${this.nodeInfo.arch}.`); + } + return evaluatedVersion; + }); + } + evaluateVersions(versions) { + let version = ''; + const { range, options } = this.validRange(this.nodeInfo.versionSpec); + core.debug(`evaluating ${versions.length} versions`); + for (const potential of versions) { + const satisfied = semver_1.default.satisfies(potential, range, options); + if (satisfied) { + version = potential; + break; + } + } + if (version) { + core.debug(`matched: ${version}`); + } + else { + core.debug('match not found'); + } + return version; + } + findVersionInHostedToolCacheDirectory() { + return tc.find('node', this.nodeInfo.versionSpec, this.translateArchToDistUrl(this.nodeInfo.arch)); + } + getNodeJsVersions() { + return __awaiter(this, void 0, void 0, function* () { + const initialUrl = this.getDistributionUrl(); + const dataUrl = `${initialUrl}/index.json`; + const response = yield this.httpClient.getJson(dataUrl); + return response.result || []; + }); + } + getNodejsDistInfo(version) { + const osArch = this.translateArchToDistUrl(this.nodeInfo.arch); + version = semver_1.default.clean(version) || ''; + const fileName = this.osPlat == 'win32' + ? `node-v${version}-win-${osArch}` + : `node-v${version}-${this.osPlat}-${osArch}`; + const urlFileName = this.osPlat == 'win32' ? 
`${fileName}.7z` : `${fileName}.tar.gz`; + const initialUrl = this.getDistributionUrl(); + const url = `${initialUrl}/v${version}/${urlFileName}`; + return { + downloadUrl: url, + resolvedVersion: version, + arch: osArch, + fileName: fileName + }; + } + downloadNodejs(info) { + return __awaiter(this, void 0, void 0, function* () { + let downloadPath = ''; + core.info(`Acquiring ${info.resolvedVersion} - ${info.arch} from ${info.downloadUrl}`); + try { + downloadPath = yield tc.downloadTool(info.downloadUrl); + } + catch (err) { + if (err instanceof tc.HTTPError && + err.httpStatusCode == 404 && + this.osPlat == 'win32') { + return yield this.acquireWindowsNodeFromFallbackLocation(info.resolvedVersion, info.arch); + } + throw err; + } + const toolPath = yield this.extractArchive(downloadPath, info); + core.info('Done'); + return toolPath; + }); + } + validRange(versionSpec) { + var _a; + let options; + const c = semver_1.default.clean(versionSpec) || ''; + const valid = (_a = semver_1.default.valid(c)) !== null && _a !== void 0 ? _a : versionSpec; + return { range: valid, options }; + } + acquireWindowsNodeFromFallbackLocation(version, arch = os_1.default.arch()) { + return __awaiter(this, void 0, void 0, function* () { + const initialUrl = this.getDistributionUrl(); + const osArch = this.translateArchToDistUrl(arch); + // Create temporary folder to download in to + const tempDownloadFolder = 'temp_' + Math.floor(Math.random() * 2000000000); + const tempDirectory = process.env['RUNNER_TEMP'] || ''; + assert.ok(tempDirectory, 'Expected RUNNER_TEMP to be defined'); + const tempDir = path.join(tempDirectory, tempDownloadFolder); + yield io.mkdirP(tempDir); + let exeUrl; + let libUrl; + try { + exeUrl = `${initialUrl}/v${version}/win-${osArch}/node.exe`; + libUrl = `${initialUrl}/v${version}/win-${osArch}/node.lib`; + core.info(`Downloading only node binary from ${exeUrl}`); + const exePath = yield tc.downloadTool(exeUrl); + yield io.cp(exePath, path.join(tempDir, 'node.exe')); + const libPath = yield tc.downloadTool(libUrl); + yield io.cp(libPath, path.join(tempDir, 'node.lib')); + } + catch (err) { + if (err instanceof tc.HTTPError && err.httpStatusCode == 404) { + exeUrl = `${initialUrl}/v${version}/node.exe`; + libUrl = `${initialUrl}/v${version}/node.lib`; + const exePath = yield tc.downloadTool(exeUrl); + yield io.cp(exePath, path.join(tempDir, 'node.exe')); + const libPath = yield tc.downloadTool(libUrl); + yield io.cp(libPath, path.join(tempDir, 'node.lib')); + } + else { + throw err; + } + } + const toolPath = yield tc.cacheDir(tempDir, 'node', version, arch); + return toolPath; + }); + } + extractArchive(downloadPath, info) { + return __awaiter(this, void 0, void 0, function* () { + // + // Extract + // + core.info('Extracting ...'); + let extPath; + info = info || {}; // satisfy compiler, never null when reaches here + if (this.osPlat == 'win32') { + const _7zPath = path.join(__dirname, '../..', 'externals', '7zr.exe'); + extPath = yield tc.extract7z(downloadPath, undefined, _7zPath); + // 7z extracts to folder matching file name + const nestedPath = path.join(extPath, path.basename(info.fileName, '.7z')); + if (fs_1.default.existsSync(nestedPath)) { + extPath = nestedPath; + } + } + else { + extPath = yield tc.extractTar(downloadPath, undefined, [ + 'xz', + '--strip', + '1' + ]); + } + // + // Install into the local tool cache - node extracts with a root folder that matches the fileName downloaded + // + core.info('Adding to the cache ...'); + const toolPath = yield 
tc.cacheDir(extPath, 'node', info.resolvedVersion, info.arch); + return toolPath; + }); + } + getDistFileName() { + const osArch = this.translateArchToDistUrl(this.nodeInfo.arch); + // node offers a json list of versions + let dataFileName; + switch (this.osPlat) { + case 'linux': + dataFileName = `linux-${osArch}`; + break; + case 'darwin': + dataFileName = `osx-${osArch}-tar`; + break; + case 'win32': + dataFileName = `win-${osArch}-exe`; + break; + default: + throw new Error(`Unexpected OS '${this.osPlat}'`); + } + return dataFileName; + } + filterVersions(nodeJsVersions) { + const versions = []; + const dataFileName = this.getDistFileName(); + nodeJsVersions.forEach((nodeVersion) => { + // ensure this version supports your os and platform + if (nodeVersion.files.indexOf(dataFileName) >= 0) { + versions.push(nodeVersion.version); + } + }); + return versions.sort(semver_1.default.rcompare); + } + translateArchToDistUrl(arch) { + switch (arch) { + case 'arm': + return 'armv7l'; + default: + return arch; + } + } +} +exports["default"] = BaseDistribution; /***/ }), @@ -71686,41 +71847,41 @@ exports["default"] = BaseDistribution; /***/ (function(__unused_webpack_module, exports, __nccwpck_require__) { "use strict"; - -var __importDefault = (this && this.__importDefault) || function (mod) { - return (mod && mod.__esModule) ? mod : { "default": mod }; -}; -Object.defineProperty(exports, "__esModule", ({ value: true })); -exports.getNodejsDistribution = void 0; -const nightly_builds_1 = __importDefault(__nccwpck_require__(7127)); -const official_builds_1 = __importDefault(__nccwpck_require__(7854)); -const rc_builds_1 = __importDefault(__nccwpck_require__(8837)); -const canary_builds_1 = __importDefault(__nccwpck_require__(969)); -var Distributions; -(function (Distributions) { - Distributions["DEFAULT"] = ""; - Distributions["CANARY"] = "v8-canary"; - Distributions["NIGHTLY"] = "nightly"; - Distributions["RC"] = "rc"; -})(Distributions || (Distributions = {})); -function getNodejsDistribution(installerOptions) { - const versionSpec = installerOptions.versionSpec; - let distribution; - if (versionSpec.includes(Distributions.NIGHTLY)) { - distribution = new nightly_builds_1.default(installerOptions); - } - else if (versionSpec.includes(Distributions.CANARY)) { - distribution = new canary_builds_1.default(installerOptions); - } - else if (versionSpec.includes(Distributions.RC)) { - distribution = new rc_builds_1.default(installerOptions); - } - else { - distribution = new official_builds_1.default(installerOptions); - } - return distribution; -} -exports.getNodejsDistribution = getNodejsDistribution; + +var __importDefault = (this && this.__importDefault) || function (mod) { + return (mod && mod.__esModule) ? 
mod : { "default": mod }; +}; +Object.defineProperty(exports, "__esModule", ({ value: true })); +exports.getNodejsDistribution = void 0; +const nightly_builds_1 = __importDefault(__nccwpck_require__(7127)); +const official_builds_1 = __importDefault(__nccwpck_require__(7854)); +const rc_builds_1 = __importDefault(__nccwpck_require__(8837)); +const canary_builds_1 = __importDefault(__nccwpck_require__(969)); +var Distributions; +(function (Distributions) { + Distributions["DEFAULT"] = ""; + Distributions["CANARY"] = "v8-canary"; + Distributions["NIGHTLY"] = "nightly"; + Distributions["RC"] = "rc"; +})(Distributions || (Distributions = {})); +function getNodejsDistribution(installerOptions) { + const versionSpec = installerOptions.versionSpec; + let distribution; + if (versionSpec.includes(Distributions.NIGHTLY)) { + distribution = new nightly_builds_1.default(installerOptions); + } + else if (versionSpec.includes(Distributions.CANARY)) { + distribution = new canary_builds_1.default(installerOptions); + } + else if (versionSpec.includes(Distributions.RC)) { + distribution = new rc_builds_1.default(installerOptions); + } + else { + distribution = new official_builds_1.default(installerOptions); + } + return distribution; +} +exports.getNodejsDistribution = getNodejsDistribution; /***/ }), @@ -71729,22 +71890,22 @@ exports.getNodejsDistribution = getNodejsDistribution; /***/ (function(__unused_webpack_module, exports, __nccwpck_require__) { "use strict"; - -var __importDefault = (this && this.__importDefault) || function (mod) { - return (mod && mod.__esModule) ? mod : { "default": mod }; -}; -Object.defineProperty(exports, "__esModule", ({ value: true })); -const base_distribution_prerelease_1 = __importDefault(__nccwpck_require__(957)); -class NightlyNodejs extends base_distribution_prerelease_1.default { - constructor(nodeInfo) { - super(nodeInfo); - this.distribution = 'nightly'; - } - getDistributionUrl() { - return 'https://nodejs.org/download/nightly'; - } -} -exports["default"] = NightlyNodejs; + +var __importDefault = (this && this.__importDefault) || function (mod) { + return (mod && mod.__esModule) ? mod : { "default": mod }; +}; +Object.defineProperty(exports, "__esModule", ({ value: true })); +const base_distribution_prerelease_1 = __importDefault(__nccwpck_require__(957)); +class NightlyNodejs extends base_distribution_prerelease_1.default { + constructor(nodeInfo) { + super(nodeInfo); + this.distribution = 'nightly'; + } + getDistributionUrl() { + return 'https://nodejs.org/download/nightly'; + } +} +exports["default"] = NightlyNodejs; /***/ }), @@ -71753,203 +71914,203 @@ exports["default"] = NightlyNodejs; /***/ (function(__unused_webpack_module, exports, __nccwpck_require__) { "use strict"; - -var __createBinding = (this && this.__createBinding) || (Object.create ? (function(o, m, k, k2) { - if (k2 === undefined) k2 = k; - Object.defineProperty(o, k2, { enumerable: true, get: function() { return m[k]; } }); -}) : (function(o, m, k, k2) { - if (k2 === undefined) k2 = k; - o[k2] = m[k]; -})); -var __setModuleDefault = (this && this.__setModuleDefault) || (Object.create ? 
(function(o, v) { - Object.defineProperty(o, "default", { enumerable: true, value: v }); -}) : function(o, v) { - o["default"] = v; -}); -var __importStar = (this && this.__importStar) || function (mod) { - if (mod && mod.__esModule) return mod; - var result = {}; - if (mod != null) for (var k in mod) if (k !== "default" && Object.prototype.hasOwnProperty.call(mod, k)) __createBinding(result, mod, k); - __setModuleDefault(result, mod); - return result; -}; -var __awaiter = (this && this.__awaiter) || function (thisArg, _arguments, P, generator) { - function adopt(value) { return value instanceof P ? value : new P(function (resolve) { resolve(value); }); } - return new (P || (P = Promise))(function (resolve, reject) { - function fulfilled(value) { try { step(generator.next(value)); } catch (e) { reject(e); } } - function rejected(value) { try { step(generator["throw"](value)); } catch (e) { reject(e); } } - function step(result) { result.done ? resolve(result.value) : adopt(result.value).then(fulfilled, rejected); } - step((generator = generator.apply(thisArg, _arguments || [])).next()); - }); -}; -var __importDefault = (this && this.__importDefault) || function (mod) { - return (mod && mod.__esModule) ? mod : { "default": mod }; -}; -Object.defineProperty(exports, "__esModule", ({ value: true })); -const core = __importStar(__nccwpck_require__(2186)); -const tc = __importStar(__nccwpck_require__(7784)); -const path_1 = __importDefault(__nccwpck_require__(1017)); -const base_distribution_1 = __importDefault(__nccwpck_require__(7)); -class OfficialBuilds extends base_distribution_1.default { - constructor(nodeInfo) { - super(nodeInfo); - } - setupNodeJs() { - return __awaiter(this, void 0, void 0, function* () { - let manifest; - let nodeJsVersions; - const osArch = this.translateArchToDistUrl(this.nodeInfo.arch); - if (this.isLtsAlias(this.nodeInfo.versionSpec)) { - core.info('Attempt to resolve LTS alias from manifest...'); - // No try-catch since it's not possible to resolve LTS alias without manifest - manifest = yield this.getManifest(); - this.nodeInfo.versionSpec = this.resolveLtsAliasFromManifest(this.nodeInfo.versionSpec, this.nodeInfo.stable, manifest); - } - if (this.isLatestSyntax(this.nodeInfo.versionSpec)) { - nodeJsVersions = yield this.getNodeJsVersions(); - const versions = this.filterVersions(nodeJsVersions); - this.nodeInfo.versionSpec = this.evaluateVersions(versions); - core.info('getting latest node version...'); - } - if (this.nodeInfo.checkLatest) { - core.info('Attempt to resolve the latest version from manifest...'); - const resolvedVersion = yield this.resolveVersionFromManifest(this.nodeInfo.versionSpec, this.nodeInfo.stable, osArch, manifest); - if (resolvedVersion) { - this.nodeInfo.versionSpec = resolvedVersion; - core.info(`Resolved as '${resolvedVersion}'`); - } - else { - core.info(`Failed to resolve version ${this.nodeInfo.versionSpec} from manifest`); - } - } - let toolPath = this.findVersionInHostedToolCacheDirectory(); - if (toolPath) { - core.info(`Found in cache @ ${toolPath}`); - } - else { - let downloadPath = ''; - try { - core.info(`Attempting to download ${this.nodeInfo.versionSpec}...`); - const versionInfo = yield this.getInfoFromManifest(this.nodeInfo.versionSpec, this.nodeInfo.stable, osArch, manifest); - if (versionInfo) { - core.info(`Acquiring ${versionInfo.resolvedVersion} - ${versionInfo.arch} from ${versionInfo.downloadUrl}`); - downloadPath = yield tc.downloadTool(versionInfo.downloadUrl, undefined, this.nodeInfo.auth); - if 
(downloadPath) { - toolPath = yield this.extractArchive(downloadPath, versionInfo); - } - } - else { - core.info('Not found in manifest. Falling back to download directly from Node'); - } - } - catch (err) { - // Rate limit? - if (err instanceof tc.HTTPError && - (err.httpStatusCode === 403 || err.httpStatusCode === 429)) { - core.info(`Received HTTP status code ${err.httpStatusCode}. This usually indicates the rate limit has been exceeded`); - } - else { - core.info(err.message); - } - core.debug(err.stack); - core.info('Falling back to download directly from Node'); - } - if (!toolPath) { - const nodeJsVersions = yield this.getNodeJsVersions(); - const versions = this.filterVersions(nodeJsVersions); - const evaluatedVersion = this.evaluateVersions(versions); - if (!evaluatedVersion) { - throw new Error(`Unable to find Node version '${this.nodeInfo.versionSpec}' for platform ${this.osPlat} and architecture ${this.nodeInfo.arch}.`); - } - const toolName = this.getNodejsDistInfo(evaluatedVersion); - toolPath = yield this.downloadNodejs(toolName); - } - } - if (this.osPlat != 'win32') { - toolPath = path_1.default.join(toolPath, 'bin'); - } - core.addPath(toolPath); - }); - } - evaluateVersions(versions) { - let version = ''; - if (this.isLatestSyntax(this.nodeInfo.versionSpec)) { - core.info(`getting latest node version...`); - return versions[0]; - } - version = super.evaluateVersions(versions); - return version; - } - getDistributionUrl() { - return `https://nodejs.org/dist`; - } - getManifest() { - core.debug('Getting manifest from actions/node-versions@main'); - return tc.getManifestFromRepo('actions', 'node-versions', this.nodeInfo.auth, 'main'); - } - resolveLtsAliasFromManifest(versionSpec, stable, manifest) { - var _a; - const alias = (_a = versionSpec.split('lts/')[1]) === null || _a === void 0 ? void 0 : _a.toLowerCase(); - if (!alias) { - throw new Error(`Unable to parse LTS alias for Node version '${versionSpec}'`); - } - core.debug(`LTS alias '${alias}' for Node version '${versionSpec}'`); - // Supported formats are `lts/`, `lts/*`, and `lts/-n`. Where asterisk means highest possible LTS and -n means the nth-highest. - const n = Number(alias); - const aliases = Object.fromEntries(manifest - .filter(x => x.lts && x.stable === stable) - .map(x => [x.lts.toLowerCase(), x]) - .reverse()); - const numbered = Object.values(aliases); - const release = alias === '*' - ? numbered[numbered.length - 1] - : n < 0 - ? numbered[numbered.length - 1 + n] - : aliases[alias]; - if (!release) { - throw new Error(`Unable to find LTS release '${alias}' for Node version '${versionSpec}'.`); - } - core.debug(`Found LTS release '${release.version}' for Node version '${versionSpec}'`); - return release.version.split('.')[0]; - } - resolveVersionFromManifest(versionSpec, stable, osArch, manifest) { - return __awaiter(this, void 0, void 0, function* () { - try { - const info = yield this.getInfoFromManifest(versionSpec, stable, osArch, manifest); - return info === null || info === void 0 ? 
void 0 : info.resolvedVersion; - } - catch (err) { - core.info('Unable to resolve version from manifest...'); - core.debug(err.message); - } - }); - } - getInfoFromManifest(versionSpec, stable, osArch, manifest) { - return __awaiter(this, void 0, void 0, function* () { - let info = null; - if (!manifest) { - core.debug('No manifest cached'); - manifest = yield this.getManifest(); - } - const rel = yield tc.findFromManifest(versionSpec, stable, manifest, osArch); - if (rel && rel.files.length > 0) { - info = {}; - info.resolvedVersion = rel.version; - info.arch = rel.files[0].arch; - info.downloadUrl = rel.files[0].download_url; - info.fileName = rel.files[0].filename; - } - return info; - }); - } - isLtsAlias(versionSpec) { - return versionSpec.startsWith('lts/'); - } - isLatestSyntax(versionSpec) { - return ['current', 'latest', 'node'].includes(versionSpec); - } -} -exports["default"] = OfficialBuilds; + +var __createBinding = (this && this.__createBinding) || (Object.create ? (function(o, m, k, k2) { + if (k2 === undefined) k2 = k; + Object.defineProperty(o, k2, { enumerable: true, get: function() { return m[k]; } }); +}) : (function(o, m, k, k2) { + if (k2 === undefined) k2 = k; + o[k2] = m[k]; +})); +var __setModuleDefault = (this && this.__setModuleDefault) || (Object.create ? (function(o, v) { + Object.defineProperty(o, "default", { enumerable: true, value: v }); +}) : function(o, v) { + o["default"] = v; +}); +var __importStar = (this && this.__importStar) || function (mod) { + if (mod && mod.__esModule) return mod; + var result = {}; + if (mod != null) for (var k in mod) if (k !== "default" && Object.prototype.hasOwnProperty.call(mod, k)) __createBinding(result, mod, k); + __setModuleDefault(result, mod); + return result; +}; +var __awaiter = (this && this.__awaiter) || function (thisArg, _arguments, P, generator) { + function adopt(value) { return value instanceof P ? value : new P(function (resolve) { resolve(value); }); } + return new (P || (P = Promise))(function (resolve, reject) { + function fulfilled(value) { try { step(generator.next(value)); } catch (e) { reject(e); } } + function rejected(value) { try { step(generator["throw"](value)); } catch (e) { reject(e); } } + function step(result) { result.done ? resolve(result.value) : adopt(result.value).then(fulfilled, rejected); } + step((generator = generator.apply(thisArg, _arguments || [])).next()); + }); +}; +var __importDefault = (this && this.__importDefault) || function (mod) { + return (mod && mod.__esModule) ? 
mod : { "default": mod }; +}; +Object.defineProperty(exports, "__esModule", ({ value: true })); +const core = __importStar(__nccwpck_require__(2186)); +const tc = __importStar(__nccwpck_require__(7784)); +const path_1 = __importDefault(__nccwpck_require__(1017)); +const base_distribution_1 = __importDefault(__nccwpck_require__(7)); +class OfficialBuilds extends base_distribution_1.default { + constructor(nodeInfo) { + super(nodeInfo); + } + setupNodeJs() { + return __awaiter(this, void 0, void 0, function* () { + let manifest; + let nodeJsVersions; + const osArch = this.translateArchToDistUrl(this.nodeInfo.arch); + if (this.isLtsAlias(this.nodeInfo.versionSpec)) { + core.info('Attempt to resolve LTS alias from manifest...'); + // No try-catch since it's not possible to resolve LTS alias without manifest + manifest = yield this.getManifest(); + this.nodeInfo.versionSpec = this.resolveLtsAliasFromManifest(this.nodeInfo.versionSpec, this.nodeInfo.stable, manifest); + } + if (this.isLatestSyntax(this.nodeInfo.versionSpec)) { + nodeJsVersions = yield this.getNodeJsVersions(); + const versions = this.filterVersions(nodeJsVersions); + this.nodeInfo.versionSpec = this.evaluateVersions(versions); + core.info('getting latest node version...'); + } + if (this.nodeInfo.checkLatest) { + core.info('Attempt to resolve the latest version from manifest...'); + const resolvedVersion = yield this.resolveVersionFromManifest(this.nodeInfo.versionSpec, this.nodeInfo.stable, osArch, manifest); + if (resolvedVersion) { + this.nodeInfo.versionSpec = resolvedVersion; + core.info(`Resolved as '${resolvedVersion}'`); + } + else { + core.info(`Failed to resolve version ${this.nodeInfo.versionSpec} from manifest`); + } + } + let toolPath = this.findVersionInHostedToolCacheDirectory(); + if (toolPath) { + core.info(`Found in cache @ ${toolPath}`); + } + else { + let downloadPath = ''; + try { + core.info(`Attempting to download ${this.nodeInfo.versionSpec}...`); + const versionInfo = yield this.getInfoFromManifest(this.nodeInfo.versionSpec, this.nodeInfo.stable, osArch, manifest); + if (versionInfo) { + core.info(`Acquiring ${versionInfo.resolvedVersion} - ${versionInfo.arch} from ${versionInfo.downloadUrl}`); + downloadPath = yield tc.downloadTool(versionInfo.downloadUrl, undefined, this.nodeInfo.auth); + if (downloadPath) { + toolPath = yield this.extractArchive(downloadPath, versionInfo); + } + } + else { + core.info('Not found in manifest. Falling back to download directly from Node'); + } + } + catch (err) { + // Rate limit? + if (err instanceof tc.HTTPError && + (err.httpStatusCode === 403 || err.httpStatusCode === 429)) { + core.info(`Received HTTP status code ${err.httpStatusCode}. 
This usually indicates the rate limit has been exceeded`); + } + else { + core.info(err.message); + } + core.debug(err.stack); + core.info('Falling back to download directly from Node'); + } + if (!toolPath) { + const nodeJsVersions = yield this.getNodeJsVersions(); + const versions = this.filterVersions(nodeJsVersions); + const evaluatedVersion = this.evaluateVersions(versions); + if (!evaluatedVersion) { + throw new Error(`Unable to find Node version '${this.nodeInfo.versionSpec}' for platform ${this.osPlat} and architecture ${this.nodeInfo.arch}.`); + } + const toolName = this.getNodejsDistInfo(evaluatedVersion); + toolPath = yield this.downloadNodejs(toolName); + } + } + if (this.osPlat != 'win32') { + toolPath = path_1.default.join(toolPath, 'bin'); + } + core.addPath(toolPath); + }); + } + evaluateVersions(versions) { + let version = ''; + if (this.isLatestSyntax(this.nodeInfo.versionSpec)) { + core.info(`getting latest node version...`); + return versions[0]; + } + version = super.evaluateVersions(versions); + return version; + } + getDistributionUrl() { + return `https://nodejs.org/dist`; + } + getManifest() { + core.debug('Getting manifest from actions/node-versions@main'); + return tc.getManifestFromRepo('actions', 'node-versions', this.nodeInfo.auth, 'main'); + } + resolveLtsAliasFromManifest(versionSpec, stable, manifest) { + var _a; + const alias = (_a = versionSpec.split('lts/')[1]) === null || _a === void 0 ? void 0 : _a.toLowerCase(); + if (!alias) { + throw new Error(`Unable to parse LTS alias for Node version '${versionSpec}'`); + } + core.debug(`LTS alias '${alias}' for Node version '${versionSpec}'`); + // Supported formats are `lts/`, `lts/*`, and `lts/-n`. Where asterisk means highest possible LTS and -n means the nth-highest. + const n = Number(alias); + const aliases = Object.fromEntries(manifest + .filter(x => x.lts && x.stable === stable) + .map(x => [x.lts.toLowerCase(), x]) + .reverse()); + const numbered = Object.values(aliases); + const release = alias === '*' + ? numbered[numbered.length - 1] + : n < 0 + ? numbered[numbered.length - 1 + n] + : aliases[alias]; + if (!release) { + throw new Error(`Unable to find LTS release '${alias}' for Node version '${versionSpec}'.`); + } + core.debug(`Found LTS release '${release.version}' for Node version '${versionSpec}'`); + return release.version.split('.')[0]; + } + resolveVersionFromManifest(versionSpec, stable, osArch, manifest) { + return __awaiter(this, void 0, void 0, function* () { + try { + const info = yield this.getInfoFromManifest(versionSpec, stable, osArch, manifest); + return info === null || info === void 0 ? 
void 0 : info.resolvedVersion; + } + catch (err) { + core.info('Unable to resolve version from manifest...'); + core.debug(err.message); + } + }); + } + getInfoFromManifest(versionSpec, stable, osArch, manifest) { + return __awaiter(this, void 0, void 0, function* () { + let info = null; + if (!manifest) { + core.debug('No manifest cached'); + manifest = yield this.getManifest(); + } + const rel = yield tc.findFromManifest(versionSpec, stable, manifest, osArch); + if (rel && rel.files.length > 0) { + info = {}; + info.resolvedVersion = rel.version; + info.arch = rel.files[0].arch; + info.downloadUrl = rel.files[0].download_url; + info.fileName = rel.files[0].filename; + } + return info; + }); + } + isLtsAlias(versionSpec) { + return versionSpec.startsWith('lts/'); + } + isLatestSyntax(versionSpec) { + return ['current', 'latest', 'node'].includes(versionSpec); + } +} +exports["default"] = OfficialBuilds; /***/ }), @@ -71958,21 +72119,21 @@ exports["default"] = OfficialBuilds; /***/ (function(__unused_webpack_module, exports, __nccwpck_require__) { "use strict"; - -var __importDefault = (this && this.__importDefault) || function (mod) { - return (mod && mod.__esModule) ? mod : { "default": mod }; -}; -Object.defineProperty(exports, "__esModule", ({ value: true })); -const base_distribution_1 = __importDefault(__nccwpck_require__(7)); -class RcBuild extends base_distribution_1.default { - constructor(nodeInfo) { - super(nodeInfo); - } - getDistributionUrl() { - return 'https://nodejs.org/download/rc'; - } -} -exports["default"] = RcBuild; + +var __importDefault = (this && this.__importDefault) || function (mod) { + return (mod && mod.__esModule) ? mod : { "default": mod }; +}; +Object.defineProperty(exports, "__esModule", ({ value: true })); +const base_distribution_1 = __importDefault(__nccwpck_require__(7)); +class RcBuild extends base_distribution_1.default { + constructor(nodeInfo) { + super(nodeInfo); + } + getDistributionUrl() { + return 'https://nodejs.org/download/rc'; + } +} +exports["default"] = RcBuild; /***/ }), @@ -71981,22 +72142,22 @@ exports["default"] = RcBuild; /***/ (function(__unused_webpack_module, exports, __nccwpck_require__) { "use strict"; - -var __importDefault = (this && this.__importDefault) || function (mod) { - return (mod && mod.__esModule) ? mod : { "default": mod }; -}; -Object.defineProperty(exports, "__esModule", ({ value: true })); -const base_distribution_prerelease_1 = __importDefault(__nccwpck_require__(957)); -class CanaryBuild extends base_distribution_prerelease_1.default { - constructor(nodeInfo) { - super(nodeInfo); - this.distribution = 'v8-canary'; - } - getDistributionUrl() { - return 'https://nodejs.org/download/v8-canary'; - } -} -exports["default"] = CanaryBuild; + +var __importDefault = (this && this.__importDefault) || function (mod) { + return (mod && mod.__esModule) ? mod : { "default": mod }; +}; +Object.defineProperty(exports, "__esModule", ({ value: true })); +const base_distribution_prerelease_1 = __importDefault(__nccwpck_require__(957)); +class CanaryBuild extends base_distribution_prerelease_1.default { + constructor(nodeInfo) { + super(nodeInfo); + this.distribution = 'v8-canary'; + } + getDistributionUrl() { + return 'https://nodejs.org/download/v8-canary'; + } +} +exports["default"] = CanaryBuild; /***/ }), @@ -72005,122 +72166,122 @@ exports["default"] = CanaryBuild; /***/ (function(__unused_webpack_module, exports, __nccwpck_require__) { "use strict"; - -var __createBinding = (this && this.__createBinding) || (Object.create ? 
(function(o, m, k, k2) { - if (k2 === undefined) k2 = k; - Object.defineProperty(o, k2, { enumerable: true, get: function() { return m[k]; } }); -}) : (function(o, m, k, k2) { - if (k2 === undefined) k2 = k; - o[k2] = m[k]; -})); -var __setModuleDefault = (this && this.__setModuleDefault) || (Object.create ? (function(o, v) { - Object.defineProperty(o, "default", { enumerable: true, value: v }); -}) : function(o, v) { - o["default"] = v; -}); -var __importStar = (this && this.__importStar) || function (mod) { - if (mod && mod.__esModule) return mod; - var result = {}; - if (mod != null) for (var k in mod) if (k !== "default" && Object.prototype.hasOwnProperty.call(mod, k)) __createBinding(result, mod, k); - __setModuleDefault(result, mod); - return result; -}; -var __awaiter = (this && this.__awaiter) || function (thisArg, _arguments, P, generator) { - function adopt(value) { return value instanceof P ? value : new P(function (resolve) { resolve(value); }); } - return new (P || (P = Promise))(function (resolve, reject) { - function fulfilled(value) { try { step(generator.next(value)); } catch (e) { reject(e); } } - function rejected(value) { try { step(generator["throw"](value)); } catch (e) { reject(e); } } - function step(result) { result.done ? resolve(result.value) : adopt(result.value).then(fulfilled, rejected); } - step((generator = generator.apply(thisArg, _arguments || [])).next()); - }); -}; -var __importDefault = (this && this.__importDefault) || function (mod) { - return (mod && mod.__esModule) ? mod : { "default": mod }; -}; -Object.defineProperty(exports, "__esModule", ({ value: true })); -exports.run = void 0; -const core = __importStar(__nccwpck_require__(2186)); -const fs_1 = __importDefault(__nccwpck_require__(7147)); -const os_1 = __importDefault(__nccwpck_require__(2037)); -const auth = __importStar(__nccwpck_require__(7573)); -const path = __importStar(__nccwpck_require__(1017)); -const cache_restore_1 = __nccwpck_require__(9517); -const cache_utils_1 = __nccwpck_require__(1678); -const installer_factory_1 = __nccwpck_require__(5617); -const util_1 = __nccwpck_require__(2629); -function run() { - return __awaiter(this, void 0, void 0, function* () { - try { - // - // Version is optional. If supplied, install / use from the tool cache - // If not supplied then task is still used to setup proxy, auth, etc... - // - const version = resolveVersionInput(); - let arch = core.getInput('architecture'); - const cache = core.getInput('cache'); - // if architecture supplied but node-version is not - // if we don't throw a warning, the already installed x64 node will be used which is not probably what user meant. - if (arch && !version) { - core.warning('`architecture` is provided but `node-version` is missing. In this configuration, the version/architecture of Node will not be changed. To fix this, provide `architecture` in combination with `node-version`'); - } - if (!arch) { - arch = os_1.default.arch(); - } - if (version) { - const token = core.getInput('token'); - const auth = !token ? 
undefined : `token ${token}`; - const stable = (core.getInput('stable') || 'true').toUpperCase() === 'TRUE'; - const checkLatest = (core.getInput('check-latest') || 'false').toUpperCase() === 'TRUE'; - const nodejsInfo = { - versionSpec: version, - checkLatest, - auth, - stable, - arch - }; - const nodeDistribution = installer_factory_1.getNodejsDistribution(nodejsInfo); - yield nodeDistribution.setupNodeJs(); - } - yield util_1.printEnvDetailsAndSetOutput(); - const registryUrl = core.getInput('registry-url'); - const alwaysAuth = core.getInput('always-auth'); - if (registryUrl) { - auth.configAuthentication(registryUrl, alwaysAuth); - } - if (cache && cache_utils_1.isCacheFeatureAvailable()) { - const cacheDependencyPath = core.getInput('cache-dependency-path'); - yield cache_restore_1.restoreCache(cache, cacheDependencyPath); - } - const matchersPath = path.join(__dirname, '../..', '.github'); - core.info(`##[add-matcher]${path.join(matchersPath, 'tsc.json')}`); - core.info(`##[add-matcher]${path.join(matchersPath, 'eslint-stylish.json')}`); - core.info(`##[add-matcher]${path.join(matchersPath, 'eslint-compact.json')}`); - } - catch (err) { - core.setFailed(err.message); - } - }); -} -exports.run = run; -function resolveVersionInput() { - let version = core.getInput('node-version'); - const versionFileInput = core.getInput('node-version-file'); - if (version && versionFileInput) { - core.warning('Both node-version and node-version-file inputs are specified, only node-version will be used'); - } - if (version) { - return version; - } - if (versionFileInput) { - const versionFilePath = path.join(process.env.GITHUB_WORKSPACE, versionFileInput); - if (!fs_1.default.existsSync(versionFilePath)) { - throw new Error(`The specified node version file at: ${versionFilePath} does not exist`); - } - version = util_1.parseNodeVersionFile(fs_1.default.readFileSync(versionFilePath, 'utf8')); - core.info(`Resolved ${versionFileInput} as ${version}`); - } - return version; -} + +var __createBinding = (this && this.__createBinding) || (Object.create ? (function(o, m, k, k2) { + if (k2 === undefined) k2 = k; + Object.defineProperty(o, k2, { enumerable: true, get: function() { return m[k]; } }); +}) : (function(o, m, k, k2) { + if (k2 === undefined) k2 = k; + o[k2] = m[k]; +})); +var __setModuleDefault = (this && this.__setModuleDefault) || (Object.create ? (function(o, v) { + Object.defineProperty(o, "default", { enumerable: true, value: v }); +}) : function(o, v) { + o["default"] = v; +}); +var __importStar = (this && this.__importStar) || function (mod) { + if (mod && mod.__esModule) return mod; + var result = {}; + if (mod != null) for (var k in mod) if (k !== "default" && Object.prototype.hasOwnProperty.call(mod, k)) __createBinding(result, mod, k); + __setModuleDefault(result, mod); + return result; +}; +var __awaiter = (this && this.__awaiter) || function (thisArg, _arguments, P, generator) { + function adopt(value) { return value instanceof P ? value : new P(function (resolve) { resolve(value); }); } + return new (P || (P = Promise))(function (resolve, reject) { + function fulfilled(value) { try { step(generator.next(value)); } catch (e) { reject(e); } } + function rejected(value) { try { step(generator["throw"](value)); } catch (e) { reject(e); } } + function step(result) { result.done ? 
resolve(result.value) : adopt(result.value).then(fulfilled, rejected); } + step((generator = generator.apply(thisArg, _arguments || [])).next()); + }); +}; +var __importDefault = (this && this.__importDefault) || function (mod) { + return (mod && mod.__esModule) ? mod : { "default": mod }; +}; +Object.defineProperty(exports, "__esModule", ({ value: true })); +exports.run = void 0; +const core = __importStar(__nccwpck_require__(2186)); +const fs_1 = __importDefault(__nccwpck_require__(7147)); +const os_1 = __importDefault(__nccwpck_require__(2037)); +const auth = __importStar(__nccwpck_require__(7573)); +const path = __importStar(__nccwpck_require__(1017)); +const cache_restore_1 = __nccwpck_require__(9517); +const cache_utils_1 = __nccwpck_require__(1678); +const installer_factory_1 = __nccwpck_require__(5617); +const util_1 = __nccwpck_require__(2629); +function run() { + return __awaiter(this, void 0, void 0, function* () { + try { + // + // Version is optional. If supplied, install / use from the tool cache + // If not supplied then task is still used to setup proxy, auth, etc... + // + const version = resolveVersionInput(); + let arch = core.getInput('architecture'); + const cache = core.getInput('cache'); + // if architecture supplied but node-version is not + // if we don't throw a warning, the already installed x64 node will be used which is not probably what user meant. + if (arch && !version) { + core.warning('`architecture` is provided but `node-version` is missing. In this configuration, the version/architecture of Node will not be changed. To fix this, provide `architecture` in combination with `node-version`'); + } + if (!arch) { + arch = os_1.default.arch(); + } + if (version) { + const token = core.getInput('token'); + const auth = !token ? 
undefined : `token ${token}`; + const stable = (core.getInput('stable') || 'true').toUpperCase() === 'TRUE'; + const checkLatest = (core.getInput('check-latest') || 'false').toUpperCase() === 'TRUE'; + const nodejsInfo = { + versionSpec: version, + checkLatest, + auth, + stable, + arch + }; + const nodeDistribution = installer_factory_1.getNodejsDistribution(nodejsInfo); + yield nodeDistribution.setupNodeJs(); + } + yield util_1.printEnvDetailsAndSetOutput(); + const registryUrl = core.getInput('registry-url'); + const alwaysAuth = core.getInput('always-auth'); + if (registryUrl) { + auth.configAuthentication(registryUrl, alwaysAuth); + } + if (cache && cache_utils_1.isCacheFeatureAvailable()) { + const cacheDependencyPath = core.getInput('cache-dependency-path'); + yield cache_restore_1.restoreCache(cache, cacheDependencyPath); + } + const matchersPath = path.join(__dirname, '../..', '.github'); + core.info(`##[add-matcher]${path.join(matchersPath, 'tsc.json')}`); + core.info(`##[add-matcher]${path.join(matchersPath, 'eslint-stylish.json')}`); + core.info(`##[add-matcher]${path.join(matchersPath, 'eslint-compact.json')}`); + } + catch (err) { + core.setFailed(err.message); + } + }); +} +exports.run = run; +function resolveVersionInput() { + let version = core.getInput('node-version'); + const versionFileInput = core.getInput('node-version-file'); + if (version && versionFileInput) { + core.warning('Both node-version and node-version-file inputs are specified, only node-version will be used'); + } + if (version) { + return version; + } + if (versionFileInput) { + const versionFilePath = path.join(process.env.GITHUB_WORKSPACE, versionFileInput); + if (!fs_1.default.existsSync(versionFilePath)) { + throw new Error(`The specified node version file at: ${versionFilePath} does not exist`); + } + version = util_1.parseNodeVersionFile(fs_1.default.readFileSync(versionFilePath, 'utf8')); + core.info(`Resolved ${versionFileInput} as ${version}`); + } + return version; +} /***/ }), @@ -72129,98 +72290,108 @@ function resolveVersionInput() { /***/ (function(__unused_webpack_module, exports, __nccwpck_require__) { "use strict"; - -var __createBinding = (this && this.__createBinding) || (Object.create ? (function(o, m, k, k2) { - if (k2 === undefined) k2 = k; - Object.defineProperty(o, k2, { enumerable: true, get: function() { return m[k]; } }); -}) : (function(o, m, k, k2) { - if (k2 === undefined) k2 = k; - o[k2] = m[k]; -})); -var __setModuleDefault = (this && this.__setModuleDefault) || (Object.create ? (function(o, v) { - Object.defineProperty(o, "default", { enumerable: true, value: v }); -}) : function(o, v) { - o["default"] = v; -}); -var __importStar = (this && this.__importStar) || function (mod) { - if (mod && mod.__esModule) return mod; - var result = {}; - if (mod != null) for (var k in mod) if (k !== "default" && Object.prototype.hasOwnProperty.call(mod, k)) __createBinding(result, mod, k); - __setModuleDefault(result, mod); - return result; -}; -var __awaiter = (this && this.__awaiter) || function (thisArg, _arguments, P, generator) { - function adopt(value) { return value instanceof P ? value : new P(function (resolve) { resolve(value); }); } - return new (P || (P = Promise))(function (resolve, reject) { - function fulfilled(value) { try { step(generator.next(value)); } catch (e) { reject(e); } } - function rejected(value) { try { step(generator["throw"](value)); } catch (e) { reject(e); } } - function step(result) { result.done ? 
resolve(result.value) : adopt(result.value).then(fulfilled, rejected); } - step((generator = generator.apply(thisArg, _arguments || [])).next()); - }); -}; -Object.defineProperty(exports, "__esModule", ({ value: true })); -exports.printEnvDetailsAndSetOutput = exports.parseNodeVersionFile = void 0; -const core = __importStar(__nccwpck_require__(2186)); -const exec = __importStar(__nccwpck_require__(1514)); -function parseNodeVersionFile(contents) { - var _a, _b, _c; - let nodeVersion; - // Try parsing the file as an NPM `package.json` file. - try { - nodeVersion = (_a = JSON.parse(contents).volta) === null || _a === void 0 ? void 0 : _a.node; - if (!nodeVersion) - nodeVersion = (_b = JSON.parse(contents).engines) === null || _b === void 0 ? void 0 : _b.node; - } - catch (_d) { - core.info('Node version file is not JSON file'); - } - if (!nodeVersion) { - const found = contents.match(/^(?:nodejs\s+)?v?(?[^\s]+)$/m); - nodeVersion = (_c = found === null || found === void 0 ? void 0 : found.groups) === null || _c === void 0 ? void 0 : _c.version; - } - // In the case of an unknown format, - // return as is and evaluate the version separately. - if (!nodeVersion) - nodeVersion = contents.trim(); - return nodeVersion; -} -exports.parseNodeVersionFile = parseNodeVersionFile; -function printEnvDetailsAndSetOutput() { - return __awaiter(this, void 0, void 0, function* () { - core.startGroup('Environment details'); - const promises = ['node', 'npm', 'yarn'].map((tool) => __awaiter(this, void 0, void 0, function* () { - const output = yield getToolVersion(tool, ['--version']); - return { tool, output }; - })); - const tools = yield Promise.all(promises); - tools.forEach(({ tool, output }) => { - if (tool === 'node') { - core.setOutput(`${tool}-version`, output); - } - core.info(`${tool}: ${output}`); - }); - core.endGroup(); - }); -} -exports.printEnvDetailsAndSetOutput = printEnvDetailsAndSetOutput; -function getToolVersion(tool, options) { - return __awaiter(this, void 0, void 0, function* () { - try { - const { stdout, stderr, exitCode } = yield exec.getExecOutput(tool, options, { - ignoreReturnCode: true, - silent: true - }); - if (exitCode > 0) { - core.info(`[warning]${stderr}`); - return ''; - } - return stdout.trim(); - } - catch (err) { - return ''; - } - }); -} + +var __createBinding = (this && this.__createBinding) || (Object.create ? (function(o, m, k, k2) { + if (k2 === undefined) k2 = k; + Object.defineProperty(o, k2, { enumerable: true, get: function() { return m[k]; } }); +}) : (function(o, m, k, k2) { + if (k2 === undefined) k2 = k; + o[k2] = m[k]; +})); +var __setModuleDefault = (this && this.__setModuleDefault) || (Object.create ? (function(o, v) { + Object.defineProperty(o, "default", { enumerable: true, value: v }); +}) : function(o, v) { + o["default"] = v; +}); +var __importStar = (this && this.__importStar) || function (mod) { + if (mod && mod.__esModule) return mod; + var result = {}; + if (mod != null) for (var k in mod) if (k !== "default" && Object.prototype.hasOwnProperty.call(mod, k)) __createBinding(result, mod, k); + __setModuleDefault(result, mod); + return result; +}; +var __awaiter = (this && this.__awaiter) || function (thisArg, _arguments, P, generator) { + function adopt(value) { return value instanceof P ? 
value : new P(function (resolve) { resolve(value); }); } + return new (P || (P = Promise))(function (resolve, reject) { + function fulfilled(value) { try { step(generator.next(value)); } catch (e) { reject(e); } } + function rejected(value) { try { step(generator["throw"](value)); } catch (e) { reject(e); } } + function step(result) { result.done ? resolve(result.value) : adopt(result.value).then(fulfilled, rejected); } + step((generator = generator.apply(thisArg, _arguments || [])).next()); + }); +}; +Object.defineProperty(exports, "__esModule", ({ value: true })); +exports.unique = exports.printEnvDetailsAndSetOutput = exports.parseNodeVersionFile = void 0; +const core = __importStar(__nccwpck_require__(2186)); +const exec = __importStar(__nccwpck_require__(1514)); +function parseNodeVersionFile(contents) { + var _a, _b, _c; + let nodeVersion; + // Try parsing the file as an NPM `package.json` file. + try { + nodeVersion = (_a = JSON.parse(contents).volta) === null || _a === void 0 ? void 0 : _a.node; + if (!nodeVersion) + nodeVersion = (_b = JSON.parse(contents).engines) === null || _b === void 0 ? void 0 : _b.node; + } + catch (_d) { + core.info('Node version file is not JSON file'); + } + if (!nodeVersion) { + const found = contents.match(/^(?:node(js)?\s+)?v?(?[^\s]+)$/m); + nodeVersion = (_c = found === null || found === void 0 ? void 0 : found.groups) === null || _c === void 0 ? void 0 : _c.version; + } + // In the case of an unknown format, + // return as is and evaluate the version separately. + if (!nodeVersion) + nodeVersion = contents.trim(); + return nodeVersion; +} +exports.parseNodeVersionFile = parseNodeVersionFile; +function printEnvDetailsAndSetOutput() { + return __awaiter(this, void 0, void 0, function* () { + core.startGroup('Environment details'); + const promises = ['node', 'npm', 'yarn'].map((tool) => __awaiter(this, void 0, void 0, function* () { + const output = yield getToolVersion(tool, ['--version']); + return { tool, output }; + })); + const tools = yield Promise.all(promises); + tools.forEach(({ tool, output }) => { + if (tool === 'node') { + core.setOutput(`${tool}-version`, output); + } + core.info(`${tool}: ${output}`); + }); + core.endGroup(); + }); +} +exports.printEnvDetailsAndSetOutput = printEnvDetailsAndSetOutput; +function getToolVersion(tool, options) { + return __awaiter(this, void 0, void 0, function* () { + try { + const { stdout, stderr, exitCode } = yield exec.getExecOutput(tool, options, { + ignoreReturnCode: true, + silent: true + }); + if (exitCode > 0) { + core.info(`[warning]${stderr}`); + return ''; + } + return stdout.trim(); + } + catch (err) { + return ''; + } + }); +} +const unique = () => { + const encountered = new Set(); + return (value) => { + if (encountered.has(value)) + return false; + encountered.add(value); + return true; + }; +}; +exports.unique = unique; /***/ }), @@ -72468,10 +72639,10 @@ var __webpack_exports__ = {}; (() => { "use strict"; var exports = __webpack_exports__; - -Object.defineProperty(exports, "__esModule", ({ value: true })); -const main_1 = __nccwpck_require__(399); -main_1.run(); + +Object.defineProperty(exports, "__esModule", ({ value: true })); +const main_1 = __nccwpck_require__(399); +main_1.run(); })(); diff --git a/docs/advanced-usage.md b/docs/advanced-usage.md index 0edc6e16..fb40e844 100644 --- a/docs/advanced-usage.md +++ b/docs/advanced-usage.md @@ -261,10 +261,14 @@ steps: with: node-version: '14' cache: 'pnpm' -- run: pnpm install --frozen-lockfile +- run: pnpm install - run: pnpm test ``` 
+> **Note**: By default `--frozen-lockfile` option is passed starting from pnpm `6.10.x`. It will be automatically added if you run it on [CI](https://pnpm.io/cli/install#--frozen-lockfile). +> If the `pnpm-lock.yaml` file changes then pass `--frozen-lockfile` option. + + **Using wildcard patterns to cache dependencies** ```yaml steps: @@ -401,11 +405,14 @@ steps: yarn config set npmScopes.my-org.npmAlwaysAuth true yarn config set npmScopes.my-org.npmAuthToken $NPM_AUTH_TOKEN env: - NPM_AUTH_TOKEN: ${{ secrets.YARN_TOKEN }} + NPM_AUTH_TOKEN: ${{ secrets.GITHUB_TOKEN }} - name: Install dependencies run: yarn install --immutable ``` -NOTE: As per https://github.com/actions/setup-node/issues/49 you cannot use `secrets.GITHUB_TOKEN` to access private GitHub Packages within the same organisation but in a different repository. + +To access private GitHub Packages within the same organization, go to "Manage Actions access" in Package settings and set the repositories you want to access. + +Please refer to the [Ensuring workflow access to your package - Configuring a package's access control and visibility](https://docs.github.com/en/packages/learn-github-packages/configuring-a-packages-access-control-and-visibility#ensuring-workflow-access-to-your-package) for more details. ### always-auth input The always-auth input sets `always-auth=true` in .npmrc file. With this option set [npm](https://docs.npmjs.com/cli/v6/using-npm/config#always-auth)/yarn sends the authentication credentials when making a request to the registries. diff --git a/package-lock.json b/package-lock.json index af5e0d2b..991ca520 100644 --- a/package-lock.json +++ b/package-lock.json @@ -17,7 +17,7 @@ "@actions/http-client": "^1.0.11", "@actions/io": "^1.0.2", "@actions/tool-cache": "^1.5.4", - "semver": "^6.1.1" + "semver": "^6.3.1" }, "devDependencies": { "@types/jest": "^27.0.2", @@ -39,17 +39,18 @@ } }, "node_modules/@actions/cache": { - "version": "3.0.4", - "resolved": "https://registry.npmjs.org/@actions/cache/-/cache-3.0.4.tgz", - "integrity": "sha512-9RwVL8/ISJoYWFNH1wR/C26E+M3HDkGPWmbFJMMCKwTkjbNZJreMT4XaR/EB1bheIvN4PREQxEQQVJ18IPnf/Q==", + "version": "3.2.1", + "resolved": "https://registry.npmjs.org/@actions/cache/-/cache-3.2.1.tgz", + "integrity": "sha512-QurbMiY//02+0kN1adJkMHN44RcZ5kAXfhSnKUZmtSmhMTNqLitGArG1xOkt93NNyByTlLGAc5wIOF/dZ2ENOQ==", "dependencies": { - "@actions/core": "^1.2.6", + "@actions/core": "^1.10.0", "@actions/exec": "^1.0.1", "@actions/glob": "^0.1.0", "@actions/http-client": "^2.0.1", "@actions/io": "^1.0.1", + "@azure/abort-controller": "^1.1.0", "@azure/ms-rest-js": "^2.6.0", - "@azure/storage-blob": "^12.8.0", + "@azure/storage-blob": "^12.13.0", "semver": "^6.1.0", "uuid": "^3.3.3" } @@ -158,14 +159,14 @@ } }, "node_modules/@azure/abort-controller": { - "version": "1.0.4", - "resolved": "https://registry.npmjs.org/@azure/abort-controller/-/abort-controller-1.0.4.tgz", - "integrity": "sha512-lNUmDRVGpanCsiUN3NWxFTdwmdFI53xwhkTFfHDGTYk46ca7Ind3nanJc+U6Zj9Tv+9nTCWRBscWEW1DyKOpTw==", + "version": "1.1.0", + "resolved": "https://registry.npmjs.org/@azure/abort-controller/-/abort-controller-1.1.0.tgz", + "integrity": "sha512-TrRLIoSQVzfAJX9H1JeFjzAoDGcoK1IYX1UImfceTZpsyYfWr09Ss1aHW1y5TrrR3iq6RZLBwJ3E24uwPhwahw==", "dependencies": { - "tslib": "^2.0.0" + "tslib": "^2.2.0" }, "engines": { - "node": ">=8.0.0" + "node": ">=12.0.0" } }, "node_modules/@azure/abort-controller/node_modules/tslib": { @@ -335,15 +336,14 @@ "integrity": 
"sha512-77EbyPPpMz+FRFRuAFlWMtmgUWGe9UOG2Z25NqCwiIjRhOf5iKGuzSe5P2w1laq+FkRy4p+PCuVkJSGkzTEKVw==" }, "node_modules/@azure/ms-rest-js": { - "version": "2.6.6", - "resolved": "https://registry.npmjs.org/@azure/ms-rest-js/-/ms-rest-js-2.6.6.tgz", - "integrity": "sha512-WYIda8VvrkZE68xHgOxUXvjThxNf1nnGPPe0rAljqK5HJHIZ12Pi3YhEDOn3Ge7UnwaaM3eFO0VtAy4nGVI27Q==", + "version": "2.7.0", + "resolved": "https://registry.npmjs.org/@azure/ms-rest-js/-/ms-rest-js-2.7.0.tgz", + "integrity": "sha512-ngbzWbqF+NmztDOpLBVDxYM+XLcUj7nKhxGbSU9WtIsXfRB//cf2ZbAG5HkOrhU9/wd/ORRB6lM/d69RKVjiyA==", "dependencies": { "@azure/core-auth": "^1.1.4", "abort-controller": "^3.0.0", "form-data": "^2.5.0", "node-fetch": "^2.6.7", - "tough-cookie": "^3.0.1", "tslib": "^1.10.0", "tunnel": "0.0.6", "uuid": "^8.3.2", @@ -363,19 +363,6 @@ "node": ">= 0.12" } }, - "node_modules/@azure/ms-rest-js/node_modules/tough-cookie": { - "version": "3.0.1", - "resolved": "https://registry.npmjs.org/tough-cookie/-/tough-cookie-3.0.1.tgz", - "integrity": "sha512-yQyJ0u4pZsv9D4clxO69OEjLWYw+jbgspjTue4lTQZLfV0c5l1VmK2y1JK8E9ahdpltPOaAThPcp5nKPUgSnsg==", - "dependencies": { - "ip-regex": "^2.1.0", - "psl": "^1.1.28", - "punycode": "^2.1.1" - }, - "engines": { - "node": ">=6" - } - }, "node_modules/@azure/ms-rest-js/node_modules/uuid": { "version": "8.3.2", "resolved": "https://registry.npmjs.org/uuid/-/uuid-8.3.2.tgz", @@ -458,15 +445,6 @@ "url": "https://opencollective.com/babel" } }, - "node_modules/@babel/core/node_modules/semver": { - "version": "6.3.0", - "resolved": "https://registry.npmjs.org/semver/-/semver-6.3.0.tgz", - "integrity": "sha512-b39TBaTSfV6yBrapU89p5fKekE2m/NwnDocOVruQFS1/veMgdzuPcnOM34M6CwxW8jH/lxEa5rBoDeUwu5HHTw==", - "dev": true, - "bin": { - "semver": "bin/semver.js" - } - }, "node_modules/@babel/core/node_modules/source-map": { "version": "0.5.7", "resolved": "https://registry.npmjs.org/source-map/-/source-map-0.5.7.tgz", @@ -517,15 +495,6 @@ "@babel/core": "^7.0.0" } }, - "node_modules/@babel/helper-compilation-targets/node_modules/semver": { - "version": "6.3.0", - "resolved": "https://registry.npmjs.org/semver/-/semver-6.3.0.tgz", - "integrity": "sha512-b39TBaTSfV6yBrapU89p5fKekE2m/NwnDocOVruQFS1/veMgdzuPcnOM34M6CwxW8jH/lxEa5rBoDeUwu5HHTw==", - "dev": true, - "bin": { - "semver": "bin/semver.js" - } - }, "node_modules/@babel/helper-function-name": { "version": "7.15.4", "resolved": "https://registry.npmjs.org/@babel/helper-function-name/-/helper-function-name-7.15.4.tgz", @@ -3785,14 +3754,6 @@ "integrity": "sha512-k/vGaX4/Yla3WzyMCvTQOXYeIHvqOKtnqBduzTHpzpQZzAskKMhZ2K+EnBiSM9zGSoIFeMpXKxa4dYeZIQqewQ==", "dev": true }, - "node_modules/ip-regex": { - "version": "2.1.0", - "resolved": "https://registry.npmjs.org/ip-regex/-/ip-regex-2.1.0.tgz", - "integrity": "sha1-+ni/XS5pE8kRzp+BnuUUa7bYROk=", - "engines": { - "node": ">=4" - } - }, "node_modules/is-ci": { "version": "3.0.0", "resolved": "https://registry.npmjs.org/is-ci/-/is-ci-3.0.0.tgz", @@ -3923,15 +3884,6 @@ "node": ">=8" } }, - "node_modules/istanbul-lib-instrument/node_modules/semver": { - "version": "6.3.0", - "resolved": "https://registry.npmjs.org/semver/-/semver-6.3.0.tgz", - "integrity": "sha512-b39TBaTSfV6yBrapU89p5fKekE2m/NwnDocOVruQFS1/veMgdzuPcnOM34M6CwxW8jH/lxEa5rBoDeUwu5HHTw==", - "dev": true, - "bin": { - "semver": "bin/semver.js" - } - }, "node_modules/istanbul-lib-report": { "version": "3.0.0", "resolved": "https://registry.npmjs.org/istanbul-lib-report/-/istanbul-lib-report-3.0.0.tgz", @@ -5538,7 +5490,8 @@ "node_modules/psl": { "version": 
"1.8.0", "resolved": "https://registry.npmjs.org/psl/-/psl-1.8.0.tgz", - "integrity": "sha512-RIdOzyoavK+hA18OGGWDqUTsCLhtA7IcZ/6NCs4fFJaHBDab+pDDmDIByWFRQJq2Cd7r1OoQxBGKOaztq+hjIQ==" + "integrity": "sha512-RIdOzyoavK+hA18OGGWDqUTsCLhtA7IcZ/6NCs4fFJaHBDab+pDDmDIByWFRQJq2Cd7r1OoQxBGKOaztq+hjIQ==", + "dev": true }, "node_modules/pump": { "version": "3.0.0", @@ -5553,10 +5506,17 @@ "version": "2.1.1", "resolved": "https://registry.npmjs.org/punycode/-/punycode-2.1.1.tgz", "integrity": "sha512-XRsRjdf+j5ml+y/6GKHPZbrF/8p2Yga0JPtdqTIY2Xe5ohJPD9saDJJLPvp9+NSBprVvevdXZybnj2cv8OEd0A==", + "dev": true, "engines": { "node": ">=6" } }, + "node_modules/querystringify": { + "version": "2.2.0", + "resolved": "https://registry.npmjs.org/querystringify/-/querystringify-2.2.0.tgz", + "integrity": "sha512-FIqgj2EUvTa7R50u0rGsyTftzjYmv/a3hO345bZNrqabNqjtgiDMgmo4mkUjd+nzU5oF3dClKqFIPUKybUyqoQ==", + "dev": true + }, "node_modules/queue-microtask": { "version": "1.2.3", "resolved": "https://registry.npmjs.org/queue-microtask/-/queue-microtask-1.2.3.tgz", @@ -5604,6 +5564,12 @@ "node": ">=0.10.0" } }, + "node_modules/requires-port": { + "version": "1.0.0", + "resolved": "https://registry.npmjs.org/requires-port/-/requires-port-1.0.0.tgz", + "integrity": "sha512-KigOCHcocU3XODJxsu8i/j8T9tzT4adHiecwORRQ0ZZFcp7ahwXuRU1m+yuO90C5ZUyGeGfocHDI14M3L3yDAQ==", + "dev": true + }, "node_modules/resolve": { "version": "1.22.2", "resolved": "https://registry.npmjs.org/resolve/-/resolve-1.22.2.tgz", @@ -5720,11 +5686,11 @@ } }, "node_modules/semver": { - "version": "6.1.2", - "resolved": "https://registry.npmjs.org/semver/-/semver-6.1.2.tgz", - "integrity": "sha512-z4PqiCpomGtWj8633oeAdXm1Kn1W++3T8epkZYnwiVgIYIJ0QHszhInYSJTYxebByQH7KVCEAn8R9duzZW2PhQ==", + "version": "6.3.1", + "resolved": "https://registry.npmjs.org/semver/-/semver-6.3.1.tgz", + "integrity": "sha512-BR7VvDCVHO+q2xBEWskxS6DJE1qRnb7DxzUrogb71CWoSficBxYsiAGd+Kl0mmq/MprG9yArRkyrQxTO6XjMzA==", "bin": { - "semver": "bin/semver" + "semver": "bin/semver.js" } }, "node_modules/shebang-command": { @@ -6002,14 +5968,15 @@ } }, "node_modules/tough-cookie": { - "version": "4.0.0", - "resolved": "https://registry.npmjs.org/tough-cookie/-/tough-cookie-4.0.0.tgz", - "integrity": "sha512-tHdtEpQCMrc1YLrMaqXXcj6AxhYi/xgit6mZu1+EDWUn+qhUf8wMQoFIy9NXuq23zAwtcB0t/MjACGR18pcRbg==", + "version": "4.1.3", + "resolved": "https://registry.npmjs.org/tough-cookie/-/tough-cookie-4.1.3.tgz", + "integrity": "sha512-aX/y5pVRkfRnfmuX+OdbSdXvPe6ieKX/G2s7e98f4poJHnqH3281gDPm/metm6E/WRamfx7WC4HUqkWHfQHprw==", "dev": true, "dependencies": { "psl": "^1.1.33", "punycode": "^2.1.1", - "universalify": "^0.1.2" + "universalify": "^0.2.0", + "url-parse": "^1.5.3" }, "engines": { "node": ">=6" @@ -6174,9 +6141,9 @@ } }, "node_modules/universalify": { - "version": "0.1.2", - "resolved": "https://registry.npmjs.org/universalify/-/universalify-0.1.2.tgz", - "integrity": "sha512-rBJeI5CXAlmy1pV+617WB9J63U6XcazHHF2f2dbJix4XzpUF0RS3Zbj0FGIOCAva5P/d/GBOYaACQ1w+0azUkg==", + "version": "0.2.0", + "resolved": "https://registry.npmjs.org/universalify/-/universalify-0.2.0.tgz", + "integrity": "sha512-CJ1QgKmNg3CwvAv/kOFmtnEN05f0D/cn9QntgNOQlQF9dgvVTHj3t+8JPdjqawCHk7V/KA+fbUqzZ9XWhcqPUg==", "dev": true, "engines": { "node": ">= 4.0.0" @@ -6191,6 +6158,16 @@ "punycode": "^2.1.0" } }, + "node_modules/url-parse": { + "version": "1.5.10", + "resolved": "https://registry.npmjs.org/url-parse/-/url-parse-1.5.10.tgz", + "integrity": 
"sha512-WypcfiRhfeUP9vvF0j6rw0J3hrWrw6iZv3+22h6iRMJ/8z1Tj6XfLP4DsUix5MhMPnXpiHDoKyoZ/bdCkwBCiQ==", + "dev": true, + "dependencies": { + "querystringify": "^2.1.1", + "requires-port": "^1.0.0" + } + }, "node_modules/uuid": { "version": "3.3.2", "resolved": "https://registry.npmjs.org/uuid/-/uuid-3.3.2.tgz", @@ -6466,17 +6443,18 @@ }, "dependencies": { "@actions/cache": { - "version": "3.0.4", - "resolved": "https://registry.npmjs.org/@actions/cache/-/cache-3.0.4.tgz", - "integrity": "sha512-9RwVL8/ISJoYWFNH1wR/C26E+M3HDkGPWmbFJMMCKwTkjbNZJreMT4XaR/EB1bheIvN4PREQxEQQVJ18IPnf/Q==", + "version": "3.2.1", + "resolved": "https://registry.npmjs.org/@actions/cache/-/cache-3.2.1.tgz", + "integrity": "sha512-QurbMiY//02+0kN1adJkMHN44RcZ5kAXfhSnKUZmtSmhMTNqLitGArG1xOkt93NNyByTlLGAc5wIOF/dZ2ENOQ==", "requires": { - "@actions/core": "^1.2.6", + "@actions/core": "^1.10.0", "@actions/exec": "^1.0.1", "@actions/glob": "^0.1.0", "@actions/http-client": "^2.0.1", "@actions/io": "^1.0.1", + "@azure/abort-controller": "^1.1.0", "@azure/ms-rest-js": "^2.6.0", - "@azure/storage-blob": "^12.8.0", + "@azure/storage-blob": "^12.13.0", "semver": "^6.1.0", "uuid": "^3.3.3" }, @@ -6582,11 +6560,11 @@ } }, "@azure/abort-controller": { - "version": "1.0.4", - "resolved": "https://registry.npmjs.org/@azure/abort-controller/-/abort-controller-1.0.4.tgz", - "integrity": "sha512-lNUmDRVGpanCsiUN3NWxFTdwmdFI53xwhkTFfHDGTYk46ca7Ind3nanJc+U6Zj9Tv+9nTCWRBscWEW1DyKOpTw==", + "version": "1.1.0", + "resolved": "https://registry.npmjs.org/@azure/abort-controller/-/abort-controller-1.1.0.tgz", + "integrity": "sha512-TrRLIoSQVzfAJX9H1JeFjzAoDGcoK1IYX1UImfceTZpsyYfWr09Ss1aHW1y5TrrR3iq6RZLBwJ3E24uwPhwahw==", "requires": { - "tslib": "^2.0.0" + "tslib": "^2.2.0" }, "dependencies": { "tslib": { @@ -6742,15 +6720,14 @@ } }, "@azure/ms-rest-js": { - "version": "2.6.6", - "resolved": "https://registry.npmjs.org/@azure/ms-rest-js/-/ms-rest-js-2.6.6.tgz", - "integrity": "sha512-WYIda8VvrkZE68xHgOxUXvjThxNf1nnGPPe0rAljqK5HJHIZ12Pi3YhEDOn3Ge7UnwaaM3eFO0VtAy4nGVI27Q==", + "version": "2.7.0", + "resolved": "https://registry.npmjs.org/@azure/ms-rest-js/-/ms-rest-js-2.7.0.tgz", + "integrity": "sha512-ngbzWbqF+NmztDOpLBVDxYM+XLcUj7nKhxGbSU9WtIsXfRB//cf2ZbAG5HkOrhU9/wd/ORRB6lM/d69RKVjiyA==", "requires": { "@azure/core-auth": "^1.1.4", "abort-controller": "^3.0.0", "form-data": "^2.5.0", "node-fetch": "^2.6.7", - "tough-cookie": "^3.0.1", "tslib": "^1.10.0", "tunnel": "0.0.6", "uuid": "^8.3.2", @@ -6767,16 +6744,6 @@ "mime-types": "^2.1.12" } }, - "tough-cookie": { - "version": "3.0.1", - "resolved": "https://registry.npmjs.org/tough-cookie/-/tough-cookie-3.0.1.tgz", - "integrity": "sha512-yQyJ0u4pZsv9D4clxO69OEjLWYw+jbgspjTue4lTQZLfV0c5l1VmK2y1JK8E9ahdpltPOaAThPcp5nKPUgSnsg==", - "requires": { - "ip-regex": "^2.1.0", - "psl": "^1.1.28", - "punycode": "^2.1.1" - } - }, "uuid": { "version": "8.3.2", "resolved": "https://registry.npmjs.org/uuid/-/uuid-8.3.2.tgz", @@ -6844,12 +6811,6 @@ "source-map": "^0.5.0" }, "dependencies": { - "semver": { - "version": "6.3.0", - "resolved": "https://registry.npmjs.org/semver/-/semver-6.3.0.tgz", - "integrity": "sha512-b39TBaTSfV6yBrapU89p5fKekE2m/NwnDocOVruQFS1/veMgdzuPcnOM34M6CwxW8jH/lxEa5rBoDeUwu5HHTw==", - "dev": true - }, "source-map": { "version": "0.5.7", "resolved": "https://registry.npmjs.org/source-map/-/source-map-0.5.7.tgz", @@ -6887,14 +6848,6 @@ "@babel/helper-validator-option": "^7.14.5", "browserslist": "^4.16.6", "semver": "^6.3.0" - }, - "dependencies": { - "semver": { - "version": "6.3.0", - 
"resolved": "https://registry.npmjs.org/semver/-/semver-6.3.0.tgz", - "integrity": "sha512-b39TBaTSfV6yBrapU89p5fKekE2m/NwnDocOVruQFS1/veMgdzuPcnOM34M6CwxW8jH/lxEa5rBoDeUwu5HHTw==", - "dev": true - } } }, "@babel/helper-function-name": { @@ -9367,11 +9320,6 @@ "integrity": "sha512-k/vGaX4/Yla3WzyMCvTQOXYeIHvqOKtnqBduzTHpzpQZzAskKMhZ2K+EnBiSM9zGSoIFeMpXKxa4dYeZIQqewQ==", "dev": true }, - "ip-regex": { - "version": "2.1.0", - "resolved": "https://registry.npmjs.org/ip-regex/-/ip-regex-2.1.0.tgz", - "integrity": "sha1-+ni/XS5pE8kRzp+BnuUUa7bYROk=" - }, "is-ci": { "version": "3.0.0", "resolved": "https://registry.npmjs.org/is-ci/-/is-ci-3.0.0.tgz", @@ -9467,14 +9415,6 @@ "@istanbuljs/schema": "^0.1.2", "istanbul-lib-coverage": "^3.0.0", "semver": "^6.3.0" - }, - "dependencies": { - "semver": { - "version": "6.3.0", - "resolved": "https://registry.npmjs.org/semver/-/semver-6.3.0.tgz", - "integrity": "sha512-b39TBaTSfV6yBrapU89p5fKekE2m/NwnDocOVruQFS1/veMgdzuPcnOM34M6CwxW8jH/lxEa5rBoDeUwu5HHTw==", - "dev": true - } } }, "istanbul-lib-report": { @@ -10695,7 +10635,8 @@ "psl": { "version": "1.8.0", "resolved": "https://registry.npmjs.org/psl/-/psl-1.8.0.tgz", - "integrity": "sha512-RIdOzyoavK+hA18OGGWDqUTsCLhtA7IcZ/6NCs4fFJaHBDab+pDDmDIByWFRQJq2Cd7r1OoQxBGKOaztq+hjIQ==" + "integrity": "sha512-RIdOzyoavK+hA18OGGWDqUTsCLhtA7IcZ/6NCs4fFJaHBDab+pDDmDIByWFRQJq2Cd7r1OoQxBGKOaztq+hjIQ==", + "dev": true }, "pump": { "version": "3.0.0", @@ -10709,7 +10650,14 @@ "punycode": { "version": "2.1.1", "resolved": "https://registry.npmjs.org/punycode/-/punycode-2.1.1.tgz", - "integrity": "sha512-XRsRjdf+j5ml+y/6GKHPZbrF/8p2Yga0JPtdqTIY2Xe5ohJPD9saDJJLPvp9+NSBprVvevdXZybnj2cv8OEd0A==" + "integrity": "sha512-XRsRjdf+j5ml+y/6GKHPZbrF/8p2Yga0JPtdqTIY2Xe5ohJPD9saDJJLPvp9+NSBprVvevdXZybnj2cv8OEd0A==", + "dev": true + }, + "querystringify": { + "version": "2.2.0", + "resolved": "https://registry.npmjs.org/querystringify/-/querystringify-2.2.0.tgz", + "integrity": "sha512-FIqgj2EUvTa7R50u0rGsyTftzjYmv/a3hO345bZNrqabNqjtgiDMgmo4mkUjd+nzU5oF3dClKqFIPUKybUyqoQ==", + "dev": true }, "queue-microtask": { "version": "1.2.3", @@ -10735,6 +10683,12 @@ "integrity": "sha1-jGStX9MNqxyXbiNE/+f3kqam30I=", "dev": true }, + "requires-port": { + "version": "1.0.0", + "resolved": "https://registry.npmjs.org/requires-port/-/requires-port-1.0.0.tgz", + "integrity": "sha512-KigOCHcocU3XODJxsu8i/j8T9tzT4adHiecwORRQ0ZZFcp7ahwXuRU1m+yuO90C5ZUyGeGfocHDI14M3L3yDAQ==", + "dev": true + }, "resolve": { "version": "1.22.2", "resolved": "https://registry.npmjs.org/resolve/-/resolve-1.22.2.tgz", @@ -10812,9 +10766,9 @@ } }, "semver": { - "version": "6.1.2", - "resolved": "https://registry.npmjs.org/semver/-/semver-6.1.2.tgz", - "integrity": "sha512-z4PqiCpomGtWj8633oeAdXm1Kn1W++3T8epkZYnwiVgIYIJ0QHszhInYSJTYxebByQH7KVCEAn8R9duzZW2PhQ==" + "version": "6.3.1", + "resolved": "https://registry.npmjs.org/semver/-/semver-6.3.1.tgz", + "integrity": "sha512-BR7VvDCVHO+q2xBEWskxS6DJE1qRnb7DxzUrogb71CWoSficBxYsiAGd+Kl0mmq/MprG9yArRkyrQxTO6XjMzA==" }, "shebang-command": { "version": "1.2.0", @@ -11024,14 +10978,15 @@ } }, "tough-cookie": { - "version": "4.0.0", - "resolved": "https://registry.npmjs.org/tough-cookie/-/tough-cookie-4.0.0.tgz", - "integrity": "sha512-tHdtEpQCMrc1YLrMaqXXcj6AxhYi/xgit6mZu1+EDWUn+qhUf8wMQoFIy9NXuq23zAwtcB0t/MjACGR18pcRbg==", + "version": "4.1.3", + "resolved": "https://registry.npmjs.org/tough-cookie/-/tough-cookie-4.1.3.tgz", + "integrity": 
"sha512-aX/y5pVRkfRnfmuX+OdbSdXvPe6ieKX/G2s7e98f4poJHnqH3281gDPm/metm6E/WRamfx7WC4HUqkWHfQHprw==", "dev": true, "requires": { "psl": "^1.1.33", "punycode": "^2.1.1", - "universalify": "^0.1.2" + "universalify": "^0.2.0", + "url-parse": "^1.5.3" } }, "tr46": { @@ -11134,9 +11089,9 @@ } }, "universalify": { - "version": "0.1.2", - "resolved": "https://registry.npmjs.org/universalify/-/universalify-0.1.2.tgz", - "integrity": "sha512-rBJeI5CXAlmy1pV+617WB9J63U6XcazHHF2f2dbJix4XzpUF0RS3Zbj0FGIOCAva5P/d/GBOYaACQ1w+0azUkg==", + "version": "0.2.0", + "resolved": "https://registry.npmjs.org/universalify/-/universalify-0.2.0.tgz", + "integrity": "sha512-CJ1QgKmNg3CwvAv/kOFmtnEN05f0D/cn9QntgNOQlQF9dgvVTHj3t+8JPdjqawCHk7V/KA+fbUqzZ9XWhcqPUg==", "dev": true }, "uri-js": { @@ -11148,6 +11103,16 @@ "punycode": "^2.1.0" } }, + "url-parse": { + "version": "1.5.10", + "resolved": "https://registry.npmjs.org/url-parse/-/url-parse-1.5.10.tgz", + "integrity": "sha512-WypcfiRhfeUP9vvF0j6rw0J3hrWrw6iZv3+22h6iRMJ/8z1Tj6XfLP4DsUix5MhMPnXpiHDoKyoZ/bdCkwBCiQ==", + "dev": true, + "requires": { + "querystringify": "^2.1.1", + "requires-port": "^1.0.0" + } + }, "uuid": { "version": "3.3.2", "resolved": "https://registry.npmjs.org/uuid/-/uuid-3.3.2.tgz", diff --git a/package.json b/package.json index d7a6ff69..9284818b 100644 --- a/package.json +++ b/package.json @@ -33,7 +33,7 @@ "@actions/http-client": "^1.0.11", "@actions/io": "^1.0.2", "@actions/tool-cache": "^1.5.4", - "semver": "^6.1.1" + "semver": "^6.3.1" }, "devDependencies": { "@types/jest": "^27.0.2", diff --git a/src/cache-restore.ts b/src/cache-restore.ts index c6f14ad7..3b230970 100644 --- a/src/cache-restore.ts +++ b/src/cache-restore.ts @@ -6,14 +6,15 @@ import fs from 'fs'; import {State} from './constants'; import { - getCacheDirectoryPath, + getCacheDirectories, getPackageManagerInfo, + repoHasYarnBerryManagedDependencies, PackageManagerInfo } from './cache-utils'; export const restoreCache = async ( packageManager: string, - cacheDependencyPath?: string + cacheDependencyPath: string ) => { const packageManagerInfo = await getPackageManagerInfo(packageManager); if (!packageManagerInfo) { @@ -21,10 +22,11 @@ export const restoreCache = async ( } const platform = process.env.RUNNER_OS; - const cachePath = await getCacheDirectoryPath( + const cachePaths = await getCacheDirectories( packageManagerInfo, - packageManager + cacheDependencyPath ); + core.saveState(State.CachePaths, cachePaths); const lockFilePath = cacheDependencyPath ? 
cacheDependencyPath : findLockFile(packageManagerInfo); @@ -36,12 +38,26 @@ export const restoreCache = async ( ); } - const primaryKey = `node-cache-${platform}-${packageManager}-${fileHash}`; + const keyPrefix = `node-cache-${platform}-${packageManager}`; + const primaryKey = `${keyPrefix}-${fileHash}`; core.debug(`primary key is ${primaryKey}`); core.saveState(State.CachePrimaryKey, primaryKey); - const cacheKey = await cache.restoreCache([cachePath], primaryKey); + const isManagedByYarnBerry = await repoHasYarnBerryManagedDependencies( + packageManagerInfo, + cacheDependencyPath + ); + let cacheKey: string | undefined; + if (isManagedByYarnBerry) { + core.info( + 'All dependencies are managed locally by yarn3, the previous cache can be used' + ); + cacheKey = await cache.restoreCache(cachePaths, primaryKey, [keyPrefix]); + } else { + cacheKey = await cache.restoreCache(cachePaths, primaryKey); + } + core.setOutput('cache-hit', Boolean(cacheKey)); if (!cacheKey) { @@ -56,6 +72,7 @@ export const restoreCache = async ( const findLockFile = (packageManager: PackageManagerInfo) => { const lockFiles = packageManager.lockFilePatterns; const workspace = process.env.GITHUB_WORKSPACE!; + const rootContent = fs.readdirSync(workspace); const lockFile = lockFiles.find(item => rootContent.includes(item)); diff --git a/src/cache-save.ts b/src/cache-save.ts index 24565a8e..9449fb85 100644 --- a/src/cache-save.ts +++ b/src/cache-save.ts @@ -1,8 +1,10 @@ import * as core from '@actions/core'; import * as cache from '@actions/cache'; + import fs from 'fs'; + import {State} from './constants'; -import {getCacheDirectoryPath, getPackageManagerInfo} from './cache-utils'; +import {getPackageManagerInfo} from './cache-utils'; // Catch and log any unhandled exceptions. These exceptions can leak out of the uploadChunk method in // @actions/toolkit when a failed upload closes the file descriptor causing any in-process reads to @@ -24,6 +26,10 @@ export async function run() { const cachePackages = async (packageManager: string) => { const state = core.getState(State.CacheMatchedKey); const primaryKey = core.getState(State.CachePrimaryKey); + let cachePaths = JSON.parse( + core.getState(State.CachePaths) || '[]' + ) as string[]; + cachePaths = cachePaths.filter(fs.existsSync); const packageManagerInfo = await getPackageManagerInfo(packageManager); if (!packageManagerInfo) { @@ -31,14 +37,12 @@ const cachePackages = async (packageManager: string) => { return; } - const cachePath = await getCacheDirectoryPath( - packageManagerInfo, - packageManager - ); - - if (!fs.existsSync(cachePath)) { + if (!cachePaths.length) { + // TODO: core.getInput has a bug - it can return undefined despite its definition (tests only?) 
diff --git a/src/cache-save.ts b/src/cache-save.ts
index 24565a8e..9449fb85 100644
--- a/src/cache-save.ts
+++ b/src/cache-save.ts
@@ -1,8 +1,10 @@
 import * as core from '@actions/core';
 import * as cache from '@actions/cache';
+
 import fs from 'fs';
+
 import {State} from './constants';
-import {getCacheDirectoryPath, getPackageManagerInfo} from './cache-utils';
+import {getPackageManagerInfo} from './cache-utils';
 
 // Catch and log any unhandled exceptions. These exceptions can leak out of the uploadChunk method in
 // @actions/toolkit when a failed upload closes the file descriptor causing any in-process reads to
@@ -24,6 +26,10 @@ export async function run() {
 const cachePackages = async (packageManager: string) => {
   const state = core.getState(State.CacheMatchedKey);
   const primaryKey = core.getState(State.CachePrimaryKey);
+  let cachePaths = JSON.parse(
+    core.getState(State.CachePaths) || '[]'
+  ) as string[];
+  cachePaths = cachePaths.filter(fs.existsSync);
 
   const packageManagerInfo = await getPackageManagerInfo(packageManager);
   if (!packageManagerInfo) {
@@ -31,14 +37,12 @@ const cachePackages = async (packageManager: string) => {
     return;
   }
 
-  const cachePath = await getCacheDirectoryPath(
-    packageManagerInfo,
-    packageManager
-  );
-
-  if (!fs.existsSync(cachePath)) {
+  if (!cachePaths.length) {
+    // TODO: core.getInput has a bug - it can return undefined despite its definition (tests only?)
+    // export declare function getInput(name: string, options?: InputOptions): string;
+    const cacheDependencyPath = core.getInput('cache-dependency-path') || '';
     throw new Error(
-      `Cache folder path is retrieved for ${packageManager} but doesn't exist on disk: ${cachePath}`
+      `Cache folder paths are not retrieved for ${packageManager} with cache-dependency-path = ${cacheDependencyPath}`
     );
   }
 
@@ -49,7 +53,7 @@ const cachePackages = async (packageManager: string) => {
     return;
   }
 
-  const cacheId = await cache.saveCache([cachePath], primaryKey);
+  const cacheId = await cache.saveCache(cachePaths, primaryKey);
   if (cacheId == -1) {
     return;
   }
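A small sketch of the state hand-off that the cache-save changes above rely on; persistCachePaths and readCachePaths are hypothetical helper names standing in for core.saveState/core.getState with the new State.CachePaths key.

import fs from 'fs';

// The restore step stores the resolved cache directories as JSON in the step
// state; the post step reads them back and drops folders that were never
// created (for example when the install step was skipped).
function persistCachePaths(cachePaths: string[]): string {
  return JSON.stringify(cachePaths);
}

function readCachePaths(state: string): string[] {
  // getState returns '' when the restore step did not run, hence the '[]' default.
  const cachePaths = JSON.parse(state || '[]') as string[];
  return cachePaths.filter(fs.existsSync);
}

const state = persistCachePaths(['sub2/.yarn/cache', 'sub3/.yarn/cache']);
console.log(readCachePaths(state)); // only the directories that exist on disk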
diff --git a/src/cache-utils.ts b/src/cache-utils.ts
index 5df3e718..7066d733 100644
--- a/src/cache-utils.ts
+++ b/src/cache-utils.ts
@@ -1,40 +1,79 @@
 import * as core from '@actions/core';
 import * as exec from '@actions/exec';
 import * as cache from '@actions/cache';
-
-type SupportedPackageManagers = {
-  [prop: string]: PackageManagerInfo;
-};
+import * as glob from '@actions/glob';
+import path from 'path';
+import fs from 'fs';
+import {unique} from './util';
 
 export interface PackageManagerInfo {
+  name: string;
   lockFilePatterns: Array<string>;
-  getCacheFolderCommand: string;
+  getCacheFolderPath: (projectDir?: string) => Promise<string>;
 }
+interface SupportedPackageManagers {
+  npm: PackageManagerInfo;
+  pnpm: PackageManagerInfo;
+  yarn: PackageManagerInfo;
+}
 export const supportedPackageManagers: SupportedPackageManagers = {
   npm: {
+    name: 'npm',
     lockFilePatterns: ['package-lock.json', 'npm-shrinkwrap.json', 'yarn.lock'],
-    getCacheFolderCommand: 'npm config get cache'
+    getCacheFolderPath: () =>
+      getCommandOutputNotEmpty(
+        'npm config get cache',
+        'Could not get npm cache folder path'
+      )
   },
   pnpm: {
+    name: 'pnpm',
     lockFilePatterns: ['pnpm-lock.yaml'],
-    getCacheFolderCommand: 'pnpm store path --silent'
+    getCacheFolderPath: () =>
+      getCommandOutputNotEmpty(
+        'pnpm store path --silent',
+        'Could not get pnpm cache folder path'
+      )
  },
-  yarn1: {
+  yarn: {
+    name: 'yarn',
     lockFilePatterns: ['yarn.lock'],
-    getCacheFolderCommand: 'yarn cache dir'
-  },
-  yarn2: {
-    lockFilePatterns: ['yarn.lock'],
-    getCacheFolderCommand: 'yarn config get cacheFolder'
+    getCacheFolderPath: async projectDir => {
+      const yarnVersion = await getCommandOutputNotEmpty(
+        `yarn --version`,
+        'Could not retrieve version of yarn',
+        projectDir
+      );
+
+      core.debug(
+        `Consumed yarn version is ${yarnVersion} (working dir: "${
+          projectDir || ''
+        }")`
+      );
+
+      const stdOut = yarnVersion.startsWith('1.')
+        ? await getCommandOutput('yarn cache dir', projectDir)
+        : await getCommandOutput('yarn config get cacheFolder', projectDir);
+
+      if (!stdOut) {
+        throw new Error(
+          `Could not get yarn cache folder path for ${projectDir}`
+        );
+      }
+      return stdOut;
+    }
   }
 };
 
-export const getCommandOutput = async (toolCommand: string) => {
+export const getCommandOutput = async (
+  toolCommand: string,
+  cwd?: string
+): Promise<string> => {
   let {stdout, stderr, exitCode} = await exec.getExecOutput(
     toolCommand,
     undefined,
-    {ignoreReturnCode: true}
+    {ignoreReturnCode: true, ...(cwd && {cwd})}
   );
 
   if (exitCode) {
@@ -47,16 +86,15 @@
   return stdout.trim();
 };
 
-const getPackageManagerVersion = async (
-  packageManager: string,
-  command: string
-) => {
-  const stdOut = await getCommandOutput(`${packageManager} ${command}`);
-
+export const getCommandOutputNotEmpty = async (
+  toolCommand: string,
+  error: string,
+  cwd?: string
+): Promise<string> => {
+  const stdOut = await getCommandOutput(toolCommand, cwd);
   if (!stdOut) {
-    throw new Error(`Could not retrieve version of ${packageManager}`);
+    throw new Error(error);
  }
-
   return stdOut;
 };
@@ -66,35 +104,191 @@ export const getPackageManagerInfo = async (packageManager: string) => {
   } else if (packageManager === 'pnpm') {
     return supportedPackageManagers.pnpm;
   } else if (packageManager === 'yarn') {
-    const yarnVersion = await getPackageManagerVersion('yarn', '--version');
-
-    core.debug(`Consumed yarn version is ${yarnVersion}`);
-
-    if (yarnVersion.startsWith('1.')) {
-      return supportedPackageManagers.yarn1;
-    } else {
-      return supportedPackageManagers.yarn2;
-    }
+    return supportedPackageManagers.yarn;
   } else {
     return null;
   }
 };
 
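To summarize the version-dependent lookup performed by the yarn entry above, a simplified sketch follows; yarnCacheFolder and runInDir are illustrative names, with runInDir standing in for getCommandOutput invoked with a working directory.

// Simplified sketch: yarn 1 ("classic") and yarn berry expose the cache folder
// through different commands, so the version is queried first.
async function yarnCacheFolder(
  runInDir: (command: string, cwd?: string) => Promise<string>,
  projectDir?: string
): Promise<string> {
  const version = await runInDir('yarn --version', projectDir);
  const command = version.startsWith('1.')
    ? 'yarn cache dir' // yarn classic
    : 'yarn config get cacheFolder'; // yarn berry (2+)
  const folder = await runInDir(command, projectDir);
  if (!folder) {
    throw new Error(`Could not get yarn cache folder path for ${projectDir}`);
  }
  return folder;
}

// Example with a canned runner instead of spawning yarn:
yarnCacheFolder(
  async cmd => (cmd === 'yarn --version' ? '3.6.1' : '/repo/sub2/.yarn/cache'),
  'sub2'
).then(folder => console.log(folder)); // /repo/sub2/.yarn/cache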
cache-dependency-path="${cacheDependencyPath}"` + ); + + projectDirectoriesMemoized = existingDirectories; + return existingDirectories; +}; + +/** + * Finds the cache directories configured for the repo if cache-dependency-path is not empty + * @param packageManagerInfo - an object having getCacheFolderPath method specific to given PM + * @param cacheDependencyPath - either a single string or multiline string with possible glob patterns + * expected to be the result of `core.getInput('cache-dependency-path')` + * @return list of files on which the cache depends + */ +const getCacheDirectoriesFromCacheDependencyPath = async ( + packageManagerInfo: PackageManagerInfo, + cacheDependencyPath: string +): Promise => { + const projectDirectories = await getProjectDirectoriesFromCacheDependencyPath( + cacheDependencyPath + ); + const cacheFoldersPaths = await Promise.all( + projectDirectories.map(async projectDirectory => { + const cacheFolderPath = await packageManagerInfo.getCacheFolderPath( + projectDirectory + ); + core.debug( + `${packageManagerInfo.name}'s cache folder "${cacheFolderPath}" configured for the directory "${projectDirectory}"` + ); + return cacheFolderPath; + }) + ); + // uniq in order to do not cache the same directories twice + return cacheFoldersPaths.filter(unique()); +}; + +/** + * Finds the cache directories configured for the repo ignoring cache-dependency-path + * @param packageManagerInfo - an object having getCacheFolderPath method specific to given PM + * @return list of files on which the cache depends + */ +const getCacheDirectoriesForRootProject = async ( + packageManagerInfo: PackageManagerInfo +): Promise => { + const cacheFolderPath = await packageManagerInfo.getCacheFolderPath(); + core.debug( + `${packageManagerInfo.name}'s cache folder "${cacheFolderPath}" configured for the root directory` + ); + return [cacheFolderPath]; +}; + +/** + * A function to find the cache directories configured for the repo + * currently it handles only the case of PM=yarn && cacheDependencyPath is not empty + * @param packageManagerInfo - an object having getCacheFolderPath method specific to given PM + * @param cacheDependencyPath - either a single string or multiline string with possible glob patterns + * expected to be the result of `core.getInput('cache-dependency-path')` + * @return list of files on which the cache depends + */ +export const getCacheDirectories = async ( + packageManagerInfo: PackageManagerInfo, + cacheDependencyPath: string +): Promise => { + // For yarn, if cacheDependencyPath is set, ask information about cache folders in each project + // folder satisfied by cacheDependencyPath https://github.com/actions/setup-node/issues/488 + if (packageManagerInfo.name === 'yarn' && cacheDependencyPath) { + return getCacheDirectoriesFromCacheDependencyPath( + packageManagerInfo, + cacheDependencyPath + ); + } + return getCacheDirectoriesForRootProject(packageManagerInfo); +}; + +/** + * A function to check if the directory is a yarn project configured to manage + * obsolete dependencies in the local cache + * @param directory - a path to the folder + * @return - true if the directory's project is yarn managed + * - if there's .yarn/cache folder do not mess with the dependencies kept in the repo, return false + * - global cache is not managed by yarn @see https://yarnpkg.com/features/offline-cache, return false + * - if local cache is not explicitly enabled (not yarn3), return false + * - return true otherwise + */ +const projectHasYarnBerryManagedDependencies = async 
+
+/**
+ * A function to check if the directory is a yarn project configured to manage
+ * obsolete dependencies in the local cache
+ * @param directory - a path to the folder
+ * @return - true if the directory's project is yarn managed
+ *  - if there's a .yarn/cache folder, do not mess with the dependencies kept in the repo, return false
+ *  - the global cache is not managed by yarn @see https://yarnpkg.com/features/offline-cache, return false
+ *  - if the local cache is not explicitly enabled (not yarn3), return false
+ *  - return true otherwise
+ */
+const projectHasYarnBerryManagedDependencies = async (
+  directory: string
+): Promise<boolean> => {
+  const workDir = directory || process.env.GITHUB_WORKSPACE || '.';
+  core.debug(`check if "${workDir}" has locally managed yarn3 dependencies`);
+
+  // if the .yarn/cache directory exists, the cache is managed by the version control system
+  const yarnCacheFile = path.join(workDir, '.yarn', 'cache');
+  if (
+    fs.existsSync(yarnCacheFile) &&
+    fs.lstatSync(yarnCacheFile).isDirectory()
+  ) {
+    core.debug(
+      `"${workDir}" has .yarn/cache - dependencies are kept in the repository`
+    );
+    return Promise.resolve(false);
+  }
+
+  // NOTE: yarn1 returns 'undefined' with return code = 0
+  const enableGlobalCache = await getCommandOutput(
+    'yarn config get enableGlobalCache',
+    workDir
+  );
+  // only a local cache (enableGlobalCache: false) is fully managed by yarn
+  const managed = enableGlobalCache.includes('false');
+  if (managed) {
+    core.debug(`"${workDir}" dependencies are managed by yarn 3 locally`);
+    return true;
+  } else {
+    core.debug(`"${workDir}" dependencies are not managed by yarn 3 locally`);
+    return false;
+  }
+};
+
+/**
+ * A function to report whether the repo consists only of Yarn Berry managed projects
+ * @param packageManagerInfo - used to make sure the current package manager is yarn
+ * @param cacheDependencyPath - either a single string or multiline string with possible glob patterns
+ *                              expected to be the result of `core.getInput('cache-dependency-path')`
+ * @return - true if all project directories are configured to be Yarn Berry managed
+ */
+export const repoHasYarnBerryManagedDependencies = async (
+  packageManagerInfo: PackageManagerInfo,
+  cacheDependencyPath: string
+): Promise<boolean> => {
+  if (packageManagerInfo.name !== 'yarn') return false;
+
+  const yarnDirs = cacheDependencyPath
+    ? await getProjectDirectoriesFromCacheDependencyPath(cacheDependencyPath)
+    : [''];
+
+  const isManagedList = await Promise.all(
+    yarnDirs.map(projectHasYarnBerryManagedDependencies)
+  );
+
+  return isManagedList.every(Boolean);
 };
 
 export function isGhes(): boolean {
diff --git a/src/constants.ts b/src/constants.ts
index 021418c2..cd017266 100644
--- a/src/constants.ts
+++ b/src/constants.ts
@@ -6,7 +6,8 @@ export enum LockType {
 
 export enum State {
   CachePrimaryKey = 'CACHE_KEY',
-  CacheMatchedKey = 'CACHE_RESULT'
+  CacheMatchedKey = 'CACHE_RESULT',
+  CachePaths = 'CACHE_PATHS'
 }
 
 export enum Outputs {
diff --git a/src/distributions/base-distribution.ts b/src/distributions/base-distribution.ts
index f7face13..ecd73c15 100644
--- a/src/distributions/base-distribution.ts
+++ b/src/distributions/base-distribution.ts
@@ -88,7 +88,11 @@ export default abstract class BaseDistribution {
   }
 
   protected findVersionInHostedToolCacheDirectory() {
-    return tc.find('node', this.nodeInfo.versionSpec, this.nodeInfo.arch);
+    return tc.find(
+      'node',
+      this.nodeInfo.versionSpec,
+      this.translateArchToDistUrl(this.nodeInfo.arch)
+    );
   }
 
   protected async getNodeJsVersions(): Promise<INodeVersion[]> {
diff --git a/src/util.ts b/src/util.ts
index 60f2649c..3ae94a2d 100644
--- a/src/util.ts
+++ b/src/util.ts
@@ -13,7 +13,7 @@ export function parseNodeVersionFile(contents: string): string {
   }
 
   if (!nodeVersion) {
-    const found = contents.match(/^(?:nodejs\s+)?v?(?<version>[^\s]+)$/m);
+    const found = contents.match(/^(?:node(js)?\s+)?v?(?<version>[^\s]+)$/m);
     nodeVersion = found?.groups?.version;
   }
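A quick check of the widened version-file pattern above; the named capture group is written out explicitly here, and the sample inputs are assumed .tool-versions / .nvmrc style lines.

// The old pattern only accepted "nodejs <version>"; the "node(js)?" alternation
// also accepts "node <version>", and a bare "v<version>" still works.
const versionLine = /^(?:node(js)?\s+)?v?(?<version>[^\s]+)$/m;

for (const contents of ['nodejs 16.14.2', 'node 18.17.0', 'v14.21.3']) {
  console.log(contents.match(versionLine)?.groups?.version);
}
// -> 16.14.2, 18.17.0, 14.21.3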
@@ -61,3 +61,12 @@ async function getToolVersion(tool: string, options: string[]) {
     return '';
   }
 }
+
+export const unique = () => {
+  const encountered = new Set();
+  return (value: unknown): boolean => {
+    if (encountered.has(value)) return false;
+    encountered.add(value);
+    return true;
+  };
+};
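Usage sketch for the new unique() helper; it is restated here so the snippet runs standalone. Because the returned predicate keeps its own Set of seen values, each filter pass needs a fresh unique() call.

const unique = () => {
  const encountered = new Set();
  return (value: unknown): boolean => {
    if (encountered.has(value)) return false;
    encountered.add(value);
    return true;
  };
};

// Duplicate cache folders collapse to a single entry.
console.log(
  ['/a/.yarn/cache', '/b/.yarn/cache', '/a/.yarn/cache'].filter(unique())
);
// -> ['/a/.yarn/cache', '/b/.yarn/cache']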