Mirror of https://github.com/kitabisa/docker-slim-action.git (synced 2025-04-14 18:56:05 +00:00)
feat: cache the bin (#8)
* feat: cache the bin

  Signed-off-by: Dwi Siswanto <me@dw1.io>

* feat: print error while get cache

  Signed-off-by: Dwi Siswanto <me@dw1.io>

---------

Signed-off-by: Dwi Siswanto <me@dw1.io>
This commit is contained in:
parent
728af0acc6
commit
1fe2ce2532
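The change vendors `@actions/cache` so the action can reuse the downloaded docker-slim binary across workflow runs instead of fetching it every time, and the follow-up commit prints the error when reading the cache fails rather than failing the job. A minimal sketch of that pattern is below; it is not the action's actual code, and the key scheme, `binPath` argument, and helper names are assumptions for illustration only.

```js
// Hedged sketch only: how an action might cache a downloaded binary with
// @actions/cache, the dependency vendored by this commit. The key scheme,
// binPath argument, and helper names are illustrative assumptions, not the
// repository's actual code.
const cache = require('@actions/cache');
const core = require('@actions/core');

// Try to restore a previously cached binary; returns the matched key or undefined.
async function restoreBin(binPath, version) {
  const key = `slim-bin-${process.platform}-${version}`;
  try {
    return await cache.restoreCache([binPath], key);
  } catch (err) {
    // Print the error instead of failing the job, as in the second commit of this PR.
    core.warning(`Failed to get cache: ${err.message}`);
    return undefined;
  }
}

// After a fresh download, persist the binary for the next run.
async function saveBin(binPath, version) {
  const key = `slim-bin-${process.platform}-${version}`;
  await cache.saveCache([binPath], key);
}
```

On a miss the action would download the binary as before and then call the save helper, matching the restore/save pair exposed by the vendored package shown in the diff below.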
1  node_modules/.bin/semver  generated  vendored  Symbolic link
@@ -0,0 +1 @@
../semver/bin/semver.js

503  node_modules/.package-lock.json  generated  vendored
@@ -1,14 +1,39 @@
{
|
||||
"name": "docker-slim-action",
|
||||
"version": "1.0.0",
|
||||
"lockfileVersion": 2,
|
||||
"name": "@kitabisa/docker-slim-action",
|
||||
"version": "1.0.3",
|
||||
"lockfileVersion": 3,
|
||||
"requires": true,
|
||||
"packages": {
|
||||
"node_modules/@actions/cache": {
|
||||
"version": "3.2.4",
|
||||
"resolved": "https://registry.npmjs.org/@actions/cache/-/cache-3.2.4.tgz",
|
||||
"integrity": "sha512-RuHnwfcDagtX+37s0ZWy7clbOfnZ7AlDJQ7k/9rzt2W4Gnwde3fa/qjSjVuz4vLcLIpc7fUob27CMrqiWZytYA==",
|
||||
"dependencies": {
|
||||
"@actions/core": "^1.10.0",
|
||||
"@actions/exec": "^1.0.1",
|
||||
"@actions/glob": "^0.1.0",
|
||||
"@actions/http-client": "^2.1.1",
|
||||
"@actions/io": "^1.0.1",
|
||||
"@azure/abort-controller": "^1.1.0",
|
||||
"@azure/ms-rest-js": "^2.6.0",
|
||||
"@azure/storage-blob": "^12.13.0",
|
||||
"semver": "^6.3.1",
|
||||
"uuid": "^3.3.3"
|
||||
}
|
||||
},
|
||||
"node_modules/@actions/cache/node_modules/uuid": {
|
||||
"version": "3.4.0",
|
||||
"resolved": "https://registry.npmjs.org/uuid/-/uuid-3.4.0.tgz",
|
||||
"integrity": "sha512-HjSDRw6gZE5JMggctHBcjVak08+KEVhSIiDzFnT9S9aegmp85S/bReBVTb4QTFaRNptJ9kuYaNhnbNEOkbKb/A==",
|
||||
"deprecated": "Please upgrade to version 7 or higher. Older versions may use Math.random() in certain circumstances, which is known to be problematic. See https://v8.dev/blog/math-random for details.",
|
||||
"bin": {
|
||||
"uuid": "bin/uuid"
|
||||
}
|
||||
},
|
||||
"node_modules/@actions/core": {
|
||||
"version": "1.10.0",
|
||||
"resolved": "https://registry.npmjs.org/@actions/core/-/core-1.10.0.tgz",
|
||||
"integrity": "sha512-2aZDDa3zrrZbP5ZYg159sNoLRb61nQ7awl5pSvIq5Qpj81vwDzdMRKzkWJGJuwVvWpvZKx7vspJALyvaaIQyug==",
|
||||
"dev": true,
|
||||
"dependencies": {
|
||||
"@actions/http-client": "^2.0.1",
|
||||
"uuid": "^8.3.2"
|
||||
@@ -18,32 +43,275 @@
|
||||
"version": "1.1.1",
|
||||
"resolved": "https://registry.npmjs.org/@actions/exec/-/exec-1.1.1.tgz",
|
||||
"integrity": "sha512-+sCcHHbVdk93a0XT19ECtO/gIXoxvdsgQLzb2fE2/5sIZmWQuluYyjPQtrtTHdU1YzTZ7bAPN4sITq2xi1679w==",
|
||||
"dev": true,
|
||||
"dependencies": {
|
||||
"@actions/io": "^1.0.1"
|
||||
}
|
||||
},
|
||||
"node_modules/@actions/http-client": {
|
||||
"version": "2.1.0",
|
||||
"resolved": "https://registry.npmjs.org/@actions/http-client/-/http-client-2.1.0.tgz",
|
||||
"integrity": "sha512-BonhODnXr3amchh4qkmjPMUO8mFi/zLaaCeCAJZqch8iQqyDnVIkySjB38VHAC8IJ+bnlgfOqlhpyCUZHlQsqw==",
|
||||
"dev": true,
|
||||
"node_modules/@actions/glob": {
|
||||
"version": "0.1.2",
|
||||
"resolved": "https://registry.npmjs.org/@actions/glob/-/glob-0.1.2.tgz",
|
||||
"integrity": "sha512-SclLR7Ia5sEqjkJTPs7Sd86maMDw43p769YxBOxvPvEWuPEhpAnBsQfENOpXjFYMmhCqd127bmf+YdvJqVqR4A==",
|
||||
"dependencies": {
|
||||
"tunnel": "^0.0.6"
|
||||
"@actions/core": "^1.2.6",
|
||||
"minimatch": "^3.0.4"
|
||||
}
|
||||
},
|
||||
"node_modules/@actions/http-client": {
|
||||
"version": "2.2.1",
|
||||
"resolved": "https://registry.npmjs.org/@actions/http-client/-/http-client-2.2.1.tgz",
|
||||
"integrity": "sha512-KhC/cZsq7f8I4LfZSJKgCvEwfkE8o1538VoBeoGzokVLLnbFDEAdFD3UhoMklxo2un9NJVBdANOresx7vTHlHw==",
|
||||
"dependencies": {
|
||||
"tunnel": "^0.0.6",
|
||||
"undici": "^5.25.4"
|
||||
}
|
||||
},
|
||||
"node_modules/@actions/io": {
|
||||
"version": "1.1.2",
|
||||
"resolved": "https://registry.npmjs.org/@actions/io/-/io-1.1.2.tgz",
|
||||
"integrity": "sha512-d+RwPlMp+2qmBfeLYPLXuSRykDIFEwdTA0MMxzS9kh4kvP1ftrc/9fzy6pX6qAjthdXruHQ6/6kjT/DNo5ALuw==",
|
||||
"dev": true
|
||||
"integrity": "sha512-d+RwPlMp+2qmBfeLYPLXuSRykDIFEwdTA0MMxzS9kh4kvP1ftrc/9fzy6pX6qAjthdXruHQ6/6kjT/DNo5ALuw=="
|
||||
},
|
||||
"node_modules/@azure/abort-controller": {
|
||||
"version": "1.1.0",
|
||||
"resolved": "https://registry.npmjs.org/@azure/abort-controller/-/abort-controller-1.1.0.tgz",
|
||||
"integrity": "sha512-TrRLIoSQVzfAJX9H1JeFjzAoDGcoK1IYX1UImfceTZpsyYfWr09Ss1aHW1y5TrrR3iq6RZLBwJ3E24uwPhwahw==",
|
||||
"dependencies": {
|
||||
"tslib": "^2.2.0"
|
||||
},
|
||||
"engines": {
|
||||
"node": ">=12.0.0"
|
||||
}
|
||||
},
|
||||
"node_modules/@azure/core-auth": {
|
||||
"version": "1.7.1",
|
||||
"resolved": "https://registry.npmjs.org/@azure/core-auth/-/core-auth-1.7.1.tgz",
|
||||
"integrity": "sha512-dyeQwvgthqs/SlPVQbZQetpslXceHd4i5a7M/7z/lGEAVwnSluabnQOjF2/dk/hhWgMISusv1Ytp4mQ8JNy62A==",
|
||||
"dependencies": {
|
||||
"@azure/abort-controller": "^2.0.0",
|
||||
"@azure/core-util": "^1.1.0",
|
||||
"tslib": "^2.6.2"
|
||||
},
|
||||
"engines": {
|
||||
"node": ">=18.0.0"
|
||||
}
|
||||
},
|
||||
"node_modules/@azure/core-auth/node_modules/@azure/abort-controller": {
|
||||
"version": "2.1.1",
|
||||
"resolved": "https://registry.npmjs.org/@azure/abort-controller/-/abort-controller-2.1.1.tgz",
|
||||
"integrity": "sha512-NhzeNm5zu2fPlwGXPUjzsRCRuPx5demaZyNcyNYJDqpa/Sbxzvo/RYt9IwUaAOnDW5+r7J9UOE6f22TQnb9nhQ==",
|
||||
"dependencies": {
|
||||
"tslib": "^2.6.2"
|
||||
},
|
||||
"engines": {
|
||||
"node": ">=18.0.0"
|
||||
}
|
||||
},
|
||||
"node_modules/@azure/core-http": {
|
||||
"version": "3.0.4",
|
||||
"resolved": "https://registry.npmjs.org/@azure/core-http/-/core-http-3.0.4.tgz",
|
||||
"integrity": "sha512-Fok9VVhMdxAFOtqiiAtg74fL0UJkt0z3D+ouUUxcRLzZNBioPRAMJFVxiWoJljYpXsRi4GDQHzQHDc9AiYaIUQ==",
|
||||
"dependencies": {
|
||||
"@azure/abort-controller": "^1.0.0",
|
||||
"@azure/core-auth": "^1.3.0",
|
||||
"@azure/core-tracing": "1.0.0-preview.13",
|
||||
"@azure/core-util": "^1.1.1",
|
||||
"@azure/logger": "^1.0.0",
|
||||
"@types/node-fetch": "^2.5.0",
|
||||
"@types/tunnel": "^0.0.3",
|
||||
"form-data": "^4.0.0",
|
||||
"node-fetch": "^2.6.7",
|
||||
"process": "^0.11.10",
|
||||
"tslib": "^2.2.0",
|
||||
"tunnel": "^0.0.6",
|
||||
"uuid": "^8.3.0",
|
||||
"xml2js": "^0.5.0"
|
||||
},
|
||||
"engines": {
|
||||
"node": ">=14.0.0"
|
||||
}
|
||||
},
|
||||
"node_modules/@azure/core-http/node_modules/form-data": {
|
||||
"version": "4.0.0",
|
||||
"resolved": "https://registry.npmjs.org/form-data/-/form-data-4.0.0.tgz",
|
||||
"integrity": "sha512-ETEklSGi5t0QMZuiXoA/Q6vcnxcLQP5vdugSpuAyi6SVGi2clPPp+xgEhuMaHC+zGgn31Kd235W35f7Hykkaww==",
|
||||
"dependencies": {
|
||||
"asynckit": "^0.4.0",
|
||||
"combined-stream": "^1.0.8",
|
||||
"mime-types": "^2.1.12"
|
||||
},
|
||||
"engines": {
|
||||
"node": ">= 6"
|
||||
}
|
||||
},
|
||||
"node_modules/@azure/core-lro": {
|
||||
"version": "2.7.1",
|
||||
"resolved": "https://registry.npmjs.org/@azure/core-lro/-/core-lro-2.7.1.tgz",
|
||||
"integrity": "sha512-kXSlrNHOCTVZMxpXNRqzgh9/j4cnNXU5Hf2YjMyjddRhCXFiFRzmNaqwN+XO9rGTsCOIaaG7M67zZdyliXZG9g==",
|
||||
"dependencies": {
|
||||
"@azure/abort-controller": "^2.0.0",
|
||||
"@azure/core-util": "^1.2.0",
|
||||
"@azure/logger": "^1.0.0",
|
||||
"tslib": "^2.6.2"
|
||||
},
|
||||
"engines": {
|
||||
"node": ">=18.0.0"
|
||||
}
|
||||
},
|
||||
"node_modules/@azure/core-lro/node_modules/@azure/abort-controller": {
|
||||
"version": "2.1.1",
|
||||
"resolved": "https://registry.npmjs.org/@azure/abort-controller/-/abort-controller-2.1.1.tgz",
|
||||
"integrity": "sha512-NhzeNm5zu2fPlwGXPUjzsRCRuPx5demaZyNcyNYJDqpa/Sbxzvo/RYt9IwUaAOnDW5+r7J9UOE6f22TQnb9nhQ==",
|
||||
"dependencies": {
|
||||
"tslib": "^2.6.2"
|
||||
},
|
||||
"engines": {
|
||||
"node": ">=18.0.0"
|
||||
}
|
||||
},
|
||||
"node_modules/@azure/core-paging": {
|
||||
"version": "1.6.1",
|
||||
"resolved": "https://registry.npmjs.org/@azure/core-paging/-/core-paging-1.6.1.tgz",
|
||||
"integrity": "sha512-3tKIQXSU3mlN+ITz0m2pXLnKK3oQ6/EVcW8ud011Iq+M0rx6Wnm7NUEpoMeOAEedeKlPtemrQzO6YWoDR71O5w==",
|
||||
"dependencies": {
|
||||
"tslib": "^2.6.2"
|
||||
},
|
||||
"engines": {
|
||||
"node": ">=18.0.0"
|
||||
}
|
||||
},
|
||||
"node_modules/@azure/core-tracing": {
|
||||
"version": "1.0.0-preview.13",
|
||||
"resolved": "https://registry.npmjs.org/@azure/core-tracing/-/core-tracing-1.0.0-preview.13.tgz",
|
||||
"integrity": "sha512-KxDlhXyMlh2Jhj2ykX6vNEU0Vou4nHr025KoSEiz7cS3BNiHNaZcdECk/DmLkEB0as5T7b/TpRcehJ5yV6NeXQ==",
|
||||
"dependencies": {
|
||||
"@opentelemetry/api": "^1.0.1",
|
||||
"tslib": "^2.2.0"
|
||||
},
|
||||
"engines": {
|
||||
"node": ">=12.0.0"
|
||||
}
|
||||
},
|
||||
"node_modules/@azure/core-util": {
|
||||
"version": "1.8.1",
|
||||
"resolved": "https://registry.npmjs.org/@azure/core-util/-/core-util-1.8.1.tgz",
|
||||
"integrity": "sha512-L3voj0StUdJ+YKomvwnTv7gHzguJO+a6h30pmmZdRprJCM+RJlGMPxzuh4R7lhQu1jNmEtaHX5wvTgWLDAmbGQ==",
|
||||
"dependencies": {
|
||||
"@azure/abort-controller": "^2.0.0",
|
||||
"tslib": "^2.6.2"
|
||||
},
|
||||
"engines": {
|
||||
"node": ">=18.0.0"
|
||||
}
|
||||
},
|
||||
"node_modules/@azure/core-util/node_modules/@azure/abort-controller": {
|
||||
"version": "2.1.1",
|
||||
"resolved": "https://registry.npmjs.org/@azure/abort-controller/-/abort-controller-2.1.1.tgz",
|
||||
"integrity": "sha512-NhzeNm5zu2fPlwGXPUjzsRCRuPx5demaZyNcyNYJDqpa/Sbxzvo/RYt9IwUaAOnDW5+r7J9UOE6f22TQnb9nhQ==",
|
||||
"dependencies": {
|
||||
"tslib": "^2.6.2"
|
||||
},
|
||||
"engines": {
|
||||
"node": ">=18.0.0"
|
||||
}
|
||||
},
|
||||
"node_modules/@azure/logger": {
|
||||
"version": "1.1.1",
|
||||
"resolved": "https://registry.npmjs.org/@azure/logger/-/logger-1.1.1.tgz",
|
||||
"integrity": "sha512-/+4TtokaGgC+MnThdf6HyIH9Wrjp+CnCn3Nx3ggevN7FFjjNyjqg0yLlc2i9S+Z2uAzI8GYOo35Nzb1MhQ89MA==",
|
||||
"dependencies": {
|
||||
"tslib": "^2.6.2"
|
||||
},
|
||||
"engines": {
|
||||
"node": ">=18.0.0"
|
||||
}
|
||||
},
|
||||
"node_modules/@azure/ms-rest-js": {
|
||||
"version": "2.7.0",
|
||||
"resolved": "https://registry.npmjs.org/@azure/ms-rest-js/-/ms-rest-js-2.7.0.tgz",
|
||||
"integrity": "sha512-ngbzWbqF+NmztDOpLBVDxYM+XLcUj7nKhxGbSU9WtIsXfRB//cf2ZbAG5HkOrhU9/wd/ORRB6lM/d69RKVjiyA==",
|
||||
"dependencies": {
|
||||
"@azure/core-auth": "^1.1.4",
|
||||
"abort-controller": "^3.0.0",
|
||||
"form-data": "^2.5.0",
|
||||
"node-fetch": "^2.6.7",
|
||||
"tslib": "^1.10.0",
|
||||
"tunnel": "0.0.6",
|
||||
"uuid": "^8.3.2",
|
||||
"xml2js": "^0.5.0"
|
||||
}
|
||||
},
|
||||
"node_modules/@azure/ms-rest-js/node_modules/tslib": {
|
||||
"version": "1.14.1",
|
||||
"resolved": "https://registry.npmjs.org/tslib/-/tslib-1.14.1.tgz",
|
||||
"integrity": "sha512-Xni35NKzjgMrwevysHTCArtLDpPvye8zV/0E4EyYn43P7/7qvQwPh9BGkHewbMulVntbigmcT7rdX3BNo9wRJg=="
|
||||
},
|
||||
"node_modules/@azure/storage-blob": {
|
||||
"version": "12.17.0",
|
||||
"resolved": "https://registry.npmjs.org/@azure/storage-blob/-/storage-blob-12.17.0.tgz",
|
||||
"integrity": "sha512-sM4vpsCpcCApagRW5UIjQNlNylo02my2opgp0Emi8x888hZUvJ3dN69Oq20cEGXkMUWnoCrBaB0zyS3yeB87sQ==",
|
||||
"dependencies": {
|
||||
"@azure/abort-controller": "^1.0.0",
|
||||
"@azure/core-http": "^3.0.0",
|
||||
"@azure/core-lro": "^2.2.0",
|
||||
"@azure/core-paging": "^1.1.1",
|
||||
"@azure/core-tracing": "1.0.0-preview.13",
|
||||
"@azure/logger": "^1.0.0",
|
||||
"events": "^3.0.0",
|
||||
"tslib": "^2.2.0"
|
||||
},
|
||||
"engines": {
|
||||
"node": ">=14.0.0"
|
||||
}
|
||||
},
|
||||
"node_modules/@fastify/busboy": {
|
||||
"version": "2.1.1",
|
||||
"resolved": "https://registry.npmjs.org/@fastify/busboy/-/busboy-2.1.1.tgz",
|
||||
"integrity": "sha512-vBZP4NlzfOlerQTnba4aqZoMhE/a9HY7HRqoOPaETQcSQuWEIyZMHGfVu6w9wGtGK5fED5qRs2DteVCjOH60sA==",
|
||||
"engines": {
|
||||
"node": ">=14"
|
||||
}
|
||||
},
|
||||
"node_modules/@opentelemetry/api": {
|
||||
"version": "1.8.0",
|
||||
"resolved": "https://registry.npmjs.org/@opentelemetry/api/-/api-1.8.0.tgz",
|
||||
"integrity": "sha512-I/s6F7yKUDdtMsoBWXJe8Qz40Tui5vsuKCWJEWVL+5q9sSWRzzx6v2KeNsOBEwd94j0eWkpWCH4yB6rZg9Mf0w==",
|
||||
"engines": {
|
||||
"node": ">=8.0.0"
|
||||
}
|
||||
},
|
||||
"node_modules/@types/node": {
|
||||
"version": "18.14.6",
|
||||
"resolved": "https://registry.npmjs.org/@types/node/-/node-18.14.6.tgz",
|
||||
"integrity": "sha512-93+VvleD3mXwlLI/xASjw0FzKcwzl3OdTCzm1LaRfqgS21gfFtK3zDXM5Op9TeeMsJVOaJ2VRDpT9q4Y3d0AvA==",
|
||||
"dev": true,
|
||||
"optional": true
|
||||
"integrity": "sha512-93+VvleD3mXwlLI/xASjw0FzKcwzl3OdTCzm1LaRfqgS21gfFtK3zDXM5Op9TeeMsJVOaJ2VRDpT9q4Y3d0AvA=="
|
||||
},
|
||||
"node_modules/@types/node-fetch": {
|
||||
"version": "2.6.11",
|
||||
"resolved": "https://registry.npmjs.org/@types/node-fetch/-/node-fetch-2.6.11.tgz",
|
||||
"integrity": "sha512-24xFj9R5+rfQJLRyM56qh+wnVSYhyXC2tkoBndtY0U+vubqNsYXGjufB2nn8Q6gt0LrARwL6UBtMCSVCwl4B1g==",
|
||||
"dependencies": {
|
||||
"@types/node": "*",
|
||||
"form-data": "^4.0.0"
|
||||
}
|
||||
},
|
||||
"node_modules/@types/node-fetch/node_modules/form-data": {
|
||||
"version": "4.0.0",
|
||||
"resolved": "https://registry.npmjs.org/form-data/-/form-data-4.0.0.tgz",
|
||||
"integrity": "sha512-ETEklSGi5t0QMZuiXoA/Q6vcnxcLQP5vdugSpuAyi6SVGi2clPPp+xgEhuMaHC+zGgn31Kd235W35f7Hykkaww==",
|
||||
"dependencies": {
|
||||
"asynckit": "^0.4.0",
|
||||
"combined-stream": "^1.0.8",
|
||||
"mime-types": "^2.1.12"
|
||||
},
|
||||
"engines": {
|
||||
"node": ">= 6"
|
||||
}
|
||||
},
|
||||
"node_modules/@types/tunnel": {
|
||||
"version": "0.0.3",
|
||||
"resolved": "https://registry.npmjs.org/@types/tunnel/-/tunnel-0.0.3.tgz",
|
||||
"integrity": "sha512-sOUTGn6h1SfQ+gbgqC364jLFBw2lnFqkgF3q0WovEHRLMrVD1sd5aufqi/aJObLekJO+Aq5z646U4Oxy6shXMA==",
|
||||
"dependencies": {
|
||||
"@types/node": "*"
|
||||
}
|
||||
},
|
||||
"node_modules/@types/yauzl": {
|
||||
"version": "2.10.0",
|
||||
@@ -55,6 +323,36 @@
|
||||
"@types/node": "*"
|
||||
}
|
||||
},
|
||||
"node_modules/abort-controller": {
|
||||
"version": "3.0.0",
|
||||
"resolved": "https://registry.npmjs.org/abort-controller/-/abort-controller-3.0.0.tgz",
|
||||
"integrity": "sha512-h8lQ8tacZYnR3vNQTgibj+tODHI5/+l06Au2Pcriv/Gmet0eaj4TwWH41sO9wnHDiQsEj19q0drzdWdeAHtweg==",
|
||||
"dependencies": {
|
||||
"event-target-shim": "^5.0.0"
|
||||
},
|
||||
"engines": {
|
||||
"node": ">=6.5"
|
||||
}
|
||||
},
|
||||
"node_modules/asynckit": {
|
||||
"version": "0.4.0",
|
||||
"resolved": "https://registry.npmjs.org/asynckit/-/asynckit-0.4.0.tgz",
|
||||
"integrity": "sha512-Oei9OH4tRh0YqU3GxhX79dM/mwVgvbZJaSNaRk+bshkj0S5cfHcgYakreBjrHwatXKbz+IoIdYLxrKim2MjW0Q=="
|
||||
},
|
||||
"node_modules/balanced-match": {
|
||||
"version": "1.0.2",
|
||||
"resolved": "https://registry.npmjs.org/balanced-match/-/balanced-match-1.0.2.tgz",
|
||||
"integrity": "sha512-3oSeUO0TMV67hN1AmbXsK4yaqU7tjiHlbxRDZOpH0KW9+CeX4bRAaX0Anxt0tx2MrpRpWwQaPwIlISEJhYU5Pw=="
|
||||
},
|
||||
"node_modules/brace-expansion": {
|
||||
"version": "1.1.11",
|
||||
"resolved": "https://registry.npmjs.org/brace-expansion/-/brace-expansion-1.1.11.tgz",
|
||||
"integrity": "sha512-iCuPHDFgrHX7H2vEI/5xpz07zSHB00TpugqhmYtVmMO6518mCuRMoOYFldEBl0g187ufozdaHgWKcYFb61qGiA==",
|
||||
"dependencies": {
|
||||
"balanced-match": "^1.0.0",
|
||||
"concat-map": "0.0.1"
|
||||
}
|
||||
},
|
||||
"node_modules/buffer-crc32": {
|
||||
"version": "0.2.13",
|
||||
"resolved": "https://registry.npmjs.org/buffer-crc32/-/buffer-crc32-0.2.13.tgz",
|
||||
@@ -73,6 +371,22 @@
|
||||
"node": ">=10"
|
||||
}
|
||||
},
|
||||
"node_modules/combined-stream": {
|
||||
"version": "1.0.8",
|
||||
"resolved": "https://registry.npmjs.org/combined-stream/-/combined-stream-1.0.8.tgz",
|
||||
"integrity": "sha512-FQN4MRfuJeHf7cBbBMJFXhKSDq+2kAArBlmRBvcvFE5BB1HZKXtSFASDhdlz9zOYwxh8lDdnvmMOe/+5cdoEdg==",
|
||||
"dependencies": {
|
||||
"delayed-stream": "~1.0.0"
|
||||
},
|
||||
"engines": {
|
||||
"node": ">= 0.8"
|
||||
}
|
||||
},
|
||||
"node_modules/concat-map": {
|
||||
"version": "0.0.1",
|
||||
"resolved": "https://registry.npmjs.org/concat-map/-/concat-map-0.0.1.tgz",
|
||||
"integrity": "sha512-/Srv4dswyQNBfohGpz9o6Yb3Gz3SrUDqBH5rTuhGR7ahtlbYKnVxw2bCFMRljaA7EXHaXZ8wsHdodFvbkhKmqg=="
|
||||
},
|
||||
"node_modules/debug": {
|
||||
"version": "4.3.4",
|
||||
"resolved": "https://registry.npmjs.org/debug/-/debug-4.3.4.tgz",
|
||||
@@ -90,6 +404,14 @@
|
||||
}
|
||||
}
|
||||
},
|
||||
"node_modules/delayed-stream": {
|
||||
"version": "1.0.0",
|
||||
"resolved": "https://registry.npmjs.org/delayed-stream/-/delayed-stream-1.0.0.tgz",
|
||||
"integrity": "sha512-ZySD7Nf91aLB0RxL4KGrKHBXl7Eds1DAmEdcoVawXnLD7SDhpNgtuII2aAkg7a7QS41jxPSZ17p4VdGnMHk3MQ==",
|
||||
"engines": {
|
||||
"node": ">=0.4.0"
|
||||
}
|
||||
},
|
||||
"node_modules/end-of-stream": {
|
||||
"version": "1.4.4",
|
||||
"resolved": "https://registry.npmjs.org/end-of-stream/-/end-of-stream-1.4.4.tgz",
|
||||
@@ -99,6 +421,22 @@
|
||||
"once": "^1.4.0"
|
||||
}
|
||||
},
|
||||
"node_modules/event-target-shim": {
|
||||
"version": "5.0.1",
|
||||
"resolved": "https://registry.npmjs.org/event-target-shim/-/event-target-shim-5.0.1.tgz",
|
||||
"integrity": "sha512-i/2XbnSz/uxRCU6+NdVJgKWDTM427+MqYbkQzD321DuCQJUqOuJKIA0IM2+W2xtYHdKOmZ4dR6fExsd4SXL+WQ==",
|
||||
"engines": {
|
||||
"node": ">=6"
|
||||
}
|
||||
},
|
||||
"node_modules/events": {
|
||||
"version": "3.3.0",
|
||||
"resolved": "https://registry.npmjs.org/events/-/events-3.3.0.tgz",
|
||||
"integrity": "sha512-mQw+2fkQbALzQ7V0MY0IqdnXNOeTtP4r0lN9z7AAawCXgqea7bDii20AYrIBrFd/Hx0M2Ocz6S111CaFkUcb0Q==",
|
||||
"engines": {
|
||||
"node": ">=0.8.x"
|
||||
}
|
||||
},
|
||||
"node_modules/extract-zip": {
|
||||
"version": "2.0.1",
|
||||
"resolved": "https://registry.npmjs.org/extract-zip/-/extract-zip-2.0.1.tgz",
|
||||
@@ -128,6 +466,19 @@
|
||||
"pend": "~1.2.0"
|
||||
}
|
||||
},
|
||||
"node_modules/form-data": {
|
||||
"version": "2.5.1",
|
||||
"resolved": "https://registry.npmjs.org/form-data/-/form-data-2.5.1.tgz",
|
||||
"integrity": "sha512-m21N3WOmEEURgk6B9GLOE4RuWOFf28Lhh9qGYeNlGq4VDXUlJy2th2slBNU8Gp8EzloYZOibZJ7t5ecIrFSjVA==",
|
||||
"dependencies": {
|
||||
"asynckit": "^0.4.0",
|
||||
"combined-stream": "^1.0.6",
|
||||
"mime-types": "^2.1.12"
|
||||
},
|
||||
"engines": {
|
||||
"node": ">= 0.12"
|
||||
}
|
||||
},
|
||||
"node_modules/fs": {
|
||||
"version": "0.0.1-security",
|
||||
"resolved": "https://registry.npmjs.org/fs/-/fs-0.0.1-security.tgz",
|
||||
@@ -185,6 +536,36 @@
|
||||
"integrity": "sha512-x00IRNXNy63jwGkJmzPigoySHbaqpNuzKbBOmzK+g2OdZpQ9w+sxCN+VSB3ja7IAge2OP2qpfxTjeNcyjmW1uw==",
|
||||
"dev": true
|
||||
},
|
||||
"node_modules/mime-db": {
|
||||
"version": "1.52.0",
|
||||
"resolved": "https://registry.npmjs.org/mime-db/-/mime-db-1.52.0.tgz",
|
||||
"integrity": "sha512-sPU4uV7dYlvtWJxwwxHD0PuihVNiE7TyAbQ5SWxDCB9mUYvOgroQOwYQQOKPJ8CIbE+1ETVlOoK1UC2nU3gYvg==",
|
||||
"engines": {
|
||||
"node": ">= 0.6"
|
||||
}
|
||||
},
|
||||
"node_modules/mime-types": {
|
||||
"version": "2.1.35",
|
||||
"resolved": "https://registry.npmjs.org/mime-types/-/mime-types-2.1.35.tgz",
|
||||
"integrity": "sha512-ZDY+bPm5zTTF+YpCrAU9nK0UgICYPT0QtT1NZWFv4s++TNkcgVaT0g6+4R2uI4MjQjzysHB1zxuWL50hzaeXiw==",
|
||||
"dependencies": {
|
||||
"mime-db": "1.52.0"
|
||||
},
|
||||
"engines": {
|
||||
"node": ">= 0.6"
|
||||
}
|
||||
},
|
||||
"node_modules/minimatch": {
|
||||
"version": "3.1.2",
|
||||
"resolved": "https://registry.npmjs.org/minimatch/-/minimatch-3.1.2.tgz",
|
||||
"integrity": "sha512-J7p63hRiAjw1NDEww1W7i37+ByIrOWO5XQQAzZ3VOcL0PNybwpfmV/N05zFAzwQ9USyEcX6t3UO+K5aqBQOIHw==",
|
||||
"dependencies": {
|
||||
"brace-expansion": "^1.1.7"
|
||||
},
|
||||
"engines": {
|
||||
"node": "*"
|
||||
}
|
||||
},
|
||||
"node_modules/minipass": {
|
||||
"version": "4.2.4",
|
||||
"resolved": "https://registry.npmjs.org/minipass/-/minipass-4.2.4.tgz",
|
||||
@@ -237,6 +618,25 @@
|
||||
"integrity": "sha512-sGkPx+VjMtmA6MX27oA4FBFELFCZZ4S4XqeGOXCv68tT+jb3vk/RyaKWP0PTKyWtmLSM0b+adUTEvbs1PEaH2w==",
|
||||
"dev": true
|
||||
},
|
||||
"node_modules/node-fetch": {
|
||||
"version": "2.7.0",
|
||||
"resolved": "https://registry.npmjs.org/node-fetch/-/node-fetch-2.7.0.tgz",
|
||||
"integrity": "sha512-c4FRfUm/dbcWZ7U+1Wq0AwCyFL+3nt2bEw05wfxSz+DWpWsitgmSgYmy2dQdWyKC1694ELPqMs/YzUSNozLt8A==",
|
||||
"dependencies": {
|
||||
"whatwg-url": "^5.0.0"
|
||||
},
|
||||
"engines": {
|
||||
"node": "4.x || >=6.0.0"
|
||||
},
|
||||
"peerDependencies": {
|
||||
"encoding": "^0.1.0"
|
||||
},
|
||||
"peerDependenciesMeta": {
|
||||
"encoding": {
|
||||
"optional": true
|
||||
}
|
||||
}
|
||||
},
|
||||
"node_modules/once": {
|
||||
"version": "1.4.0",
|
||||
"resolved": "https://registry.npmjs.org/once/-/once-1.4.0.tgz",
|
||||
@@ -272,7 +672,6 @@
|
||||
"version": "0.11.10",
|
||||
"resolved": "https://registry.npmjs.org/process/-/process-0.11.10.tgz",
|
||||
"integrity": "sha512-cdGef/drWFoydD1JsMzuFf8100nZl+GT+yacc2bEced5f9Rjk4z+WtFUTBu9PhOi9j/jfmBPu0mMEY4wIdAF8A==",
|
||||
"dev": true,
|
||||
"engines": {
|
||||
"node": ">= 0.6.0"
|
||||
}
|
||||
@@ -287,6 +686,19 @@
|
||||
"once": "^1.3.1"
|
||||
}
|
||||
},
|
||||
"node_modules/sax": {
|
||||
"version": "1.3.0",
|
||||
"resolved": "https://registry.npmjs.org/sax/-/sax-1.3.0.tgz",
|
||||
"integrity": "sha512-0s+oAmw9zLl1V1cS9BtZN7JAd0cW5e0QH4W3LWEK6a4LaLEA2OTpGYWDY+6XasBLtz6wkm3u1xRw95mRuJ59WA=="
|
||||
},
|
||||
"node_modules/semver": {
|
||||
"version": "6.3.1",
|
||||
"resolved": "https://registry.npmjs.org/semver/-/semver-6.3.1.tgz",
|
||||
"integrity": "sha512-BR7VvDCVHO+q2xBEWskxS6DJE1qRnb7DxzUrogb71CWoSficBxYsiAGd+Kl0mmq/MprG9yArRkyrQxTO6XjMzA==",
|
||||
"bin": {
|
||||
"semver": "bin/semver.js"
|
||||
}
|
||||
},
|
||||
"node_modules/tar": {
|
||||
"version": "6.1.13",
|
||||
"resolved": "https://registry.npmjs.org/tar/-/tar-6.1.13.tgz",
|
||||
@@ -304,15 +716,35 @@
|
||||
"node": ">=10"
|
||||
}
|
||||
},
|
||||
"node_modules/tr46": {
|
||||
"version": "0.0.3",
|
||||
"resolved": "https://registry.npmjs.org/tr46/-/tr46-0.0.3.tgz",
|
||||
"integrity": "sha512-N3WMsuqV66lT30CrXNbEjx4GEwlow3v6rr4mCcv6prnfwhS01rkgyFdjPNBYd9br7LpXV1+Emh01fHnq2Gdgrw=="
|
||||
},
|
||||
"node_modules/tslib": {
|
||||
"version": "2.6.2",
|
||||
"resolved": "https://registry.npmjs.org/tslib/-/tslib-2.6.2.tgz",
|
||||
"integrity": "sha512-AEYxH93jGFPn/a2iVAwW87VuUIkR1FVUKB77NwMF7nBTDkDrrT/Hpt/IrCJ0QXhW27jTBDcf5ZY7w6RiqTMw2Q=="
|
||||
},
|
||||
"node_modules/tunnel": {
|
||||
"version": "0.0.6",
|
||||
"resolved": "https://registry.npmjs.org/tunnel/-/tunnel-0.0.6.tgz",
|
||||
"integrity": "sha512-1h/Lnq9yajKY2PEbBadPXj3VxsDDu844OnaAo52UVmIzIvwwtBPIuNvkjuzBlTWpfJyUbG3ez0KSBibQkj4ojg==",
|
||||
"dev": true,
|
||||
"engines": {
|
||||
"node": ">=0.6.11 <=0.7.0 || >=0.7.3"
|
||||
}
|
||||
},
|
||||
"node_modules/undici": {
|
||||
"version": "5.28.3",
|
||||
"resolved": "https://registry.npmjs.org/undici/-/undici-5.28.3.tgz",
|
||||
"integrity": "sha512-3ItfzbrhDlINjaP0duwnNsKpDQk3acHI3gVJ1z4fmwMK31k5G9OVIAMLSIaP6w4FaGkaAkN6zaQO9LUvZ1t7VA==",
|
||||
"dependencies": {
|
||||
"@fastify/busboy": "^2.0.0"
|
||||
},
|
||||
"engines": {
|
||||
"node": ">=14.0"
|
||||
}
|
||||
},
|
||||
"node_modules/util": {
|
||||
"version": "0.10.4",
|
||||
"resolved": "https://registry.npmjs.org/util/-/util-0.10.4.tgz",
|
||||
@@ -326,17 +758,50 @@
|
||||
"version": "8.3.2",
|
||||
"resolved": "https://registry.npmjs.org/uuid/-/uuid-8.3.2.tgz",
|
||||
"integrity": "sha512-+NYs2QeMWy+GWFOEm9xnn6HCDp0l7QBD7ml8zLUmJ+93Q5NF0NocErnwkTkXVFNiX3/fpC6afS8Dhb/gz7R7eg==",
|
||||
"dev": true,
|
||||
"bin": {
|
||||
"uuid": "dist/bin/uuid"
|
||||
}
|
||||
},
|
||||
"node_modules/webidl-conversions": {
|
||||
"version": "3.0.1",
|
||||
"resolved": "https://registry.npmjs.org/webidl-conversions/-/webidl-conversions-3.0.1.tgz",
|
||||
"integrity": "sha512-2JAn3z8AR6rjK8Sm8orRC0h/bcl/DqL7tRPdGZ4I1CjdF+EaMLmYxBHyXuKL849eucPFhvBoxMsflfOb8kxaeQ=="
|
||||
},
|
||||
"node_modules/whatwg-url": {
|
||||
"version": "5.0.0",
|
||||
"resolved": "https://registry.npmjs.org/whatwg-url/-/whatwg-url-5.0.0.tgz",
|
||||
"integrity": "sha512-saE57nupxk6v3HY35+jzBwYa0rKSy0XR8JSxZPwgLr7ys0IBzhGviA1/TUGJLmSVqs8pb9AnvICXEuOHLprYTw==",
|
||||
"dependencies": {
|
||||
"tr46": "~0.0.3",
|
||||
"webidl-conversions": "^3.0.0"
|
||||
}
|
||||
},
|
||||
"node_modules/wrappy": {
|
||||
"version": "1.0.2",
|
||||
"resolved": "https://registry.npmjs.org/wrappy/-/wrappy-1.0.2.tgz",
|
||||
"integrity": "sha512-l4Sp/DRseor9wL6EvV2+TuQn63dMkPjZ/sp9XkghTEbV9KlPS1xUsZ3u7/IQO4wxtcFB4bgpQPRcR3QCvezPcQ==",
|
||||
"dev": true
|
||||
},
|
||||
"node_modules/xml2js": {
|
||||
"version": "0.5.0",
|
||||
"resolved": "https://registry.npmjs.org/xml2js/-/xml2js-0.5.0.tgz",
|
||||
"integrity": "sha512-drPFnkQJik/O+uPKpqSgr22mpuFHqKdbS835iAQrUC73L2F5WkboIRd63ai/2Yg6I1jzifPFKH2NTK+cfglkIA==",
|
||||
"dependencies": {
|
||||
"sax": ">=0.6.0",
|
||||
"xmlbuilder": "~11.0.0"
|
||||
},
|
||||
"engines": {
|
||||
"node": ">=4.0.0"
|
||||
}
|
||||
},
|
||||
"node_modules/xmlbuilder": {
|
||||
"version": "11.0.1",
|
||||
"resolved": "https://registry.npmjs.org/xmlbuilder/-/xmlbuilder-11.0.1.tgz",
|
||||
"integrity": "sha512-fDlsI/kFEx7gLvbecc0/ohLG50fugQp8ryHzMTuW9vSa1GJ0XYWKnhsUx7oie3G98+r56aTQIUB4kht42R3JvA==",
|
||||
"engines": {
|
||||
"node": ">=4.0"
|
||||
}
|
||||
},
|
||||
"node_modules/yallist": {
|
||||
"version": "4.0.0",
|
||||
"resolved": "https://registry.npmjs.org/yallist/-/yallist-4.0.0.tgz",
|
||||
|
9  node_modules/@actions/cache/LICENSE.md  generated  vendored  Normal file
@@ -0,0 +1,9 @@
The MIT License (MIT)

Copyright 2019 GitHub

Permission is hereby granted, free of charge, to any person obtaining a copy of this software and associated documentation files (the "Software"), to deal in the Software without restriction, including without limitation the rights to use, copy, modify, merge, publish, distribute, sublicense, and/or sell copies of the Software, and to permit persons to whom the Software is furnished to do so, subject to the following conditions:

The above copyright notice and this permission notice shall be included in all copies or substantial portions of the Software.

THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE SOFTWARE.
51  node_modules/@actions/cache/README.md  generated  vendored  Normal file
@@ -0,0 +1,51 @@
# `@actions/cache`

> Functions necessary for caching dependencies and build outputs to improve workflow execution time.

See ["Caching dependencies to speed up workflows"](https://docs.github.com/en/actions/using-workflows/caching-dependencies-to-speed-up-workflows) for how caching works.

Note that GitHub will remove any cache entries that have not been accessed in over 7 days. There is no limit on the number of caches you can store, but the total size of all caches in a repository is limited to 10 GB. If you exceed this limit, GitHub will save your cache but will begin evicting caches until the total size is less than 10 GB.

## Usage

This package is used by the v2+ versions of our first party cache action. You can find an example implementation in the cache repo [here](https://github.com/actions/cache).

#### Save Cache

Saves a cache containing the files in `paths` using the `key` provided. The files are compressed with the zstandard algorithm if zstd is installed, otherwise gzip is used. The function returns the cache id if the cache was saved successfully and throws an error if the cache upload fails.

```js
const cache = require('@actions/cache');
const paths = [
    'node_modules',
    'packages/*/node_modules/'
]
const key = 'npm-foobar-d5ea0750'
const cacheId = await cache.saveCache(paths, key)
```

#### Restore Cache

Restores a cache based on `key` and `restoreKeys` to the `paths` provided. The function returns the cache key on a cache hit and returns undefined if no cache is found.

```js
const cache = require('@actions/cache');
const paths = [
    'node_modules',
    'packages/*/node_modules/'
]
const key = 'npm-foobar-d5ea0750'
const restoreKeys = [
    'npm-foobar-',
    'npm-'
]
const cacheKey = await cache.restoreCache(paths, key, restoreKeys)
```

##### Cache segment restore timeout

A cache gets downloaded in multiple segments of fixed sizes (now `128MB` to fail fast; previously `1GB` for a `32-bit` runner and `2GB` for a `64-bit` runner). Sometimes a segment download gets stuck, which leaves the workflow job stuck forever until it fails. Version `v3.0.4` of the cache package introduced a segment download timeout, which allows a stuck segment download to be aborted so the job can proceed with a cache miss.

The default value of this timeout is 10 minutes (starting with `v3.2.1`; it was 60 minutes in versions `v3.0.4` through `v3.2.0`, both included) and can be customized by specifying an [environment variable](https://docs.github.com/en/actions/learn-github-actions/environment-variables) named `SEGMENT_DOWNLOAD_TIMEOUT_MINS` with the timeout value in minutes.
34  node_modules/@actions/cache/lib/cache.d.ts  generated  vendored  Normal file
@@ -0,0 +1,34 @@
import { DownloadOptions, UploadOptions } from './options';
export declare class ValidationError extends Error {
    constructor(message: string);
}
export declare class ReserveCacheError extends Error {
    constructor(message: string);
}
/**
 * isFeatureAvailable to check the presence of Actions cache service
 *
 * @returns boolean return true if Actions cache service feature is available, otherwise false
 */
export declare function isFeatureAvailable(): boolean;
/**
 * Restores cache from keys
 *
 * @param paths a list of file paths to restore from the cache
 * @param primaryKey an explicit key for restoring the cache
 * @param restoreKeys an optional ordered list of keys to use for restoring the cache if no cache hit occurred for key
 * @param downloadOptions cache download options
 * @param enableCrossOsArchive an optional boolean enabled to restore on windows any cache created on any platform
 * @returns string returns the key for the cache hit, otherwise returns undefined
 */
export declare function restoreCache(paths: string[], primaryKey: string, restoreKeys?: string[], options?: DownloadOptions, enableCrossOsArchive?: boolean): Promise<string | undefined>;
/**
 * Saves a list of files with the specified key
 *
 * @param paths a list of file paths to be cached
 * @param key an explicit key for restoring the cache
 * @param enableCrossOsArchive an optional boolean enabled to save cache on windows which could be restored on any platform
 * @param options cache upload options
 * @returns number returns cacheId if the cache was saved successfully and throws an error if save fails
 */
export declare function saveCache(paths: string[], key: string, options?: UploadOptions, enableCrossOsArchive?: boolean): Promise<number>;
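A short usage sketch of the surface declared above; the paths, keys, and option value are illustrative assumptions, and only the function names and signatures come from this declaration file.

```js
// Hedged sketch of the @actions/cache public surface; paths and keys are made up.
const cache = require('@actions/cache');

async function main() {
  // isFeatureAvailable() reports whether the runner exposes the cache service
  // (it checks ACTIONS_CACHE_URL, see cache.js below).
  if (!cache.isFeatureAvailable()) {
    return;
  }
  // lookupOnly is a DownloadOptions field: check for a hit without downloading.
  const hit = await cache.restoreCache(['dist/slim'], 'slim-v1', ['slim-'], { lookupOnly: true });
  if (!hit) {
    // ...produce dist/slim here, then persist it for the next run.
    await cache.saveCache(['dist/slim'], 'slim-v1');
  }
}

main();
```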
235  node_modules/@actions/cache/lib/cache.js  generated  vendored  Normal file
@@ -0,0 +1,235 @@
"use strict";
|
||||
var __createBinding = (this && this.__createBinding) || (Object.create ? (function(o, m, k, k2) {
|
||||
if (k2 === undefined) k2 = k;
|
||||
var desc = Object.getOwnPropertyDescriptor(m, k);
|
||||
if (!desc || ("get" in desc ? !m.__esModule : desc.writable || desc.configurable)) {
|
||||
desc = { enumerable: true, get: function() { return m[k]; } };
|
||||
}
|
||||
Object.defineProperty(o, k2, desc);
|
||||
}) : (function(o, m, k, k2) {
|
||||
if (k2 === undefined) k2 = k;
|
||||
o[k2] = m[k];
|
||||
}));
|
||||
var __setModuleDefault = (this && this.__setModuleDefault) || (Object.create ? (function(o, v) {
|
||||
Object.defineProperty(o, "default", { enumerable: true, value: v });
|
||||
}) : function(o, v) {
|
||||
o["default"] = v;
|
||||
});
|
||||
var __importStar = (this && this.__importStar) || function (mod) {
|
||||
if (mod && mod.__esModule) return mod;
|
||||
var result = {};
|
||||
if (mod != null) for (var k in mod) if (k !== "default" && Object.prototype.hasOwnProperty.call(mod, k)) __createBinding(result, mod, k);
|
||||
__setModuleDefault(result, mod);
|
||||
return result;
|
||||
};
|
||||
var __awaiter = (this && this.__awaiter) || function (thisArg, _arguments, P, generator) {
|
||||
function adopt(value) { return value instanceof P ? value : new P(function (resolve) { resolve(value); }); }
|
||||
return new (P || (P = Promise))(function (resolve, reject) {
|
||||
function fulfilled(value) { try { step(generator.next(value)); } catch (e) { reject(e); } }
|
||||
function rejected(value) { try { step(generator["throw"](value)); } catch (e) { reject(e); } }
|
||||
function step(result) { result.done ? resolve(result.value) : adopt(result.value).then(fulfilled, rejected); }
|
||||
step((generator = generator.apply(thisArg, _arguments || [])).next());
|
||||
});
|
||||
};
|
||||
Object.defineProperty(exports, "__esModule", { value: true });
|
||||
exports.saveCache = exports.restoreCache = exports.isFeatureAvailable = exports.ReserveCacheError = exports.ValidationError = void 0;
|
||||
const core = __importStar(require("@actions/core"));
|
||||
const path = __importStar(require("path"));
|
||||
const utils = __importStar(require("./internal/cacheUtils"));
|
||||
const cacheHttpClient = __importStar(require("./internal/cacheHttpClient"));
|
||||
const tar_1 = require("./internal/tar");
|
||||
class ValidationError extends Error {
|
||||
constructor(message) {
|
||||
super(message);
|
||||
this.name = 'ValidationError';
|
||||
Object.setPrototypeOf(this, ValidationError.prototype);
|
||||
}
|
||||
}
|
||||
exports.ValidationError = ValidationError;
|
||||
class ReserveCacheError extends Error {
|
||||
constructor(message) {
|
||||
super(message);
|
||||
this.name = 'ReserveCacheError';
|
||||
Object.setPrototypeOf(this, ReserveCacheError.prototype);
|
||||
}
|
||||
}
|
||||
exports.ReserveCacheError = ReserveCacheError;
|
||||
function checkPaths(paths) {
|
||||
if (!paths || paths.length === 0) {
|
||||
throw new ValidationError(`Path Validation Error: At least one directory or file path is required`);
|
||||
}
|
||||
}
|
||||
function checkKey(key) {
|
||||
if (key.length > 512) {
|
||||
throw new ValidationError(`Key Validation Error: ${key} cannot be larger than 512 characters.`);
|
||||
}
|
||||
const regex = /^[^,]*$/;
|
||||
if (!regex.test(key)) {
|
||||
throw new ValidationError(`Key Validation Error: ${key} cannot contain commas.`);
|
||||
}
|
||||
}
|
||||
/**
|
||||
* isFeatureAvailable to check the presence of Actions cache service
|
||||
*
|
||||
* @returns boolean return true if Actions cache service feature is available, otherwise false
|
||||
*/
|
||||
function isFeatureAvailable() {
|
||||
return !!process.env['ACTIONS_CACHE_URL'];
|
||||
}
|
||||
exports.isFeatureAvailable = isFeatureAvailable;
|
||||
/**
|
||||
* Restores cache from keys
|
||||
*
|
||||
* @param paths a list of file paths to restore from the cache
|
||||
* @param primaryKey an explicit key for restoring the cache
|
||||
* @param restoreKeys an optional ordered list of keys to use for restoring the cache if no cache hit occurred for key
|
||||
* @param downloadOptions cache download options
|
||||
* @param enableCrossOsArchive an optional boolean enabled to restore on windows any cache created on any platform
|
||||
* @returns string returns the key for the cache hit, otherwise returns undefined
|
||||
*/
|
||||
function restoreCache(paths, primaryKey, restoreKeys, options, enableCrossOsArchive = false) {
|
||||
return __awaiter(this, void 0, void 0, function* () {
|
||||
checkPaths(paths);
|
||||
restoreKeys = restoreKeys || [];
|
||||
const keys = [primaryKey, ...restoreKeys];
|
||||
core.debug('Resolved Keys:');
|
||||
core.debug(JSON.stringify(keys));
|
||||
if (keys.length > 10) {
|
||||
throw new ValidationError(`Key Validation Error: Keys are limited to a maximum of 10.`);
|
||||
}
|
||||
for (const key of keys) {
|
||||
checkKey(key);
|
||||
}
|
||||
const compressionMethod = yield utils.getCompressionMethod();
|
||||
let archivePath = '';
|
||||
try {
|
||||
// path are needed to compute version
|
||||
const cacheEntry = yield cacheHttpClient.getCacheEntry(keys, paths, {
|
||||
compressionMethod,
|
||||
enableCrossOsArchive
|
||||
});
|
||||
if (!(cacheEntry === null || cacheEntry === void 0 ? void 0 : cacheEntry.archiveLocation)) {
|
||||
// Cache not found
|
||||
return undefined;
|
||||
}
|
||||
if (options === null || options === void 0 ? void 0 : options.lookupOnly) {
|
||||
core.info('Lookup only - skipping download');
|
||||
return cacheEntry.cacheKey;
|
||||
}
|
||||
archivePath = path.join(yield utils.createTempDirectory(), utils.getCacheFileName(compressionMethod));
|
||||
core.debug(`Archive Path: ${archivePath}`);
|
||||
// Download the cache from the cache entry
|
||||
yield cacheHttpClient.downloadCache(cacheEntry.archiveLocation, archivePath, options);
|
||||
if (core.isDebug()) {
|
||||
yield (0, tar_1.listTar)(archivePath, compressionMethod);
|
||||
}
|
||||
const archiveFileSize = utils.getArchiveFileSizeInBytes(archivePath);
|
||||
core.info(`Cache Size: ~${Math.round(archiveFileSize / (1024 * 1024))} MB (${archiveFileSize} B)`);
|
||||
yield (0, tar_1.extractTar)(archivePath, compressionMethod);
|
||||
core.info('Cache restored successfully');
|
||||
return cacheEntry.cacheKey;
|
||||
}
|
||||
catch (error) {
|
||||
const typedError = error;
|
||||
if (typedError.name === ValidationError.name) {
|
||||
throw error;
|
||||
}
|
||||
else {
|
||||
// Supress all non-validation cache related errors because caching should be optional
|
||||
core.warning(`Failed to restore: ${error.message}`);
|
||||
}
|
||||
}
|
||||
finally {
|
||||
// Try to delete the archive to save space
|
||||
try {
|
||||
yield utils.unlinkFile(archivePath);
|
||||
}
|
||||
catch (error) {
|
||||
core.debug(`Failed to delete archive: ${error}`);
|
||||
}
|
||||
}
|
||||
return undefined;
|
||||
});
|
||||
}
|
||||
exports.restoreCache = restoreCache;
|
||||
/**
|
||||
* Saves a list of files with the specified key
|
||||
*
|
||||
* @param paths a list of file paths to be cached
|
||||
* @param key an explicit key for restoring the cache
|
||||
* @param enableCrossOsArchive an optional boolean enabled to save cache on windows which could be restored on any platform
|
||||
* @param options cache upload options
|
||||
* @returns number returns cacheId if the cache was saved successfully and throws an error if save fails
|
||||
*/
|
||||
function saveCache(paths, key, options, enableCrossOsArchive = false) {
|
||||
var _a, _b, _c, _d, _e;
|
||||
return __awaiter(this, void 0, void 0, function* () {
|
||||
checkPaths(paths);
|
||||
checkKey(key);
|
||||
const compressionMethod = yield utils.getCompressionMethod();
|
||||
let cacheId = -1;
|
||||
const cachePaths = yield utils.resolvePaths(paths);
|
||||
core.debug('Cache Paths:');
|
||||
core.debug(`${JSON.stringify(cachePaths)}`);
|
||||
if (cachePaths.length === 0) {
|
||||
throw new Error(`Path Validation Error: Path(s) specified in the action for caching do(es) not exist, hence no cache is being saved.`);
|
||||
}
|
||||
const archiveFolder = yield utils.createTempDirectory();
|
||||
const archivePath = path.join(archiveFolder, utils.getCacheFileName(compressionMethod));
|
||||
core.debug(`Archive Path: ${archivePath}`);
|
||||
try {
|
||||
yield (0, tar_1.createTar)(archiveFolder, cachePaths, compressionMethod);
|
||||
if (core.isDebug()) {
|
||||
yield (0, tar_1.listTar)(archivePath, compressionMethod);
|
||||
}
|
||||
const fileSizeLimit = 10 * 1024 * 1024 * 1024; // 10GB per repo limit
|
||||
const archiveFileSize = utils.getArchiveFileSizeInBytes(archivePath);
|
||||
core.debug(`File Size: ${archiveFileSize}`);
|
||||
// For GHES, this check will take place in ReserveCache API with enterprise file size limit
|
||||
if (archiveFileSize > fileSizeLimit && !utils.isGhes()) {
|
||||
throw new Error(`Cache size of ~${Math.round(archiveFileSize / (1024 * 1024))} MB (${archiveFileSize} B) is over the 10GB limit, not saving cache.`);
|
||||
}
|
||||
core.debug('Reserving Cache');
|
||||
const reserveCacheResponse = yield cacheHttpClient.reserveCache(key, paths, {
|
||||
compressionMethod,
|
||||
enableCrossOsArchive,
|
||||
cacheSize: archiveFileSize
|
||||
});
|
||||
if ((_a = reserveCacheResponse === null || reserveCacheResponse === void 0 ? void 0 : reserveCacheResponse.result) === null || _a === void 0 ? void 0 : _a.cacheId) {
|
||||
cacheId = (_b = reserveCacheResponse === null || reserveCacheResponse === void 0 ? void 0 : reserveCacheResponse.result) === null || _b === void 0 ? void 0 : _b.cacheId;
|
||||
}
|
||||
else if ((reserveCacheResponse === null || reserveCacheResponse === void 0 ? void 0 : reserveCacheResponse.statusCode) === 400) {
|
||||
throw new Error((_d = (_c = reserveCacheResponse === null || reserveCacheResponse === void 0 ? void 0 : reserveCacheResponse.error) === null || _c === void 0 ? void 0 : _c.message) !== null && _d !== void 0 ? _d : `Cache size of ~${Math.round(archiveFileSize / (1024 * 1024))} MB (${archiveFileSize} B) is over the data cap limit, not saving cache.`);
|
||||
}
|
||||
else {
|
||||
throw new ReserveCacheError(`Unable to reserve cache with key ${key}, another job may be creating this cache. More details: ${(_e = reserveCacheResponse === null || reserveCacheResponse === void 0 ? void 0 : reserveCacheResponse.error) === null || _e === void 0 ? void 0 : _e.message}`);
|
||||
}
|
||||
core.debug(`Saving Cache (ID: ${cacheId})`);
|
||||
yield cacheHttpClient.saveCache(cacheId, archivePath, options);
|
||||
}
|
||||
catch (error) {
|
||||
const typedError = error;
|
||||
if (typedError.name === ValidationError.name) {
|
||||
throw error;
|
||||
}
|
||||
else if (typedError.name === ReserveCacheError.name) {
|
||||
core.info(`Failed to save: ${typedError.message}`);
|
||||
}
|
||||
else {
|
||||
core.warning(`Failed to save: ${typedError.message}`);
|
||||
}
|
||||
}
|
||||
finally {
|
||||
// Try to delete the archive to save space
|
||||
try {
|
||||
yield utils.unlinkFile(archivePath);
|
||||
}
|
||||
catch (error) {
|
||||
core.debug(`Failed to delete archive: ${error}`);
|
||||
}
|
||||
}
|
||||
return cacheId;
|
||||
});
|
||||
}
|
||||
exports.saveCache = saveCache;
|
||||
//# sourceMappingURL=cache.js.map
|
1  node_modules/@actions/cache/lib/cache.js.map  generated  vendored  Normal file
File diff suppressed because one or more lines are too long
8  node_modules/@actions/cache/lib/internal/cacheHttpClient.d.ts  generated  vendored  Normal file
@@ -0,0 +1,8 @@
import { CompressionMethod } from './constants';
import { ArtifactCacheEntry, InternalCacheOptions, ReserveCacheResponse, ITypedResponseWithError } from './contracts';
import { DownloadOptions, UploadOptions } from '../options';
export declare function getCacheVersion(paths: string[], compressionMethod?: CompressionMethod, enableCrossOsArchive?: boolean): string;
export declare function getCacheEntry(keys: string[], paths: string[], options?: InternalCacheOptions): Promise<ArtifactCacheEntry | null>;
export declare function downloadCache(archiveLocation: string, archivePath: string, options?: DownloadOptions): Promise<void>;
export declare function reserveCache(key: string, paths: string[], options?: InternalCacheOptions): Promise<ITypedResponseWithError<ReserveCacheResponse>>;
export declare function saveCache(cacheId: number, archivePath: string, options?: UploadOptions): Promise<void>;
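The declarations above describe the internal cache-service protocol that the public `restoreCache`/`saveCache` functions are built on: compute a version for the paths, look up or reserve an entry, then download or upload-and-commit the archive. A hedged sketch of that flow follows; these are internal helpers of `@actions/cache`, not a supported public API, and the key, paths, and file names are made up.

```js
// Internal-flow sketch only; the real callers are restoreCache/saveCache in cache.js.
const client = require('@actions/cache/lib/internal/cacheHttpClient');

async function demo() {
  const paths = ['dist/slim'];

  // Restore: resolve the entry for a key, then download its archive location.
  const entry = await client.getCacheEntry(['slim-v1'], paths, { compressionMethod: 'gzip' });
  if (entry && entry.archiveLocation) {
    await client.downloadCache(entry.archiveLocation, '/tmp/cache.tgz');
  }

  // Save: reserve a cache id for the key/version, then upload and commit the archive.
  const reservation = await client.reserveCache('slim-v1', paths, { compressionMethod: 'gzip' });
  const cacheId = reservation.result && reservation.result.cacheId;
  if (cacheId) {
    await client.saveCache(cacheId, '/tmp/cache.tgz');
  }
}
```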
262  node_modules/@actions/cache/lib/internal/cacheHttpClient.js  generated  vendored  Normal file
@@ -0,0 +1,262 @@
"use strict";
|
||||
var __createBinding = (this && this.__createBinding) || (Object.create ? (function(o, m, k, k2) {
|
||||
if (k2 === undefined) k2 = k;
|
||||
var desc = Object.getOwnPropertyDescriptor(m, k);
|
||||
if (!desc || ("get" in desc ? !m.__esModule : desc.writable || desc.configurable)) {
|
||||
desc = { enumerable: true, get: function() { return m[k]; } };
|
||||
}
|
||||
Object.defineProperty(o, k2, desc);
|
||||
}) : (function(o, m, k, k2) {
|
||||
if (k2 === undefined) k2 = k;
|
||||
o[k2] = m[k];
|
||||
}));
|
||||
var __setModuleDefault = (this && this.__setModuleDefault) || (Object.create ? (function(o, v) {
|
||||
Object.defineProperty(o, "default", { enumerable: true, value: v });
|
||||
}) : function(o, v) {
|
||||
o["default"] = v;
|
||||
});
|
||||
var __importStar = (this && this.__importStar) || function (mod) {
|
||||
if (mod && mod.__esModule) return mod;
|
||||
var result = {};
|
||||
if (mod != null) for (var k in mod) if (k !== "default" && Object.prototype.hasOwnProperty.call(mod, k)) __createBinding(result, mod, k);
|
||||
__setModuleDefault(result, mod);
|
||||
return result;
|
||||
};
|
||||
var __awaiter = (this && this.__awaiter) || function (thisArg, _arguments, P, generator) {
|
||||
function adopt(value) { return value instanceof P ? value : new P(function (resolve) { resolve(value); }); }
|
||||
return new (P || (P = Promise))(function (resolve, reject) {
|
||||
function fulfilled(value) { try { step(generator.next(value)); } catch (e) { reject(e); } }
|
||||
function rejected(value) { try { step(generator["throw"](value)); } catch (e) { reject(e); } }
|
||||
function step(result) { result.done ? resolve(result.value) : adopt(result.value).then(fulfilled, rejected); }
|
||||
step((generator = generator.apply(thisArg, _arguments || [])).next());
|
||||
});
|
||||
};
|
||||
Object.defineProperty(exports, "__esModule", { value: true });
|
||||
exports.saveCache = exports.reserveCache = exports.downloadCache = exports.getCacheEntry = exports.getCacheVersion = void 0;
|
||||
const core = __importStar(require("@actions/core"));
|
||||
const http_client_1 = require("@actions/http-client");
|
||||
const auth_1 = require("@actions/http-client/lib/auth");
|
||||
const crypto = __importStar(require("crypto"));
|
||||
const fs = __importStar(require("fs"));
|
||||
const url_1 = require("url");
|
||||
const utils = __importStar(require("./cacheUtils"));
|
||||
const downloadUtils_1 = require("./downloadUtils");
|
||||
const options_1 = require("../options");
|
||||
const requestUtils_1 = require("./requestUtils");
|
||||
const versionSalt = '1.0';
|
||||
function getCacheApiUrl(resource) {
|
||||
const baseUrl = process.env['ACTIONS_CACHE_URL'] || '';
|
||||
if (!baseUrl) {
|
||||
throw new Error('Cache Service Url not found, unable to restore cache.');
|
||||
}
|
||||
const url = `${baseUrl}_apis/artifactcache/${resource}`;
|
||||
core.debug(`Resource Url: ${url}`);
|
||||
return url;
|
||||
}
|
||||
function createAcceptHeader(type, apiVersion) {
|
||||
return `${type};api-version=${apiVersion}`;
|
||||
}
|
||||
function getRequestOptions() {
|
||||
const requestOptions = {
|
||||
headers: {
|
||||
Accept: createAcceptHeader('application/json', '6.0-preview.1')
|
||||
}
|
||||
};
|
||||
return requestOptions;
|
||||
}
|
||||
function createHttpClient() {
|
||||
const token = process.env['ACTIONS_RUNTIME_TOKEN'] || '';
|
||||
const bearerCredentialHandler = new auth_1.BearerCredentialHandler(token);
|
||||
return new http_client_1.HttpClient('actions/cache', [bearerCredentialHandler], getRequestOptions());
|
||||
}
|
||||
function getCacheVersion(paths, compressionMethod, enableCrossOsArchive = false) {
|
||||
// don't pass changes upstream
|
||||
const components = paths.slice();
|
||||
// Add compression method to cache version to restore
|
||||
// compressed cache as per compression method
|
||||
if (compressionMethod) {
|
||||
components.push(compressionMethod);
|
||||
}
|
||||
// Only check for windows platforms if enableCrossOsArchive is false
|
||||
if (process.platform === 'win32' && !enableCrossOsArchive) {
|
||||
components.push('windows-only');
|
||||
}
|
||||
// Add salt to cache version to support breaking changes in cache entry
|
||||
components.push(versionSalt);
|
||||
return crypto.createHash('sha256').update(components.join('|')).digest('hex');
|
||||
}
|
||||
exports.getCacheVersion = getCacheVersion;
|
||||
function getCacheEntry(keys, paths, options) {
|
||||
return __awaiter(this, void 0, void 0, function* () {
|
||||
const httpClient = createHttpClient();
|
||||
const version = getCacheVersion(paths, options === null || options === void 0 ? void 0 : options.compressionMethod, options === null || options === void 0 ? void 0 : options.enableCrossOsArchive);
|
||||
const resource = `cache?keys=${encodeURIComponent(keys.join(','))}&version=${version}`;
|
||||
const response = yield (0, requestUtils_1.retryTypedResponse)('getCacheEntry', () => __awaiter(this, void 0, void 0, function* () { return httpClient.getJson(getCacheApiUrl(resource)); }));
|
||||
// Cache not found
|
||||
if (response.statusCode === 204) {
|
||||
// List cache for primary key only if cache miss occurs
|
||||
if (core.isDebug()) {
|
||||
yield printCachesListForDiagnostics(keys[0], httpClient, version);
|
||||
}
|
||||
return null;
|
||||
}
|
||||
if (!(0, requestUtils_1.isSuccessStatusCode)(response.statusCode)) {
|
||||
throw new Error(`Cache service responded with ${response.statusCode}`);
|
||||
}
|
||||
const cacheResult = response.result;
|
||||
const cacheDownloadUrl = cacheResult === null || cacheResult === void 0 ? void 0 : cacheResult.archiveLocation;
|
||||
if (!cacheDownloadUrl) {
|
||||
// Cache achiveLocation not found. This should never happen, and hence bail out.
|
||||
throw new Error('Cache not found.');
|
||||
}
|
||||
core.setSecret(cacheDownloadUrl);
|
||||
core.debug(`Cache Result:`);
|
||||
core.debug(JSON.stringify(cacheResult));
|
||||
return cacheResult;
|
||||
});
|
||||
}
|
||||
exports.getCacheEntry = getCacheEntry;
|
||||
function printCachesListForDiagnostics(key, httpClient, version) {
|
||||
return __awaiter(this, void 0, void 0, function* () {
|
||||
const resource = `caches?key=${encodeURIComponent(key)}`;
|
||||
const response = yield (0, requestUtils_1.retryTypedResponse)('listCache', () => __awaiter(this, void 0, void 0, function* () { return httpClient.getJson(getCacheApiUrl(resource)); }));
|
||||
if (response.statusCode === 200) {
|
||||
const cacheListResult = response.result;
|
||||
const totalCount = cacheListResult === null || cacheListResult === void 0 ? void 0 : cacheListResult.totalCount;
|
||||
if (totalCount && totalCount > 0) {
|
||||
core.debug(`No matching cache found for cache key '${key}', version '${version} and scope ${process.env['GITHUB_REF']}. There exist one or more cache(s) with similar key but they have different version or scope. See more info on cache matching here: https://docs.github.com/en/actions/using-workflows/caching-dependencies-to-speed-up-workflows#matching-a-cache-key \nOther caches with similar key:`);
|
||||
for (const cacheEntry of (cacheListResult === null || cacheListResult === void 0 ? void 0 : cacheListResult.artifactCaches) || []) {
|
||||
core.debug(`Cache Key: ${cacheEntry === null || cacheEntry === void 0 ? void 0 : cacheEntry.cacheKey}, Cache Version: ${cacheEntry === null || cacheEntry === void 0 ? void 0 : cacheEntry.cacheVersion}, Cache Scope: ${cacheEntry === null || cacheEntry === void 0 ? void 0 : cacheEntry.scope}, Cache Created: ${cacheEntry === null || cacheEntry === void 0 ? void 0 : cacheEntry.creationTime}`);
|
||||
}
|
||||
}
|
||||
}
|
||||
});
|
||||
}
|
||||
function downloadCache(archiveLocation, archivePath, options) {
|
||||
return __awaiter(this, void 0, void 0, function* () {
|
||||
const archiveUrl = new url_1.URL(archiveLocation);
|
||||
const downloadOptions = (0, options_1.getDownloadOptions)(options);
|
||||
if (archiveUrl.hostname.endsWith('.blob.core.windows.net')) {
|
||||
if (downloadOptions.useAzureSdk) {
|
||||
// Use Azure storage SDK to download caches hosted on Azure to improve speed and reliability.
|
||||
yield (0, downloadUtils_1.downloadCacheStorageSDK)(archiveLocation, archivePath, downloadOptions);
|
||||
}
|
||||
else if (downloadOptions.concurrentBlobDownloads) {
|
||||
// Use concurrent implementation with HttpClient to work around blob SDK issue
|
||||
yield (0, downloadUtils_1.downloadCacheHttpClientConcurrent)(archiveLocation, archivePath, downloadOptions);
|
||||
}
|
||||
else {
|
||||
// Otherwise, download using the Actions http-client.
|
||||
yield (0, downloadUtils_1.downloadCacheHttpClient)(archiveLocation, archivePath);
|
||||
}
|
||||
}
|
||||
else {
|
||||
yield (0, downloadUtils_1.downloadCacheHttpClient)(archiveLocation, archivePath);
|
||||
}
|
||||
});
|
||||
}
|
||||
exports.downloadCache = downloadCache;
|
||||
// Reserve Cache
|
||||
function reserveCache(key, paths, options) {
|
||||
return __awaiter(this, void 0, void 0, function* () {
|
||||
const httpClient = createHttpClient();
|
||||
const version = getCacheVersion(paths, options === null || options === void 0 ? void 0 : options.compressionMethod, options === null || options === void 0 ? void 0 : options.enableCrossOsArchive);
|
||||
const reserveCacheRequest = {
|
||||
key,
|
||||
version,
|
||||
cacheSize: options === null || options === void 0 ? void 0 : options.cacheSize
|
||||
};
|
||||
const response = yield (0, requestUtils_1.retryTypedResponse)('reserveCache', () => __awaiter(this, void 0, void 0, function* () {
|
||||
return httpClient.postJson(getCacheApiUrl('caches'), reserveCacheRequest);
|
||||
}));
|
||||
return response;
|
||||
});
|
||||
}
|
||||
exports.reserveCache = reserveCache;
|
||||
function getContentRange(start, end) {
|
||||
// Format: `bytes start-end/filesize
|
||||
// start and end are inclusive
|
||||
// filesize can be *
|
||||
// For a 200 byte chunk starting at byte 0:
|
||||
// Content-Range: bytes 0-199/*
|
||||
return `bytes ${start}-${end}/*`;
|
||||
}
|
||||
function uploadChunk(httpClient, resourceUrl, openStream, start, end) {
|
||||
return __awaiter(this, void 0, void 0, function* () {
|
||||
core.debug(`Uploading chunk of size ${end - start + 1} bytes at offset ${start} with content range: ${getContentRange(start, end)}`);
|
||||
const additionalHeaders = {
|
||||
'Content-Type': 'application/octet-stream',
|
||||
'Content-Range': getContentRange(start, end)
|
||||
};
|
||||
const uploadChunkResponse = yield (0, requestUtils_1.retryHttpClientResponse)(`uploadChunk (start: ${start}, end: ${end})`, () => __awaiter(this, void 0, void 0, function* () {
|
||||
return httpClient.sendStream('PATCH', resourceUrl, openStream(), additionalHeaders);
|
||||
}));
|
||||
if (!(0, requestUtils_1.isSuccessStatusCode)(uploadChunkResponse.message.statusCode)) {
|
||||
throw new Error(`Cache service responded with ${uploadChunkResponse.message.statusCode} during upload chunk.`);
|
||||
}
|
||||
});
|
||||
}
|
||||
function uploadFile(httpClient, cacheId, archivePath, options) {
|
||||
return __awaiter(this, void 0, void 0, function* () {
|
||||
// Upload Chunks
|
||||
const fileSize = utils.getArchiveFileSizeInBytes(archivePath);
|
||||
const resourceUrl = getCacheApiUrl(`caches/${cacheId.toString()}`);
|
||||
const fd = fs.openSync(archivePath, 'r');
|
||||
const uploadOptions = (0, options_1.getUploadOptions)(options);
|
||||
const concurrency = utils.assertDefined('uploadConcurrency', uploadOptions.uploadConcurrency);
|
||||
const maxChunkSize = utils.assertDefined('uploadChunkSize', uploadOptions.uploadChunkSize);
|
||||
const parallelUploads = [...new Array(concurrency).keys()];
|
||||
core.debug('Awaiting all uploads');
|
||||
let offset = 0;
|
||||
try {
|
||||
yield Promise.all(parallelUploads.map(() => __awaiter(this, void 0, void 0, function* () {
|
||||
while (offset < fileSize) {
|
||||
const chunkSize = Math.min(fileSize - offset, maxChunkSize);
|
||||
const start = offset;
|
||||
const end = offset + chunkSize - 1;
|
||||
offset += maxChunkSize;
|
||||
yield uploadChunk(httpClient, resourceUrl, () => fs
|
||||
.createReadStream(archivePath, {
|
||||
fd,
|
||||
start,
|
||||
end,
|
||||
autoClose: false
|
||||
})
|
||||
.on('error', error => {
|
||||
throw new Error(`Cache upload failed because file read failed with ${error.message}`);
|
||||
}), start, end);
|
||||
}
|
||||
})));
|
||||
}
|
||||
finally {
|
||||
fs.closeSync(fd);
|
||||
}
|
||||
return;
|
||||
});
|
||||
}
|
||||
function commitCache(httpClient, cacheId, filesize) {
|
||||
return __awaiter(this, void 0, void 0, function* () {
|
||||
const commitCacheRequest = { size: filesize };
|
||||
return yield (0, requestUtils_1.retryTypedResponse)('commitCache', () => __awaiter(this, void 0, void 0, function* () {
|
||||
return httpClient.postJson(getCacheApiUrl(`caches/${cacheId.toString()}`), commitCacheRequest);
|
||||
}));
|
||||
});
|
||||
}
|
||||
function saveCache(cacheId, archivePath, options) {
|
||||
return __awaiter(this, void 0, void 0, function* () {
|
||||
const httpClient = createHttpClient();
|
||||
core.debug('Upload cache');
|
||||
yield uploadFile(httpClient, cacheId, archivePath, options);
|
||||
// Commit Cache
|
||||
core.debug('Committing cache');
|
||||
const cacheSize = utils.getArchiveFileSizeInBytes(archivePath);
|
||||
core.info(`Cache Size: ~${Math.round(cacheSize / (1024 * 1024))} MB (${cacheSize} B)`);
|
||||
const commitCacheResponse = yield commitCache(httpClient, cacheId, cacheSize);
|
||||
if (!(0, requestUtils_1.isSuccessStatusCode)(commitCacheResponse.statusCode)) {
|
||||
throw new Error(`Cache service responded with ${commitCacheResponse.statusCode} during commit cache.`);
|
||||
}
|
||||
core.info('Cache saved successfully');
|
||||
});
|
||||
}
|
||||
exports.saveCache = saveCache;
|
||||
//# sourceMappingURL=cacheHttpClient.js.map
|
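As a side note on the upload path in cacheHttpClient.js above: uploadFile splits the archive into fixed-size chunks and sends each one with a Content-Range header built by getContentRange. A minimal, illustrative sketch of that chunk math (hypothetical 10 MB file and 4 MB chunks; not part of the vendored file):

const getContentRange = (start, end) => `bytes ${start}-${end}/*`; // same format as above
const fileSize = 10 * 1024 * 1024;    // hypothetical archive size
const maxChunkSize = 4 * 1024 * 1024; // hypothetical chunk size
let offset = 0;
while (offset < fileSize) {
  const chunkSize = Math.min(fileSize - offset, maxChunkSize);
  const start = offset;
  const end = offset + chunkSize - 1;
  offset += maxChunkSize;
  console.log(getContentRange(start, end)); // e.g. "bytes 0-4194303/*"
}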
1
node_modules/@actions/cache/lib/internal/cacheHttpClient.js.map
generated
vendored
Normal file
File diff suppressed because one or more lines are too long
12
node_modules/@actions/cache/lib/internal/cacheUtils.d.ts
generated
vendored
Normal file
@ -0,0 +1,12 @@
/// <reference types="node" />
import * as fs from 'fs';
import { CompressionMethod } from './constants';
export declare function createTempDirectory(): Promise<string>;
export declare function getArchiveFileSizeInBytes(filePath: string): number;
export declare function resolvePaths(patterns: string[]): Promise<string[]>;
export declare function unlinkFile(filePath: fs.PathLike): Promise<void>;
export declare function getCompressionMethod(): Promise<CompressionMethod>;
export declare function getCacheFileName(compressionMethod: CompressionMethod): string;
export declare function getGnuTarPathOnWindows(): Promise<string>;
export declare function assertDefined<T>(name: string, value?: T): T;
export declare function isGhes(): boolean;
198
node_modules/@actions/cache/lib/internal/cacheUtils.js
generated
vendored
Normal file
@ -0,0 +1,198 @@
|
||||
"use strict";
|
||||
var __createBinding = (this && this.__createBinding) || (Object.create ? (function(o, m, k, k2) {
|
||||
if (k2 === undefined) k2 = k;
|
||||
var desc = Object.getOwnPropertyDescriptor(m, k);
|
||||
if (!desc || ("get" in desc ? !m.__esModule : desc.writable || desc.configurable)) {
|
||||
desc = { enumerable: true, get: function() { return m[k]; } };
|
||||
}
|
||||
Object.defineProperty(o, k2, desc);
|
||||
}) : (function(o, m, k, k2) {
|
||||
if (k2 === undefined) k2 = k;
|
||||
o[k2] = m[k];
|
||||
}));
|
||||
var __setModuleDefault = (this && this.__setModuleDefault) || (Object.create ? (function(o, v) {
|
||||
Object.defineProperty(o, "default", { enumerable: true, value: v });
|
||||
}) : function(o, v) {
|
||||
o["default"] = v;
|
||||
});
|
||||
var __importStar = (this && this.__importStar) || function (mod) {
|
||||
if (mod && mod.__esModule) return mod;
|
||||
var result = {};
|
||||
if (mod != null) for (var k in mod) if (k !== "default" && Object.prototype.hasOwnProperty.call(mod, k)) __createBinding(result, mod, k);
|
||||
__setModuleDefault(result, mod);
|
||||
return result;
|
||||
};
|
||||
var __awaiter = (this && this.__awaiter) || function (thisArg, _arguments, P, generator) {
|
||||
function adopt(value) { return value instanceof P ? value : new P(function (resolve) { resolve(value); }); }
|
||||
return new (P || (P = Promise))(function (resolve, reject) {
|
||||
function fulfilled(value) { try { step(generator.next(value)); } catch (e) { reject(e); } }
|
||||
function rejected(value) { try { step(generator["throw"](value)); } catch (e) { reject(e); } }
|
||||
function step(result) { result.done ? resolve(result.value) : adopt(result.value).then(fulfilled, rejected); }
|
||||
step((generator = generator.apply(thisArg, _arguments || [])).next());
|
||||
});
|
||||
};
|
||||
var __asyncValues = (this && this.__asyncValues) || function (o) {
|
||||
if (!Symbol.asyncIterator) throw new TypeError("Symbol.asyncIterator is not defined.");
|
||||
var m = o[Symbol.asyncIterator], i;
|
||||
return m ? m.call(o) : (o = typeof __values === "function" ? __values(o) : o[Symbol.iterator](), i = {}, verb("next"), verb("throw"), verb("return"), i[Symbol.asyncIterator] = function () { return this; }, i);
|
||||
function verb(n) { i[n] = o[n] && function (v) { return new Promise(function (resolve, reject) { v = o[n](v), settle(resolve, reject, v.done, v.value); }); }; }
|
||||
function settle(resolve, reject, d, v) { Promise.resolve(v).then(function(v) { resolve({ value: v, done: d }); }, reject); }
|
||||
};
|
||||
Object.defineProperty(exports, "__esModule", { value: true });
|
||||
exports.isGhes = exports.assertDefined = exports.getGnuTarPathOnWindows = exports.getCacheFileName = exports.getCompressionMethod = exports.unlinkFile = exports.resolvePaths = exports.getArchiveFileSizeInBytes = exports.createTempDirectory = void 0;
|
||||
const core = __importStar(require("@actions/core"));
|
||||
const exec = __importStar(require("@actions/exec"));
|
||||
const glob = __importStar(require("@actions/glob"));
|
||||
const io = __importStar(require("@actions/io"));
|
||||
const fs = __importStar(require("fs"));
|
||||
const path = __importStar(require("path"));
|
||||
const semver = __importStar(require("semver"));
|
||||
const util = __importStar(require("util"));
|
||||
const uuid_1 = require("uuid");
|
||||
const constants_1 = require("./constants");
|
||||
// From https://github.com/actions/toolkit/blob/main/packages/tool-cache/src/tool-cache.ts#L23
|
||||
function createTempDirectory() {
|
||||
return __awaiter(this, void 0, void 0, function* () {
|
||||
const IS_WINDOWS = process.platform === 'win32';
|
||||
let tempDirectory = process.env['RUNNER_TEMP'] || '';
|
||||
if (!tempDirectory) {
|
||||
let baseLocation;
|
||||
if (IS_WINDOWS) {
|
||||
// On Windows use the USERPROFILE env variable
|
||||
baseLocation = process.env['USERPROFILE'] || 'C:\\';
|
||||
}
|
||||
else {
|
||||
if (process.platform === 'darwin') {
|
||||
baseLocation = '/Users';
|
||||
}
|
||||
else {
|
||||
baseLocation = '/home';
|
||||
}
|
||||
}
|
||||
tempDirectory = path.join(baseLocation, 'actions', 'temp');
|
||||
}
|
||||
const dest = path.join(tempDirectory, (0, uuid_1.v4)());
|
||||
yield io.mkdirP(dest);
|
||||
return dest;
|
||||
});
|
||||
}
|
||||
exports.createTempDirectory = createTempDirectory;
|
||||
function getArchiveFileSizeInBytes(filePath) {
|
||||
return fs.statSync(filePath).size;
|
||||
}
|
||||
exports.getArchiveFileSizeInBytes = getArchiveFileSizeInBytes;
|
||||
function resolvePaths(patterns) {
|
||||
var _a, e_1, _b, _c;
|
||||
var _d;
|
||||
return __awaiter(this, void 0, void 0, function* () {
|
||||
const paths = [];
|
||||
const workspace = (_d = process.env['GITHUB_WORKSPACE']) !== null && _d !== void 0 ? _d : process.cwd();
|
||||
const globber = yield glob.create(patterns.join('\n'), {
|
||||
implicitDescendants: false
|
||||
});
|
||||
try {
|
||||
for (var _e = true, _f = __asyncValues(globber.globGenerator()), _g; _g = yield _f.next(), _a = _g.done, !_a; _e = true) {
|
||||
_c = _g.value;
|
||||
_e = false;
|
||||
const file = _c;
|
||||
const relativeFile = path
|
||||
.relative(workspace, file)
|
||||
.replace(new RegExp(`\\${path.sep}`, 'g'), '/');
|
||||
core.debug(`Matched: ${relativeFile}`);
|
||||
// Paths are made relative so the tar entries are all relative to the root of the workspace.
|
||||
if (relativeFile === '') {
|
||||
// path.relative returns empty string if workspace and file are equal
|
||||
paths.push('.');
|
||||
}
|
||||
else {
|
||||
paths.push(`${relativeFile}`);
|
||||
}
|
||||
}
|
||||
}
|
||||
catch (e_1_1) { e_1 = { error: e_1_1 }; }
|
||||
finally {
|
||||
try {
|
||||
if (!_e && !_a && (_b = _f.return)) yield _b.call(_f);
|
||||
}
|
||||
finally { if (e_1) throw e_1.error; }
|
||||
}
|
||||
return paths;
|
||||
});
|
||||
}
|
||||
exports.resolvePaths = resolvePaths;
|
||||
function unlinkFile(filePath) {
|
||||
return __awaiter(this, void 0, void 0, function* () {
|
||||
return util.promisify(fs.unlink)(filePath);
|
||||
});
|
||||
}
|
||||
exports.unlinkFile = unlinkFile;
|
||||
function getVersion(app, additionalArgs = []) {
|
||||
return __awaiter(this, void 0, void 0, function* () {
|
||||
let versionOutput = '';
|
||||
additionalArgs.push('--version');
|
||||
core.debug(`Checking ${app} ${additionalArgs.join(' ')}`);
|
||||
try {
|
||||
yield exec.exec(`${app}`, additionalArgs, {
|
||||
ignoreReturnCode: true,
|
||||
silent: true,
|
||||
listeners: {
|
||||
stdout: (data) => (versionOutput += data.toString()),
|
||||
stderr: (data) => (versionOutput += data.toString())
|
||||
}
|
||||
});
|
||||
}
|
||||
catch (err) {
|
||||
core.debug(err.message);
|
||||
}
|
||||
versionOutput = versionOutput.trim();
|
||||
core.debug(versionOutput);
|
||||
return versionOutput;
|
||||
});
|
||||
}
|
||||
// Use zstandard if possible to maximize cache performance
|
||||
function getCompressionMethod() {
|
||||
return __awaiter(this, void 0, void 0, function* () {
|
||||
const versionOutput = yield getVersion('zstd', ['--quiet']);
|
||||
const version = semver.clean(versionOutput);
|
||||
core.debug(`zstd version: ${version}`);
|
||||
if (versionOutput === '') {
|
||||
return constants_1.CompressionMethod.Gzip;
|
||||
}
|
||||
else {
|
||||
return constants_1.CompressionMethod.ZstdWithoutLong;
|
||||
}
|
||||
});
|
||||
}
|
||||
exports.getCompressionMethod = getCompressionMethod;
|
||||
function getCacheFileName(compressionMethod) {
|
||||
return compressionMethod === constants_1.CompressionMethod.Gzip
|
||||
? constants_1.CacheFilename.Gzip
|
||||
: constants_1.CacheFilename.Zstd;
|
||||
}
|
||||
exports.getCacheFileName = getCacheFileName;
|
||||
function getGnuTarPathOnWindows() {
|
||||
return __awaiter(this, void 0, void 0, function* () {
|
||||
if (fs.existsSync(constants_1.GnuTarPathOnWindows)) {
|
||||
return constants_1.GnuTarPathOnWindows;
|
||||
}
|
||||
const versionOutput = yield getVersion('tar');
|
||||
return versionOutput.toLowerCase().includes('gnu tar') ? io.which('tar') : '';
|
||||
});
|
||||
}
|
||||
exports.getGnuTarPathOnWindows = getGnuTarPathOnWindows;
|
||||
function assertDefined(name, value) {
|
||||
if (value === undefined) {
|
||||
throw Error(`Expected ${name} but value was undefined`);
|
||||
}
|
||||
return value;
|
||||
}
|
||||
exports.assertDefined = assertDefined;
|
||||
function isGhes() {
|
||||
const ghUrl = new URL(process.env['GITHUB_SERVER_URL'] || 'https://github.com');
|
||||
const hostname = ghUrl.hostname.trimEnd().toUpperCase();
|
||||
const isGitHubHost = hostname === 'GITHUB.COM';
|
||||
const isGheHost = hostname.endsWith('.GHE.COM') || hostname.endsWith('.GHE.LOCALHOST');
|
||||
return !isGitHubHost && !isGheHost;
|
||||
}
|
||||
exports.isGhes = isGhes;
|
||||
//# sourceMappingURL=cacheUtils.js.map
|
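For reference, the GHES detection in cacheUtils.js above boils down to a hostname check against GITHUB_SERVER_URL. A small, illustrative sketch of the expected results (example URLs are assumptions, not part of the vendored file):

const isGhesFor = (url) => {
  const hostname = new URL(url).hostname.trimEnd().toUpperCase();
  const isGitHubHost = hostname === 'GITHUB.COM';
  const isGheHost = hostname.endsWith('.GHE.COM') || hostname.endsWith('.GHE.LOCALHOST');
  return !isGitHubHost && !isGheHost;
};
console.log(isGhesFor('https://github.com'));         // false (github.com)
console.log(isGhesFor('https://octo.ghe.com'));       // false (GHE host)
console.log(isGhesFor('https://github.example.com')); // true  (assumed GHES instance)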
1
node_modules/@actions/cache/lib/internal/cacheUtils.js.map
generated
vendored
Normal file
@ -0,0 +1 @@
|
||||
{"version":3,"file":"cacheUtils.js","sourceRoot":"","sources":["../../src/internal/cacheUtils.ts"],"names":[],"mappings":";;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;AAAA,oDAAqC;AACrC,oDAAqC;AACrC,oDAAqC;AACrC,gDAAiC;AACjC,uCAAwB;AACxB,2CAA4B;AAC5B,+CAAgC;AAChC,2CAA4B;AAC5B,+BAAiC;AACjC,2CAIoB;AAEpB,8FAA8F;AAC9F,SAAsB,mBAAmB;;QACvC,MAAM,UAAU,GAAG,OAAO,CAAC,QAAQ,KAAK,OAAO,CAAA;QAE/C,IAAI,aAAa,GAAW,OAAO,CAAC,GAAG,CAAC,aAAa,CAAC,IAAI,EAAE,CAAA;QAE5D,IAAI,CAAC,aAAa,EAAE;YAClB,IAAI,YAAoB,CAAA;YACxB,IAAI,UAAU,EAAE;gBACd,8CAA8C;gBAC9C,YAAY,GAAG,OAAO,CAAC,GAAG,CAAC,aAAa,CAAC,IAAI,MAAM,CAAA;aACpD;iBAAM;gBACL,IAAI,OAAO,CAAC,QAAQ,KAAK,QAAQ,EAAE;oBACjC,YAAY,GAAG,QAAQ,CAAA;iBACxB;qBAAM;oBACL,YAAY,GAAG,OAAO,CAAA;iBACvB;aACF;YACD,aAAa,GAAG,IAAI,CAAC,IAAI,CAAC,YAAY,EAAE,SAAS,EAAE,MAAM,CAAC,CAAA;SAC3D;QAED,MAAM,IAAI,GAAG,IAAI,CAAC,IAAI,CAAC,aAAa,EAAE,IAAA,SAAM,GAAE,CAAC,CAAA;QAC/C,MAAM,EAAE,CAAC,MAAM,CAAC,IAAI,CAAC,CAAA;QACrB,OAAO,IAAI,CAAA;IACb,CAAC;CAAA;AAvBD,kDAuBC;AAED,SAAgB,yBAAyB,CAAC,QAAgB;IACxD,OAAO,EAAE,CAAC,QAAQ,CAAC,QAAQ,CAAC,CAAC,IAAI,CAAA;AACnC,CAAC;AAFD,8DAEC;AAED,SAAsB,YAAY,CAAC,QAAkB;;;;QACnD,MAAM,KAAK,GAAa,EAAE,CAAA;QAC1B,MAAM,SAAS,GAAG,MAAA,OAAO,CAAC,GAAG,CAAC,kBAAkB,CAAC,mCAAI,OAAO,CAAC,GAAG,EAAE,CAAA;QAClE,MAAM,OAAO,GAAG,MAAM,IAAI,CAAC,MAAM,CAAC,QAAQ,CAAC,IAAI,CAAC,IAAI,CAAC,EAAE;YACrD,mBAAmB,EAAE,KAAK;SAC3B,CAAC,CAAA;;YAEF,KAAyB,eAAA,KAAA,cAAA,OAAO,CAAC,aAAa,EAAE,CAAA,IAAA,sDAAE;gBAAzB,cAAuB;gBAAvB,WAAuB;gBAArC,MAAM,IAAI,KAAA,CAAA;gBACnB,MAAM,YAAY,GAAG,IAAI;qBACtB,QAAQ,CAAC,SAAS,EAAE,IAAI,CAAC;qBACzB,OAAO,CAAC,IAAI,MAAM,CAAC,KAAK,IAAI,CAAC,GAAG,EAAE,EAAE,GAAG,CAAC,EAAE,GAAG,CAAC,CAAA;gBACjD,IAAI,CAAC,KAAK,CAAC,YAAY,YAAY,EAAE,CAAC,CAAA;gBACtC,4FAA4F;gBAC5F,IAAI,YAAY,KAAK,EAAE,EAAE;oBACvB,qEAAqE;oBACrE,KAAK,CAAC,IAAI,CAAC,GAAG,CAAC,CAAA;iBAChB;qBAAM;oBACL,KAAK,CAAC,IAAI,CAAC,GAAG,YAAY,EAAE,CAAC,CAAA;iBAC9B;aACF;;;;;;;;;QAED,OAAO,KAAK,CAAA;;CACb;AAtBD,oCAsBC;AAED,SAAsB,UAAU,CAAC,QAAqB;;QACpD,OAAO,IAAI,CAAC,SAAS,CAAC,EAAE,CAAC,MAAM,CAAC,CAAC,QAAQ,CAAC,CAAA;IAC5C,CAAC;CAAA;AAFD,gCAEC;AAED,SAAe,UAAU,CACvB,GAAW,EACX,iBAA2B,EAAE;;QAE7B,IAAI,aAAa,GAAG,EAAE,CAAA;QACtB,cAAc,CAAC,IAAI,CAAC,WAAW,CAAC,CAAA;QAChC,IAAI,CAAC,KAAK,CAAC,YAAY,GAAG,IAAI,cAAc,CAAC,IAAI,CAAC,GAAG,CAAC,EAAE,CAAC,CAAA;QACzD,IAAI;YACF,MAAM,IAAI,CAAC,IAAI,CAAC,GAAG,GAAG,EAAE,EAAE,cAAc,EAAE;gBACxC,gBAAgB,EAAE,IAAI;gBACtB,MAAM,EAAE,IAAI;gBACZ,SAAS,EAAE;oBACT,MAAM,EAAE,CAAC,IAAY,EAAU,EAAE,CAAC,CAAC,aAAa,IAAI,IAAI,CAAC,QAAQ,EAAE,CAAC;oBACpE,MAAM,EAAE,CAAC,IAAY,EAAU,EAAE,CAAC,CAAC,aAAa,IAAI,IAAI,CAAC,QAAQ,EAAE,CAAC;iBACrE;aACF,CAAC,CAAA;SACH;QAAC,OAAO,GAAG,EAAE;YACZ,IAAI,CAAC,KAAK,CAAC,GAAG,CAAC,OAAO,CAAC,CAAA;SACxB;QAED,aAAa,GAAG,aAAa,CAAC,IAAI,EAAE,CAAA;QACpC,IAAI,CAAC,KAAK,CAAC,aAAa,CAAC,CAAA;QACzB,OAAO,aAAa,CAAA;IACtB,CAAC;CAAA;AAED,0DAA0D;AAC1D,SAAsB,oBAAoB;;QACxC,MAAM,aAAa,GAAG,MAAM,UAAU,CAAC,MAAM,EAAE,CAAC,SAAS,CAAC,CAAC,CAAA;QAC3D,MAAM,OAAO,GAAG,MAAM,CAAC,KAAK,CAAC,aAAa,CAAC,CAAA;QAC3C,IAAI,CAAC,KAAK,CAAC,iBAAiB,OAAO,EAAE,CAAC,CAAA;QAEtC,IAAI,aAAa,KAAK,EAAE,EAAE;YACxB,OAAO,6BAAiB,CAAC,IAAI,CAAA;SAC9B;aAAM;YACL,OAAO,6BAAiB,CAAC,eAAe,CAAA;SACzC;IACH,CAAC;CAAA;AAVD,oDAUC;AAED,SAAgB,gBAAgB,CAAC,iBAAoC;IACnE,OAAO,iBAAiB,KAAK,6BAAiB,CAAC,IAAI;QACjD,CAAC,CAAC,yBAAa,CAAC,IAAI;QACpB,CAAC,CAAC,yBAAa,CAAC,IAAI,CAAA;AACxB,CAAC;AAJD,4CAIC;AAED,SAAsB,sBAAsB;;QAC1C,IAAI,EAAE,CAAC,UAAU,CAAC,+BAAmB,CAAC,EAAE;YACtC,OAAO,+BAAmB,CAAA;SAC3B;QACD,MAAM,aAAa,GAAG,MAAM,UAAU,CAAC,KAAK,CAAC,CAAA;QAC7C,OAAO,aAAa,CAAC,WAAW,EAAE,CAAC,QAAQ,CAAC,SAAS,CAAC,CAAC,CAAC,CAAC,EAAE,CAAC,KAAK,CAAC,KAAK,CAAC,CAAC,CAAC,CAAC,EAAE,CAAA;IAC/E,CAAC;CAAA;AAND,wDAMC;AAED,SAAg
B,aAAa,CAAI,IAAY,EAAE,KAAS;IACtD,IAAI,KAAK,KAAK,SAAS,EAAE;QACvB,MAAM,KAAK,CAAC,YAAY,IAAI,0BAA0B,CAAC,CAAA;KACxD;IAED,OAAO,KAAK,CAAA;AACd,CAAC;AAND,sCAMC;AAED,SAAgB,MAAM;IACpB,MAAM,KAAK,GAAG,IAAI,GAAG,CACnB,OAAO,CAAC,GAAG,CAAC,mBAAmB,CAAC,IAAI,oBAAoB,CACzD,CAAA;IAED,MAAM,QAAQ,GAAG,KAAK,CAAC,QAAQ,CAAC,OAAO,EAAE,CAAC,WAAW,EAAE,CAAA;IACvD,MAAM,YAAY,GAAG,QAAQ,KAAK,YAAY,CAAA;IAC9C,MAAM,SAAS,GACb,QAAQ,CAAC,QAAQ,CAAC,UAAU,CAAC,IAAI,QAAQ,CAAC,QAAQ,CAAC,gBAAgB,CAAC,CAAA;IAEtE,OAAO,CAAC,YAAY,IAAI,CAAC,SAAS,CAAA;AACpC,CAAC;AAXD,wBAWC"}
|
20
node_modules/@actions/cache/lib/internal/constants.d.ts
generated
vendored
Normal file
@ -0,0 +1,20 @@
export declare enum CacheFilename {
    Gzip = "cache.tgz",
    Zstd = "cache.tzst"
}
export declare enum CompressionMethod {
    Gzip = "gzip",
    ZstdWithoutLong = "zstd-without-long",
    Zstd = "zstd"
}
export declare enum ArchiveToolType {
    GNU = "gnu",
    BSD = "bsd"
}
export declare const DefaultRetryAttempts = 2;
export declare const DefaultRetryDelay = 5000;
export declare const SocketTimeout = 5000;
export declare const GnuTarPathOnWindows: string;
export declare const SystemTarPathOnWindows: string;
export declare const TarFilename = "cache.tar";
export declare const ManifestFilename = "manifest.txt";
36
node_modules/@actions/cache/lib/internal/constants.js
generated
vendored
Normal file
@ -0,0 +1,36 @@
"use strict";
Object.defineProperty(exports, "__esModule", { value: true });
exports.ManifestFilename = exports.TarFilename = exports.SystemTarPathOnWindows = exports.GnuTarPathOnWindows = exports.SocketTimeout = exports.DefaultRetryDelay = exports.DefaultRetryAttempts = exports.ArchiveToolType = exports.CompressionMethod = exports.CacheFilename = void 0;
var CacheFilename;
(function (CacheFilename) {
    CacheFilename["Gzip"] = "cache.tgz";
    CacheFilename["Zstd"] = "cache.tzst";
})(CacheFilename || (exports.CacheFilename = CacheFilename = {}));
var CompressionMethod;
(function (CompressionMethod) {
    CompressionMethod["Gzip"] = "gzip";
    // Long range mode was added to zstd in v1.3.2.
    // This enum is for earlier version of zstd that does not have --long support
    CompressionMethod["ZstdWithoutLong"] = "zstd-without-long";
    CompressionMethod["Zstd"] = "zstd";
})(CompressionMethod || (exports.CompressionMethod = CompressionMethod = {}));
var ArchiveToolType;
(function (ArchiveToolType) {
    ArchiveToolType["GNU"] = "gnu";
    ArchiveToolType["BSD"] = "bsd";
})(ArchiveToolType || (exports.ArchiveToolType = ArchiveToolType = {}));
// The default number of retry attempts.
exports.DefaultRetryAttempts = 2;
// The default delay in milliseconds between retry attempts.
exports.DefaultRetryDelay = 5000;
// Socket timeout in milliseconds during download. If no traffic is received
// over the socket during this period, the socket is destroyed and the download
// is aborted.
exports.SocketTimeout = 5000;
// The default path of GNUtar on hosted Windows runners
exports.GnuTarPathOnWindows = `${process.env['PROGRAMFILES']}\\Git\\usr\\bin\\tar.exe`;
// The default path of BSDtar on hosted Windows runners
exports.SystemTarPathOnWindows = `${process.env['SYSTEMDRIVE']}\\Windows\\System32\\tar.exe`;
exports.TarFilename = 'cache.tar';
exports.ManifestFilename = 'manifest.txt';
//# sourceMappingURL=constants.js.map
1
node_modules/@actions/cache/lib/internal/constants.js.map
generated
vendored
Normal file
@ -0,0 +1 @@
|
||||
{"version":3,"file":"constants.js","sourceRoot":"","sources":["../../src/internal/constants.ts"],"names":[],"mappings":";;;AAAA,IAAY,aAGX;AAHD,WAAY,aAAa;IACvB,mCAAkB,CAAA;IAClB,oCAAmB,CAAA;AACrB,CAAC,EAHW,aAAa,6BAAb,aAAa,QAGxB;AAED,IAAY,iBAMX;AAND,WAAY,iBAAiB;IAC3B,kCAAa,CAAA;IACb,+CAA+C;IAC/C,6EAA6E;IAC7E,0DAAqC,CAAA;IACrC,kCAAa,CAAA;AACf,CAAC,EANW,iBAAiB,iCAAjB,iBAAiB,QAM5B;AAED,IAAY,eAGX;AAHD,WAAY,eAAe;IACzB,8BAAW,CAAA;IACX,8BAAW,CAAA;AACb,CAAC,EAHW,eAAe,+BAAf,eAAe,QAG1B;AAED,wCAAwC;AAC3B,QAAA,oBAAoB,GAAG,CAAC,CAAA;AAErC,4DAA4D;AAC/C,QAAA,iBAAiB,GAAG,IAAI,CAAA;AAErC,6EAA6E;AAC7E,+EAA+E;AAC/E,cAAc;AACD,QAAA,aAAa,GAAG,IAAI,CAAA;AAEjC,uDAAuD;AAC1C,QAAA,mBAAmB,GAAG,GAAG,OAAO,CAAC,GAAG,CAAC,cAAc,CAAC,0BAA0B,CAAA;AAE3F,uDAAuD;AAC1C,QAAA,sBAAsB,GAAG,GAAG,OAAO,CAAC,GAAG,CAAC,aAAa,CAAC,8BAA8B,CAAA;AAEpF,QAAA,WAAW,GAAG,WAAW,CAAA;AAEzB,QAAA,gBAAgB,GAAG,cAAc,CAAA"}
|
83
node_modules/@actions/cache/lib/internal/downloadUtils.d.ts
generated
vendored
Normal file
@ -0,0 +1,83 @@
|
||||
/// <reference types="node" />
|
||||
import { TransferProgressEvent } from '@azure/ms-rest-js';
|
||||
import * as fs from 'fs';
|
||||
import { DownloadOptions } from '../options';
|
||||
/**
|
||||
* Class for tracking the download state and displaying stats.
|
||||
*/
|
||||
export declare class DownloadProgress {
|
||||
contentLength: number;
|
||||
segmentIndex: number;
|
||||
segmentSize: number;
|
||||
segmentOffset: number;
|
||||
receivedBytes: number;
|
||||
startTime: number;
|
||||
displayedComplete: boolean;
|
||||
timeoutHandle?: ReturnType<typeof setTimeout>;
|
||||
constructor(contentLength: number);
|
||||
/**
|
||||
* Progress to the next segment. Only call this method when the previous segment
|
||||
* is complete.
|
||||
*
|
||||
* @param segmentSize the length of the next segment
|
||||
*/
|
||||
nextSegment(segmentSize: number): void;
|
||||
/**
|
||||
* Sets the number of bytes received for the current segment.
|
||||
*
|
||||
* @param receivedBytes the number of bytes received
|
||||
*/
|
||||
setReceivedBytes(receivedBytes: number): void;
|
||||
/**
|
||||
* Returns the total number of bytes transferred.
|
||||
*/
|
||||
getTransferredBytes(): number;
|
||||
/**
|
||||
* Returns true if the download is complete.
|
||||
*/
|
||||
isDone(): boolean;
|
||||
/**
|
||||
* Prints the current download stats. Once the download completes, this will print one
|
||||
* last line and then stop.
|
||||
*/
|
||||
display(): void;
|
||||
/**
|
||||
* Returns a function used to handle TransferProgressEvents.
|
||||
*/
|
||||
onProgress(): (progress: TransferProgressEvent) => void;
|
||||
/**
|
||||
* Starts the timer that displays the stats.
|
||||
*
|
||||
* @param delayInMs the delay between each write
|
||||
*/
|
||||
startDisplayTimer(delayInMs?: number): void;
|
||||
/**
|
||||
* Stops the timer that displays the stats. As this typically indicates the download
|
||||
* is complete, this will display one last line, unless the last line has already
|
||||
* been written.
|
||||
*/
|
||||
stopDisplayTimer(): void;
|
||||
}
|
||||
/**
|
||||
* Download the cache using the Actions toolkit http-client
|
||||
*
|
||||
* @param archiveLocation the URL for the cache
|
||||
* @param archivePath the local path where the cache is saved
|
||||
*/
|
||||
export declare function downloadCacheHttpClient(archiveLocation: string, archivePath: string): Promise<void>;
|
||||
/**
|
||||
* Download the cache using the Actions toolkit http-client concurrently
|
||||
*
|
||||
* @param archiveLocation the URL for the cache
|
||||
* @param archivePath the local path where the cache is saved
|
||||
*/
|
||||
export declare function downloadCacheHttpClientConcurrent(archiveLocation: string, archivePath: fs.PathLike, options: DownloadOptions): Promise<void>;
|
||||
/**
|
||||
* Download the cache using the Azure Storage SDK. Only call this method if the
|
||||
* URL points to an Azure Storage endpoint.
|
||||
*
|
||||
* @param archiveLocation the URL for the cache
|
||||
* @param archivePath the local path where the cache is saved
|
||||
* @param options the download options with the defaults set
|
||||
*/
|
||||
export declare function downloadCacheStorageSDK(archiveLocation: string, archivePath: string, options: DownloadOptions): Promise<void>;
|
378
node_modules/@actions/cache/lib/internal/downloadUtils.js
generated
vendored
Normal file
@ -0,0 +1,378 @@
|
||||
"use strict";
|
||||
var __createBinding = (this && this.__createBinding) || (Object.create ? (function(o, m, k, k2) {
|
||||
if (k2 === undefined) k2 = k;
|
||||
var desc = Object.getOwnPropertyDescriptor(m, k);
|
||||
if (!desc || ("get" in desc ? !m.__esModule : desc.writable || desc.configurable)) {
|
||||
desc = { enumerable: true, get: function() { return m[k]; } };
|
||||
}
|
||||
Object.defineProperty(o, k2, desc);
|
||||
}) : (function(o, m, k, k2) {
|
||||
if (k2 === undefined) k2 = k;
|
||||
o[k2] = m[k];
|
||||
}));
|
||||
var __setModuleDefault = (this && this.__setModuleDefault) || (Object.create ? (function(o, v) {
|
||||
Object.defineProperty(o, "default", { enumerable: true, value: v });
|
||||
}) : function(o, v) {
|
||||
o["default"] = v;
|
||||
});
|
||||
var __importStar = (this && this.__importStar) || function (mod) {
|
||||
if (mod && mod.__esModule) return mod;
|
||||
var result = {};
|
||||
if (mod != null) for (var k in mod) if (k !== "default" && Object.prototype.hasOwnProperty.call(mod, k)) __createBinding(result, mod, k);
|
||||
__setModuleDefault(result, mod);
|
||||
return result;
|
||||
};
|
||||
var __awaiter = (this && this.__awaiter) || function (thisArg, _arguments, P, generator) {
|
||||
function adopt(value) { return value instanceof P ? value : new P(function (resolve) { resolve(value); }); }
|
||||
return new (P || (P = Promise))(function (resolve, reject) {
|
||||
function fulfilled(value) { try { step(generator.next(value)); } catch (e) { reject(e); } }
|
||||
function rejected(value) { try { step(generator["throw"](value)); } catch (e) { reject(e); } }
|
||||
function step(result) { result.done ? resolve(result.value) : adopt(result.value).then(fulfilled, rejected); }
|
||||
step((generator = generator.apply(thisArg, _arguments || [])).next());
|
||||
});
|
||||
};
|
||||
Object.defineProperty(exports, "__esModule", { value: true });
|
||||
exports.downloadCacheStorageSDK = exports.downloadCacheHttpClientConcurrent = exports.downloadCacheHttpClient = exports.DownloadProgress = void 0;
|
||||
const core = __importStar(require("@actions/core"));
|
||||
const http_client_1 = require("@actions/http-client");
|
||||
const storage_blob_1 = require("@azure/storage-blob");
|
||||
const buffer = __importStar(require("buffer"));
|
||||
const fs = __importStar(require("fs"));
|
||||
const stream = __importStar(require("stream"));
|
||||
const util = __importStar(require("util"));
|
||||
const utils = __importStar(require("./cacheUtils"));
|
||||
const constants_1 = require("./constants");
|
||||
const requestUtils_1 = require("./requestUtils");
|
||||
const abort_controller_1 = require("@azure/abort-controller");
|
||||
/**
|
||||
* Pipes the body of a HTTP response to a stream
|
||||
*
|
||||
* @param response the HTTP response
|
||||
* @param output the writable stream
|
||||
*/
|
||||
function pipeResponseToStream(response, output) {
|
||||
return __awaiter(this, void 0, void 0, function* () {
|
||||
const pipeline = util.promisify(stream.pipeline);
|
||||
yield pipeline(response.message, output);
|
||||
});
|
||||
}
|
||||
/**
|
||||
* Class for tracking the download state and displaying stats.
|
||||
*/
|
||||
class DownloadProgress {
|
||||
constructor(contentLength) {
|
||||
this.contentLength = contentLength;
|
||||
this.segmentIndex = 0;
|
||||
this.segmentSize = 0;
|
||||
this.segmentOffset = 0;
|
||||
this.receivedBytes = 0;
|
||||
this.displayedComplete = false;
|
||||
this.startTime = Date.now();
|
||||
}
|
||||
/**
|
||||
* Progress to the next segment. Only call this method when the previous segment
|
||||
* is complete.
|
||||
*
|
||||
* @param segmentSize the length of the next segment
|
||||
*/
|
||||
nextSegment(segmentSize) {
|
||||
this.segmentOffset = this.segmentOffset + this.segmentSize;
|
||||
this.segmentIndex = this.segmentIndex + 1;
|
||||
this.segmentSize = segmentSize;
|
||||
this.receivedBytes = 0;
|
||||
core.debug(`Downloading segment at offset ${this.segmentOffset} with length ${this.segmentSize}...`);
|
||||
}
|
||||
/**
|
||||
* Sets the number of bytes received for the current segment.
|
||||
*
|
||||
* @param receivedBytes the number of bytes received
|
||||
*/
|
||||
setReceivedBytes(receivedBytes) {
|
||||
this.receivedBytes = receivedBytes;
|
||||
}
|
||||
/**
|
||||
* Returns the total number of bytes transferred.
|
||||
*/
|
||||
getTransferredBytes() {
|
||||
return this.segmentOffset + this.receivedBytes;
|
||||
}
|
||||
/**
|
||||
* Returns true if the download is complete.
|
||||
*/
|
||||
isDone() {
|
||||
return this.getTransferredBytes() === this.contentLength;
|
||||
}
|
||||
/**
|
||||
* Prints the current download stats. Once the download completes, this will print one
|
||||
* last line and then stop.
|
||||
*/
|
||||
display() {
|
||||
if (this.displayedComplete) {
|
||||
return;
|
||||
}
|
||||
const transferredBytes = this.segmentOffset + this.receivedBytes;
|
||||
const percentage = (100 * (transferredBytes / this.contentLength)).toFixed(1);
|
||||
const elapsedTime = Date.now() - this.startTime;
|
||||
const downloadSpeed = (transferredBytes /
|
||||
(1024 * 1024) /
|
||||
(elapsedTime / 1000)).toFixed(1);
|
||||
core.info(`Received ${transferredBytes} of ${this.contentLength} (${percentage}%), ${downloadSpeed} MBs/sec`);
|
||||
if (this.isDone()) {
|
||||
this.displayedComplete = true;
|
||||
}
|
||||
}
|
||||
/**
|
||||
* Returns a function used to handle TransferProgressEvents.
|
||||
*/
|
||||
onProgress() {
|
||||
return (progress) => {
|
||||
this.setReceivedBytes(progress.loadedBytes);
|
||||
};
|
||||
}
|
||||
/**
|
||||
* Starts the timer that displays the stats.
|
||||
*
|
||||
* @param delayInMs the delay between each write
|
||||
*/
|
||||
startDisplayTimer(delayInMs = 1000) {
|
||||
const displayCallback = () => {
|
||||
this.display();
|
||||
if (!this.isDone()) {
|
||||
this.timeoutHandle = setTimeout(displayCallback, delayInMs);
|
||||
}
|
||||
};
|
||||
this.timeoutHandle = setTimeout(displayCallback, delayInMs);
|
||||
}
|
||||
/**
|
||||
* Stops the timer that displays the stats. As this typically indicates the download
|
||||
* is complete, this will display one last line, unless the last line has already
|
||||
* been written.
|
||||
*/
|
||||
stopDisplayTimer() {
|
||||
if (this.timeoutHandle) {
|
||||
clearTimeout(this.timeoutHandle);
|
||||
this.timeoutHandle = undefined;
|
||||
}
|
||||
this.display();
|
||||
}
|
||||
}
|
||||
exports.DownloadProgress = DownloadProgress;
|
||||
/**
|
||||
* Download the cache using the Actions toolkit http-client
|
||||
*
|
||||
* @param archiveLocation the URL for the cache
|
||||
* @param archivePath the local path where the cache is saved
|
||||
*/
|
||||
function downloadCacheHttpClient(archiveLocation, archivePath) {
|
||||
return __awaiter(this, void 0, void 0, function* () {
|
||||
const writeStream = fs.createWriteStream(archivePath);
|
||||
const httpClient = new http_client_1.HttpClient('actions/cache');
|
||||
const downloadResponse = yield (0, requestUtils_1.retryHttpClientResponse)('downloadCache', () => __awaiter(this, void 0, void 0, function* () { return httpClient.get(archiveLocation); }));
|
||||
// Abort download if no traffic received over the socket.
|
||||
downloadResponse.message.socket.setTimeout(constants_1.SocketTimeout, () => {
|
||||
downloadResponse.message.destroy();
|
||||
core.debug(`Aborting download, socket timed out after ${constants_1.SocketTimeout} ms`);
|
||||
});
|
||||
yield pipeResponseToStream(downloadResponse, writeStream);
|
||||
// Validate download size.
|
||||
const contentLengthHeader = downloadResponse.message.headers['content-length'];
|
||||
if (contentLengthHeader) {
|
||||
const expectedLength = parseInt(contentLengthHeader);
|
||||
const actualLength = utils.getArchiveFileSizeInBytes(archivePath);
|
||||
if (actualLength !== expectedLength) {
|
||||
throw new Error(`Incomplete download. Expected file size: ${expectedLength}, actual file size: ${actualLength}`);
|
||||
}
|
||||
}
|
||||
else {
|
||||
core.debug('Unable to validate download, no Content-Length header');
|
||||
}
|
||||
});
|
||||
}
|
||||
exports.downloadCacheHttpClient = downloadCacheHttpClient;
|
||||
/**
|
||||
* Download the cache using the Actions toolkit http-client concurrently
|
||||
*
|
||||
* @param archiveLocation the URL for the cache
|
||||
* @param archivePath the local path where the cache is saved
|
||||
*/
|
||||
function downloadCacheHttpClientConcurrent(archiveLocation, archivePath, options) {
|
||||
var _a;
|
||||
return __awaiter(this, void 0, void 0, function* () {
|
||||
const archiveDescriptor = yield fs.promises.open(archivePath, 'w');
|
||||
const httpClient = new http_client_1.HttpClient('actions/cache', undefined, {
|
||||
socketTimeout: options.timeoutInMs,
|
||||
keepAlive: true
|
||||
});
|
||||
try {
|
||||
const res = yield (0, requestUtils_1.retryHttpClientResponse)('downloadCacheMetadata', () => __awaiter(this, void 0, void 0, function* () { return yield httpClient.request('HEAD', archiveLocation, null, {}); }));
|
||||
const lengthHeader = res.message.headers['content-length'];
|
||||
if (lengthHeader === undefined || lengthHeader === null) {
|
||||
throw new Error('Content-Length not found on blob response');
|
||||
}
|
||||
const length = parseInt(lengthHeader);
|
||||
if (Number.isNaN(length)) {
|
||||
throw new Error(`Could not interpret Content-Length: ${length}`);
|
||||
}
|
||||
const downloads = [];
|
||||
const blockSize = 4 * 1024 * 1024;
|
||||
for (let offset = 0; offset < length; offset += blockSize) {
|
||||
const count = Math.min(blockSize, length - offset);
|
||||
downloads.push({
|
||||
offset,
|
||||
promiseGetter: () => __awaiter(this, void 0, void 0, function* () {
|
||||
return yield downloadSegmentRetry(httpClient, archiveLocation, offset, count);
|
||||
})
|
||||
});
|
||||
}
|
||||
// reverse to use .pop instead of .shift
|
||||
downloads.reverse();
|
||||
let actives = 0;
|
||||
let bytesDownloaded = 0;
|
||||
const progress = new DownloadProgress(length);
|
||||
progress.startDisplayTimer();
|
||||
const progressFn = progress.onProgress();
|
||||
const activeDownloads = [];
|
||||
let nextDownload;
|
||||
const waitAndWrite = () => __awaiter(this, void 0, void 0, function* () {
|
||||
const segment = yield Promise.race(Object.values(activeDownloads));
|
||||
yield archiveDescriptor.write(segment.buffer, 0, segment.count, segment.offset);
|
||||
actives--;
|
||||
delete activeDownloads[segment.offset];
|
||||
bytesDownloaded += segment.count;
|
||||
progressFn({ loadedBytes: bytesDownloaded });
|
||||
});
|
||||
while ((nextDownload = downloads.pop())) {
|
||||
activeDownloads[nextDownload.offset] = nextDownload.promiseGetter();
|
||||
actives++;
|
||||
if (actives >= ((_a = options.downloadConcurrency) !== null && _a !== void 0 ? _a : 10)) {
|
||||
yield waitAndWrite();
|
||||
}
|
||||
}
|
||||
while (actives > 0) {
|
||||
yield waitAndWrite();
|
||||
}
|
||||
}
|
||||
finally {
|
||||
httpClient.dispose();
|
||||
yield archiveDescriptor.close();
|
||||
}
|
||||
});
|
||||
}
|
||||
exports.downloadCacheHttpClientConcurrent = downloadCacheHttpClientConcurrent;
|
||||
function downloadSegmentRetry(httpClient, archiveLocation, offset, count) {
|
||||
return __awaiter(this, void 0, void 0, function* () {
|
||||
const retries = 5;
|
||||
let failures = 0;
|
||||
while (true) {
|
||||
try {
|
||||
const timeout = 30000;
|
||||
const result = yield promiseWithTimeout(timeout, downloadSegment(httpClient, archiveLocation, offset, count));
|
||||
if (typeof result === 'string') {
|
||||
throw new Error('downloadSegmentRetry failed due to timeout');
|
||||
}
|
||||
return result;
|
||||
}
|
||||
catch (err) {
|
||||
if (failures >= retries) {
|
||||
throw err;
|
||||
}
|
||||
failures++;
|
||||
}
|
||||
}
|
||||
});
|
||||
}
|
||||
function downloadSegment(httpClient, archiveLocation, offset, count) {
|
||||
return __awaiter(this, void 0, void 0, function* () {
|
||||
const partRes = yield (0, requestUtils_1.retryHttpClientResponse)('downloadCachePart', () => __awaiter(this, void 0, void 0, function* () {
|
||||
return yield httpClient.get(archiveLocation, {
|
||||
Range: `bytes=${offset}-${offset + count - 1}`
|
||||
});
|
||||
}));
|
||||
if (!partRes.readBodyBuffer) {
|
||||
throw new Error('Expected HttpClientResponse to implement readBodyBuffer');
|
||||
}
|
||||
return {
|
||||
offset,
|
||||
count,
|
||||
buffer: yield partRes.readBodyBuffer()
|
||||
};
|
||||
});
|
||||
}
|
||||
/**
|
||||
* Download the cache using the Azure Storage SDK. Only call this method if the
|
||||
* URL points to an Azure Storage endpoint.
|
||||
*
|
||||
* @param archiveLocation the URL for the cache
|
||||
* @param archivePath the local path where the cache is saved
|
||||
* @param options the download options with the defaults set
|
||||
*/
|
||||
function downloadCacheStorageSDK(archiveLocation, archivePath, options) {
|
||||
var _a;
|
||||
return __awaiter(this, void 0, void 0, function* () {
|
||||
const client = new storage_blob_1.BlockBlobClient(archiveLocation, undefined, {
|
||||
retryOptions: {
|
||||
// Override the timeout used when downloading each 4 MB chunk
|
||||
// The default is 2 min / MB, which is way too slow
|
||||
tryTimeoutInMs: options.timeoutInMs
|
||||
}
|
||||
});
|
||||
const properties = yield client.getProperties();
|
||||
const contentLength = (_a = properties.contentLength) !== null && _a !== void 0 ? _a : -1;
|
||||
if (contentLength < 0) {
|
||||
// We should never hit this condition, but just in case fall back to downloading the
|
||||
// file as one large stream
|
||||
core.debug('Unable to determine content length, downloading file with http-client...');
|
||||
yield downloadCacheHttpClient(archiveLocation, archivePath);
|
||||
}
|
||||
else {
|
||||
// Use downloadToBuffer for faster downloads, since internally it splits the
|
||||
// file into 4 MB chunks which can then be parallelized and retried independently
|
||||
//
|
||||
// If the file exceeds the buffer maximum length (~1 GB on 32-bit systems and ~2 GB
|
||||
// on 64-bit systems), split the download into multiple segments
|
||||
// ~2 GB = 2147483647 bytes; beyond this we start getting out-of-range errors, so cap accordingly.
|
||||
// Updated segment size to 128MB = 134217728 bytes, to complete a segment faster and fail fast
|
||||
const maxSegmentSize = Math.min(134217728, buffer.constants.MAX_LENGTH);
|
||||
const downloadProgress = new DownloadProgress(contentLength);
|
||||
const fd = fs.openSync(archivePath, 'w');
|
||||
try {
|
||||
downloadProgress.startDisplayTimer();
|
||||
const controller = new abort_controller_1.AbortController();
|
||||
const abortSignal = controller.signal;
|
||||
while (!downloadProgress.isDone()) {
|
||||
const segmentStart = downloadProgress.segmentOffset + downloadProgress.segmentSize;
|
||||
const segmentSize = Math.min(maxSegmentSize, contentLength - segmentStart);
|
||||
downloadProgress.nextSegment(segmentSize);
|
||||
const result = yield promiseWithTimeout(options.segmentTimeoutInMs || 3600000, client.downloadToBuffer(segmentStart, segmentSize, {
|
||||
abortSignal,
|
||||
concurrency: options.downloadConcurrency,
|
||||
onProgress: downloadProgress.onProgress()
|
||||
}));
|
||||
if (result === 'timeout') {
|
||||
controller.abort();
|
||||
throw new Error('Aborting cache download as the download time exceeded the timeout.');
|
||||
}
|
||||
else if (Buffer.isBuffer(result)) {
|
||||
fs.writeFileSync(fd, result);
|
||||
}
|
||||
}
|
||||
}
|
||||
finally {
|
||||
downloadProgress.stopDisplayTimer();
|
||||
fs.closeSync(fd);
|
||||
}
|
||||
}
|
||||
});
|
||||
}
|
||||
exports.downloadCacheStorageSDK = downloadCacheStorageSDK;
|
||||
const promiseWithTimeout = (timeoutMs, promise) => __awaiter(void 0, void 0, void 0, function* () {
|
||||
let timeoutHandle;
|
||||
const timeoutPromise = new Promise(resolve => {
|
||||
timeoutHandle = setTimeout(() => resolve('timeout'), timeoutMs);
|
||||
});
|
||||
return Promise.race([promise, timeoutPromise]).then(result => {
|
||||
clearTimeout(timeoutHandle);
|
||||
return result;
|
||||
});
|
||||
});
|
||||
//# sourceMappingURL=downloadUtils.js.map
|
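The download helpers above share one utility worth calling out: promiseWithTimeout races the real work against a timer that resolves to the string 'timeout'. An illustrative, self-contained sketch of that pattern (not part of the vendored file):

const promiseWithTimeout = (timeoutMs, promise) => {
  let timeoutHandle;
  const timeoutPromise = new Promise(resolve => {
    timeoutHandle = setTimeout(() => resolve('timeout'), timeoutMs);
  });
  return Promise.race([promise, timeoutPromise]).then(result => {
    clearTimeout(timeoutHandle);
    return result; // either the settled value or the string 'timeout'
  });
};

const slowWork = new Promise(resolve => setTimeout(() => resolve('done'), 50));
promiseWithTimeout(10, slowWork).then(r => console.log(r));  // 'timeout'
promiseWithTimeout(100, slowWork).then(r => console.log(r)); // 'done'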
1
node_modules/@actions/cache/lib/internal/downloadUtils.js.map
generated
vendored
Normal file
File diff suppressed because one or more lines are too long
8
node_modules/@actions/cache/lib/internal/requestUtils.d.ts
generated
vendored
Normal file
@ -0,0 +1,8 @@
import { HttpClientResponse } from '@actions/http-client';
import { ITypedResponseWithError } from './contracts';
export declare function isSuccessStatusCode(statusCode?: number): boolean;
export declare function isServerErrorStatusCode(statusCode?: number): boolean;
export declare function isRetryableStatusCode(statusCode?: number): boolean;
export declare function retry<T>(name: string, method: () => Promise<T>, getStatusCode: (arg0: T) => number | undefined, maxAttempts?: number, delay?: number, onError?: ((arg0: Error) => T | undefined) | undefined): Promise<T>;
export declare function retryTypedResponse<T>(name: string, method: () => Promise<ITypedResponseWithError<T>>, maxAttempts?: number, delay?: number): Promise<ITypedResponseWithError<T>>;
export declare function retryHttpClientResponse(name: string, method: () => Promise<HttpClientResponse>, maxAttempts?: number, delay?: number): Promise<HttpClientResponse>;
137
node_modules/@actions/cache/lib/internal/requestUtils.js
generated
vendored
Normal file
@ -0,0 +1,137 @@
|
||||
"use strict";
|
||||
var __createBinding = (this && this.__createBinding) || (Object.create ? (function(o, m, k, k2) {
|
||||
if (k2 === undefined) k2 = k;
|
||||
var desc = Object.getOwnPropertyDescriptor(m, k);
|
||||
if (!desc || ("get" in desc ? !m.__esModule : desc.writable || desc.configurable)) {
|
||||
desc = { enumerable: true, get: function() { return m[k]; } };
|
||||
}
|
||||
Object.defineProperty(o, k2, desc);
|
||||
}) : (function(o, m, k, k2) {
|
||||
if (k2 === undefined) k2 = k;
|
||||
o[k2] = m[k];
|
||||
}));
|
||||
var __setModuleDefault = (this && this.__setModuleDefault) || (Object.create ? (function(o, v) {
|
||||
Object.defineProperty(o, "default", { enumerable: true, value: v });
|
||||
}) : function(o, v) {
|
||||
o["default"] = v;
|
||||
});
|
||||
var __importStar = (this && this.__importStar) || function (mod) {
|
||||
if (mod && mod.__esModule) return mod;
|
||||
var result = {};
|
||||
if (mod != null) for (var k in mod) if (k !== "default" && Object.prototype.hasOwnProperty.call(mod, k)) __createBinding(result, mod, k);
|
||||
__setModuleDefault(result, mod);
|
||||
return result;
|
||||
};
|
||||
var __awaiter = (this && this.__awaiter) || function (thisArg, _arguments, P, generator) {
|
||||
function adopt(value) { return value instanceof P ? value : new P(function (resolve) { resolve(value); }); }
|
||||
return new (P || (P = Promise))(function (resolve, reject) {
|
||||
function fulfilled(value) { try { step(generator.next(value)); } catch (e) { reject(e); } }
|
||||
function rejected(value) { try { step(generator["throw"](value)); } catch (e) { reject(e); } }
|
||||
function step(result) { result.done ? resolve(result.value) : adopt(result.value).then(fulfilled, rejected); }
|
||||
step((generator = generator.apply(thisArg, _arguments || [])).next());
|
||||
});
|
||||
};
|
||||
Object.defineProperty(exports, "__esModule", { value: true });
|
||||
exports.retryHttpClientResponse = exports.retryTypedResponse = exports.retry = exports.isRetryableStatusCode = exports.isServerErrorStatusCode = exports.isSuccessStatusCode = void 0;
|
||||
const core = __importStar(require("@actions/core"));
|
||||
const http_client_1 = require("@actions/http-client");
|
||||
const constants_1 = require("./constants");
|
||||
function isSuccessStatusCode(statusCode) {
|
||||
if (!statusCode) {
|
||||
return false;
|
||||
}
|
||||
return statusCode >= 200 && statusCode < 300;
|
||||
}
|
||||
exports.isSuccessStatusCode = isSuccessStatusCode;
|
||||
function isServerErrorStatusCode(statusCode) {
|
||||
if (!statusCode) {
|
||||
return true;
|
||||
}
|
||||
return statusCode >= 500;
|
||||
}
|
||||
exports.isServerErrorStatusCode = isServerErrorStatusCode;
|
||||
function isRetryableStatusCode(statusCode) {
|
||||
if (!statusCode) {
|
||||
return false;
|
||||
}
|
||||
const retryableStatusCodes = [
|
||||
http_client_1.HttpCodes.BadGateway,
|
||||
http_client_1.HttpCodes.ServiceUnavailable,
|
||||
http_client_1.HttpCodes.GatewayTimeout
|
||||
];
|
||||
return retryableStatusCodes.includes(statusCode);
|
||||
}
|
||||
exports.isRetryableStatusCode = isRetryableStatusCode;
|
||||
function sleep(milliseconds) {
|
||||
return __awaiter(this, void 0, void 0, function* () {
|
||||
return new Promise(resolve => setTimeout(resolve, milliseconds));
|
||||
});
|
||||
}
|
||||
function retry(name, method, getStatusCode, maxAttempts = constants_1.DefaultRetryAttempts, delay = constants_1.DefaultRetryDelay, onError = undefined) {
|
||||
return __awaiter(this, void 0, void 0, function* () {
|
||||
let errorMessage = '';
|
||||
let attempt = 1;
|
||||
while (attempt <= maxAttempts) {
|
||||
let response = undefined;
|
||||
let statusCode = undefined;
|
||||
let isRetryable = false;
|
||||
try {
|
||||
response = yield method();
|
||||
}
|
||||
catch (error) {
|
||||
if (onError) {
|
||||
response = onError(error);
|
||||
}
|
||||
isRetryable = true;
|
||||
errorMessage = error.message;
|
||||
}
|
||||
if (response) {
|
||||
statusCode = getStatusCode(response);
|
||||
if (!isServerErrorStatusCode(statusCode)) {
|
||||
return response;
|
||||
}
|
||||
}
|
||||
if (statusCode) {
|
||||
isRetryable = isRetryableStatusCode(statusCode);
|
||||
errorMessage = `Cache service responded with ${statusCode}`;
|
||||
}
|
||||
core.debug(`${name} - Attempt ${attempt} of ${maxAttempts} failed with error: ${errorMessage}`);
|
||||
if (!isRetryable) {
|
||||
core.debug(`${name} - Error is not retryable`);
|
||||
break;
|
||||
}
|
||||
yield sleep(delay);
|
||||
attempt++;
|
||||
}
|
||||
throw Error(`${name} failed: ${errorMessage}`);
|
||||
});
|
||||
}
|
||||
exports.retry = retry;
|
||||
function retryTypedResponse(name, method, maxAttempts = constants_1.DefaultRetryAttempts, delay = constants_1.DefaultRetryDelay) {
|
||||
return __awaiter(this, void 0, void 0, function* () {
|
||||
return yield retry(name, method, (response) => response.statusCode, maxAttempts, delay,
|
||||
// If the error object contains the statusCode property, extract it and return
|
||||
// an TypedResponse<T> so it can be processed by the retry logic.
|
||||
(error) => {
|
||||
if (error instanceof http_client_1.HttpClientError) {
|
||||
return {
|
||||
statusCode: error.statusCode,
|
||||
result: null,
|
||||
headers: {},
|
||||
error
|
||||
};
|
||||
}
|
||||
else {
|
||||
return undefined;
|
||||
}
|
||||
});
|
||||
});
|
||||
}
|
||||
exports.retryTypedResponse = retryTypedResponse;
|
||||
function retryHttpClientResponse(name, method, maxAttempts = constants_1.DefaultRetryAttempts, delay = constants_1.DefaultRetryDelay) {
|
||||
return __awaiter(this, void 0, void 0, function* () {
|
||||
return yield retry(name, method, (response) => response.message.statusCode, maxAttempts, delay);
|
||||
});
|
||||
}
|
||||
exports.retryHttpClientResponse = retryHttpClientResponse;
|
||||
//# sourceMappingURL=requestUtils.js.map
|
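requestUtils.js exposes the retry wrappers used throughout the cache client code above. A hedged usage sketch (the caller and label are hypothetical; the require path follows the vendored layout shown in this diff):

const { retryTypedResponse } = require('@actions/cache/lib/internal/requestUtils');

// Retries server errors / retryable status codes with the defaults from constants.js,
// then resolves with the typed response (or throws after the final attempt).
async function postWithRetry(httpClient, url, body) {
  return retryTypedResponse('examplePost', async () => httpClient.postJson(url, body));
}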
1
node_modules/@actions/cache/lib/internal/requestUtils.js.map
generated
vendored
Normal file
@ -0,0 +1 @@
|
||||
{"version":3,"file":"requestUtils.js","sourceRoot":"","sources":["../../src/internal/requestUtils.ts"],"names":[],"mappings":";;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;AAAA,oDAAqC;AACrC,sDAI6B;AAC7B,2CAAmE;AAGnE,SAAgB,mBAAmB,CAAC,UAAmB;IACrD,IAAI,CAAC,UAAU,EAAE;QACf,OAAO,KAAK,CAAA;KACb;IACD,OAAO,UAAU,IAAI,GAAG,IAAI,UAAU,GAAG,GAAG,CAAA;AAC9C,CAAC;AALD,kDAKC;AAED,SAAgB,uBAAuB,CAAC,UAAmB;IACzD,IAAI,CAAC,UAAU,EAAE;QACf,OAAO,IAAI,CAAA;KACZ;IACD,OAAO,UAAU,IAAI,GAAG,CAAA;AAC1B,CAAC;AALD,0DAKC;AAED,SAAgB,qBAAqB,CAAC,UAAmB;IACvD,IAAI,CAAC,UAAU,EAAE;QACf,OAAO,KAAK,CAAA;KACb;IACD,MAAM,oBAAoB,GAAG;QAC3B,uBAAS,CAAC,UAAU;QACpB,uBAAS,CAAC,kBAAkB;QAC5B,uBAAS,CAAC,cAAc;KACzB,CAAA;IACD,OAAO,oBAAoB,CAAC,QAAQ,CAAC,UAAU,CAAC,CAAA;AAClD,CAAC;AAVD,sDAUC;AAED,SAAe,KAAK,CAAC,YAAoB;;QACvC,OAAO,IAAI,OAAO,CAAC,OAAO,CAAC,EAAE,CAAC,UAAU,CAAC,OAAO,EAAE,YAAY,CAAC,CAAC,CAAA;IAClE,CAAC;CAAA;AAED,SAAsB,KAAK,CACzB,IAAY,EACZ,MAAwB,EACxB,aAA8C,EAC9C,WAAW,GAAG,gCAAoB,EAClC,KAAK,GAAG,6BAAiB,EACzB,UAAwD,SAAS;;QAEjE,IAAI,YAAY,GAAG,EAAE,CAAA;QACrB,IAAI,OAAO,GAAG,CAAC,CAAA;QAEf,OAAO,OAAO,IAAI,WAAW,EAAE;YAC7B,IAAI,QAAQ,GAAkB,SAAS,CAAA;YACvC,IAAI,UAAU,GAAuB,SAAS,CAAA;YAC9C,IAAI,WAAW,GAAG,KAAK,CAAA;YAEvB,IAAI;gBACF,QAAQ,GAAG,MAAM,MAAM,EAAE,CAAA;aAC1B;YAAC,OAAO,KAAK,EAAE;gBACd,IAAI,OAAO,EAAE;oBACX,QAAQ,GAAG,OAAO,CAAC,KAAK,CAAC,CAAA;iBAC1B;gBAED,WAAW,GAAG,IAAI,CAAA;gBAClB,YAAY,GAAG,KAAK,CAAC,OAAO,CAAA;aAC7B;YAED,IAAI,QAAQ,EAAE;gBACZ,UAAU,GAAG,aAAa,CAAC,QAAQ,CAAC,CAAA;gBAEpC,IAAI,CAAC,uBAAuB,CAAC,UAAU,CAAC,EAAE;oBACxC,OAAO,QAAQ,CAAA;iBAChB;aACF;YAED,IAAI,UAAU,EAAE;gBACd,WAAW,GAAG,qBAAqB,CAAC,UAAU,CAAC,CAAA;gBAC/C,YAAY,GAAG,gCAAgC,UAAU,EAAE,CAAA;aAC5D;YAED,IAAI,CAAC,KAAK,CACR,GAAG,IAAI,cAAc,OAAO,OAAO,WAAW,uBAAuB,YAAY,EAAE,CACpF,CAAA;YAED,IAAI,CAAC,WAAW,EAAE;gBAChB,IAAI,CAAC,KAAK,CAAC,GAAG,IAAI,2BAA2B,CAAC,CAAA;gBAC9C,MAAK;aACN;YAED,MAAM,KAAK,CAAC,KAAK,CAAC,CAAA;YAClB,OAAO,EAAE,CAAA;SACV;QAED,MAAM,KAAK,CAAC,GAAG,IAAI,YAAY,YAAY,EAAE,CAAC,CAAA;IAChD,CAAC;CAAA;AAtDD,sBAsDC;AAED,SAAsB,kBAAkB,CACtC,IAAY,EACZ,MAAiD,EACjD,WAAW,GAAG,gCAAoB,EAClC,KAAK,GAAG,6BAAiB;;QAEzB,OAAO,MAAM,KAAK,CAChB,IAAI,EACJ,MAAM,EACN,CAAC,QAAoC,EAAE,EAAE,CAAC,QAAQ,CAAC,UAAU,EAC7D,WAAW,EACX,KAAK;QACL,8EAA8E;QAC9E,iEAAiE;QACjE,CAAC,KAAY,EAAE,EAAE;YACf,IAAI,KAAK,YAAY,6BAAe,EAAE;gBACpC,OAAO;oBACL,UAAU,EAAE,KAAK,CAAC,UAAU;oBAC5B,MAAM,EAAE,IAAI;oBACZ,OAAO,EAAE,EAAE;oBACX,KAAK;iBACN,CAAA;aACF;iBAAM;gBACL,OAAO,SAAS,CAAA;aACjB;QACH,CAAC,CACF,CAAA;IACH,CAAC;CAAA;AA3BD,gDA2BC;AAED,SAAsB,uBAAuB,CAC3C,IAAY,EACZ,MAAyC,EACzC,WAAW,GAAG,gCAAoB,EAClC,KAAK,GAAG,6BAAiB;;QAEzB,OAAO,MAAM,KAAK,CAChB,IAAI,EACJ,MAAM,EACN,CAAC,QAA4B,EAAE,EAAE,CAAC,QAAQ,CAAC,OAAO,CAAC,UAAU,EAC7D,WAAW,EACX,KAAK,CACN,CAAA;IACH,CAAC;CAAA;AAbD,0DAaC"}
|
4
node_modules/@actions/cache/lib/internal/tar.d.ts
generated
vendored
Normal file
@ -0,0 +1,4 @@
import { CompressionMethod } from './constants';
export declare function listTar(archivePath: string, compressionMethod: CompressionMethod): Promise<void>;
export declare function extractTar(archivePath: string, compressionMethod: CompressionMethod): Promise<void>;
export declare function createTar(archiveFolder: string, sourceDirectories: string[], compressionMethod: CompressionMethod): Promise<void>;
272
node_modules/@actions/cache/lib/internal/tar.js
generated
vendored
Normal file
@ -0,0 +1,272 @@
|
||||
"use strict";
|
||||
var __createBinding = (this && this.__createBinding) || (Object.create ? (function(o, m, k, k2) {
|
||||
if (k2 === undefined) k2 = k;
|
||||
var desc = Object.getOwnPropertyDescriptor(m, k);
|
||||
if (!desc || ("get" in desc ? !m.__esModule : desc.writable || desc.configurable)) {
|
||||
desc = { enumerable: true, get: function() { return m[k]; } };
|
||||
}
|
||||
Object.defineProperty(o, k2, desc);
|
||||
}) : (function(o, m, k, k2) {
|
||||
if (k2 === undefined) k2 = k;
|
||||
o[k2] = m[k];
|
||||
}));
|
||||
var __setModuleDefault = (this && this.__setModuleDefault) || (Object.create ? (function(o, v) {
|
||||
Object.defineProperty(o, "default", { enumerable: true, value: v });
|
||||
}) : function(o, v) {
|
||||
o["default"] = v;
|
||||
});
|
||||
var __importStar = (this && this.__importStar) || function (mod) {
|
||||
if (mod && mod.__esModule) return mod;
|
||||
var result = {};
|
||||
if (mod != null) for (var k in mod) if (k !== "default" && Object.prototype.hasOwnProperty.call(mod, k)) __createBinding(result, mod, k);
|
||||
__setModuleDefault(result, mod);
|
||||
return result;
|
||||
};
|
||||
var __awaiter = (this && this.__awaiter) || function (thisArg, _arguments, P, generator) {
|
||||
function adopt(value) { return value instanceof P ? value : new P(function (resolve) { resolve(value); }); }
|
||||
return new (P || (P = Promise))(function (resolve, reject) {
|
||||
function fulfilled(value) { try { step(generator.next(value)); } catch (e) { reject(e); } }
|
||||
function rejected(value) { try { step(generator["throw"](value)); } catch (e) { reject(e); } }
|
||||
function step(result) { result.done ? resolve(result.value) : adopt(result.value).then(fulfilled, rejected); }
|
||||
step((generator = generator.apply(thisArg, _arguments || [])).next());
|
||||
});
|
||||
};
|
||||
Object.defineProperty(exports, "__esModule", { value: true });
|
||||
exports.createTar = exports.extractTar = exports.listTar = void 0;
|
||||
const exec_1 = require("@actions/exec");
|
||||
const io = __importStar(require("@actions/io"));
|
||||
const fs_1 = require("fs");
|
||||
const path = __importStar(require("path"));
|
||||
const utils = __importStar(require("./cacheUtils"));
|
||||
const constants_1 = require("./constants");
|
||||
const IS_WINDOWS = process.platform === 'win32';
|
||||
// Returns tar path and type: BSD or GNU
|
||||
function getTarPath() {
|
||||
return __awaiter(this, void 0, void 0, function* () {
|
||||
switch (process.platform) {
|
||||
case 'win32': {
|
||||
const gnuTar = yield utils.getGnuTarPathOnWindows();
|
||||
const systemTar = constants_1.SystemTarPathOnWindows;
|
||||
if (gnuTar) {
|
||||
// Use GNUtar as default on windows
|
||||
return { path: gnuTar, type: constants_1.ArchiveToolType.GNU };
|
||||
}
|
||||
else if ((0, fs_1.existsSync)(systemTar)) {
|
||||
return { path: systemTar, type: constants_1.ArchiveToolType.BSD };
|
||||
}
|
||||
break;
|
||||
}
|
||||
case 'darwin': {
|
||||
const gnuTar = yield io.which('gtar', false);
|
||||
if (gnuTar) {
|
||||
// fix permission denied errors when extracting BSD tar archive with GNU tar - https://github.com/actions/cache/issues/527
|
||||
return { path: gnuTar, type: constants_1.ArchiveToolType.GNU };
|
||||
}
|
||||
else {
|
||||
return {
|
||||
path: yield io.which('tar', true),
|
||||
type: constants_1.ArchiveToolType.BSD
|
||||
};
|
||||
}
|
||||
}
|
||||
default:
|
||||
break;
|
||||
}
|
||||
// Default assumption is GNU tar is present in path
|
||||
return {
|
||||
path: yield io.which('tar', true),
|
||||
type: constants_1.ArchiveToolType.GNU
|
||||
};
|
||||
});
|
||||
}
|
||||
// Return arguments for tar as per tarPath, compressionMethod, method type and os
|
||||
function getTarArgs(tarPath, compressionMethod, type, archivePath = '') {
|
||||
return __awaiter(this, void 0, void 0, function* () {
|
||||
const args = [`"${tarPath.path}"`];
|
||||
const cacheFileName = utils.getCacheFileName(compressionMethod);
|
||||
const tarFile = 'cache.tar';
|
||||
const workingDirectory = getWorkingDirectory();
|
||||
// Specific args for the BSD tar workaround on Windows
|
||||
const BSD_TAR_ZSTD = tarPath.type === constants_1.ArchiveToolType.BSD &&
|
||||
compressionMethod !== constants_1.CompressionMethod.Gzip &&
|
||||
IS_WINDOWS;
|
||||
// Method specific args
|
||||
switch (type) {
|
||||
case 'create':
|
||||
args.push('--posix', '-cf', BSD_TAR_ZSTD
|
||||
? tarFile
|
||||
: cacheFileName.replace(new RegExp(`\\${path.sep}`, 'g'), '/'), '--exclude', BSD_TAR_ZSTD
|
||||
? tarFile
|
||||
: cacheFileName.replace(new RegExp(`\\${path.sep}`, 'g'), '/'), '-P', '-C', workingDirectory.replace(new RegExp(`\\${path.sep}`, 'g'), '/'), '--files-from', constants_1.ManifestFilename);
|
||||
break;
|
||||
case 'extract':
|
||||
args.push('-xf', BSD_TAR_ZSTD
|
||||
? tarFile
|
||||
: archivePath.replace(new RegExp(`\\${path.sep}`, 'g'), '/'), '-P', '-C', workingDirectory.replace(new RegExp(`\\${path.sep}`, 'g'), '/'));
|
||||
break;
|
||||
case 'list':
|
||||
args.push('-tf', BSD_TAR_ZSTD
|
||||
? tarFile
|
||||
: archivePath.replace(new RegExp(`\\${path.sep}`, 'g'), '/'), '-P');
|
||||
break;
|
||||
}
|
||||
// Platform specific args
|
||||
if (tarPath.type === constants_1.ArchiveToolType.GNU) {
|
||||
switch (process.platform) {
|
||||
case 'win32':
|
||||
args.push('--force-local');
|
||||
break;
|
||||
case 'darwin':
|
||||
args.push('--delay-directory-restore');
|
||||
break;
|
||||
}
|
||||
}
|
||||
return args;
|
||||
});
|
||||
}
|
||||
// Returns commands to run tar and compression program
|
||||
function getCommands(compressionMethod, type, archivePath = '') {
|
||||
return __awaiter(this, void 0, void 0, function* () {
|
||||
let args;
|
||||
const tarPath = yield getTarPath();
|
||||
const tarArgs = yield getTarArgs(tarPath, compressionMethod, type, archivePath);
|
||||
const compressionArgs = type !== 'create'
|
||||
? yield getDecompressionProgram(tarPath, compressionMethod, archivePath)
|
||||
: yield getCompressionProgram(tarPath, compressionMethod);
|
||||
const BSD_TAR_ZSTD = tarPath.type === constants_1.ArchiveToolType.BSD &&
|
||||
compressionMethod !== constants_1.CompressionMethod.Gzip &&
|
||||
IS_WINDOWS;
|
||||
if (BSD_TAR_ZSTD && type !== 'create') {
|
||||
args = [[...compressionArgs].join(' '), [...tarArgs].join(' ')];
|
||||
}
|
||||
else {
|
||||
args = [[...tarArgs].join(' '), [...compressionArgs].join(' ')];
|
||||
}
|
||||
if (BSD_TAR_ZSTD) {
|
||||
return args;
|
||||
}
|
||||
return [args.join(' ')];
|
||||
});
|
||||
}
|
||||
function getWorkingDirectory() {
|
||||
var _a;
|
||||
return (_a = process.env['GITHUB_WORKSPACE']) !== null && _a !== void 0 ? _a : process.cwd();
|
||||
}
|
||||
// Common function for extractTar and listTar to get the compression method
|
||||
function getDecompressionProgram(tarPath, compressionMethod, archivePath) {
|
||||
return __awaiter(this, void 0, void 0, function* () {
|
||||
// -d: Decompress.
|
||||
// unzstd is equivalent to 'zstd -d'
|
||||
// --long=#: Enables long distance matching with # bits. Maximum is 30 (1GB) on 32-bit OS and 31 (2GB) on 64-bit.
|
||||
// Using 30 here because we also support 32-bit self-hosted runners.
|
||||
const BSD_TAR_ZSTD = tarPath.type === constants_1.ArchiveToolType.BSD &&
|
||||
compressionMethod !== constants_1.CompressionMethod.Gzip &&
|
||||
IS_WINDOWS;
|
||||
switch (compressionMethod) {
|
||||
case constants_1.CompressionMethod.Zstd:
|
||||
return BSD_TAR_ZSTD
|
||||
? [
|
||||
'zstd -d --long=30 --force -o',
|
||||
constants_1.TarFilename,
|
||||
archivePath.replace(new RegExp(`\\${path.sep}`, 'g'), '/')
|
||||
]
|
||||
: [
|
||||
'--use-compress-program',
|
||||
IS_WINDOWS ? '"zstd -d --long=30"' : 'unzstd --long=30'
|
||||
];
|
||||
case constants_1.CompressionMethod.ZstdWithoutLong:
|
||||
return BSD_TAR_ZSTD
|
||||
? [
|
||||
'zstd -d --force -o',
|
||||
constants_1.TarFilename,
|
||||
archivePath.replace(new RegExp(`\\${path.sep}`, 'g'), '/')
|
||||
]
|
||||
: ['--use-compress-program', IS_WINDOWS ? '"zstd -d"' : 'unzstd'];
|
||||
default:
|
||||
return ['-z'];
|
||||
}
|
||||
});
|
||||
}
|
||||
// Used for creating the archive
|
||||
// -T#: Compress using # working thread. If # is 0, attempt to detect and use the number of physical CPU cores.
|
||||
// zstdmt is equivalent to 'zstd -T0'
|
||||
// --long=#: Enables long distance matching with # bits. Maximum is 30 (1GB) on 32-bit OS and 31 (2GB) on 64-bit.
|
||||
// Using 30 here because we also support 32-bit self-hosted runners.
|
||||
// Long range mode is added to zstd in v1.3.2 release, so we will not use --long in older version of zstd.
|
||||
function getCompressionProgram(tarPath, compressionMethod) {
|
||||
return __awaiter(this, void 0, void 0, function* () {
|
||||
const cacheFileName = utils.getCacheFileName(compressionMethod);
|
||||
const BSD_TAR_ZSTD = tarPath.type === constants_1.ArchiveToolType.BSD &&
|
||||
compressionMethod !== constants_1.CompressionMethod.Gzip &&
|
||||
IS_WINDOWS;
|
||||
switch (compressionMethod) {
|
||||
case constants_1.CompressionMethod.Zstd:
|
||||
return BSD_TAR_ZSTD
|
||||
? [
|
||||
'zstd -T0 --long=30 --force -o',
|
||||
cacheFileName.replace(new RegExp(`\\${path.sep}`, 'g'), '/'),
|
||||
constants_1.TarFilename
|
||||
]
|
||||
: [
|
||||
'--use-compress-program',
|
||||
IS_WINDOWS ? '"zstd -T0 --long=30"' : 'zstdmt --long=30'
|
||||
];
|
||||
case constants_1.CompressionMethod.ZstdWithoutLong:
|
||||
return BSD_TAR_ZSTD
|
||||
? [
|
||||
'zstd -T0 --force -o',
|
||||
cacheFileName.replace(new RegExp(`\\${path.sep}`, 'g'), '/'),
|
||||
constants_1.TarFilename
|
||||
]
|
||||
: ['--use-compress-program', IS_WINDOWS ? '"zstd -T0"' : 'zstdmt'];
|
||||
default:
|
||||
return ['-z'];
|
||||
}
|
||||
});
|
||||
}
|
||||
// Executes all commands as separate processes
|
||||
function execCommands(commands, cwd) {
|
||||
return __awaiter(this, void 0, void 0, function* () {
|
||||
for (const command of commands) {
|
||||
try {
|
||||
yield (0, exec_1.exec)(command, undefined, {
|
||||
cwd,
|
||||
env: Object.assign(Object.assign({}, process.env), { MSYS: 'winsymlinks:nativestrict' })
|
||||
});
|
||||
}
|
||||
catch (error) {
|
||||
throw new Error(`${command.split(' ')[0]} failed with error: ${error === null || error === void 0 ? void 0 : error.message}`);
|
||||
}
|
||||
}
|
||||
});
|
||||
}
|
||||
// List the contents of a tar
|
||||
function listTar(archivePath, compressionMethod) {
|
||||
return __awaiter(this, void 0, void 0, function* () {
|
||||
const commands = yield getCommands(compressionMethod, 'list', archivePath);
|
||||
yield execCommands(commands);
|
||||
});
|
||||
}
|
||||
exports.listTar = listTar;
|
||||
// Extract a tar
|
||||
function extractTar(archivePath, compressionMethod) {
|
||||
return __awaiter(this, void 0, void 0, function* () {
|
||||
// Create directory to extract tar into
|
||||
const workingDirectory = getWorkingDirectory();
|
||||
yield io.mkdirP(workingDirectory);
|
||||
const commands = yield getCommands(compressionMethod, 'extract', archivePath);
|
||||
yield execCommands(commands);
|
||||
});
|
||||
}
|
||||
exports.extractTar = extractTar;
|
||||
// Create a tar
|
||||
function createTar(archiveFolder, sourceDirectories, compressionMethod) {
|
||||
return __awaiter(this, void 0, void 0, function* () {
|
||||
// Write source directories to manifest.txt to avoid command length limits
|
||||
(0, fs_1.writeFileSync)(path.join(archiveFolder, constants_1.ManifestFilename), sourceDirectories.join('\n'));
|
||||
const commands = yield getCommands(compressionMethod, 'create');
|
||||
yield execCommands(commands, archiveFolder);
|
||||
});
|
||||
}
|
||||
exports.createTar = createTar;
|
||||
//# sourceMappingURL=tar.js.map
|
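For orientation (not part of the vendored file above), a minimal sketch of how the three exports fit together. The require paths, the scratch folder, and the archive file name are illustrative assumptions; only the function signatures come from tar.js itself.

```javascript
// Hedged sketch: wiring createTar / listTar / extractTar from the module above.
const { createTar, listTar, extractTar } = require('@actions/cache/lib/internal/tar');
const { CompressionMethod } = require('@actions/cache/lib/internal/constants');

async function roundTrip() {
  const archiveFolder = '/tmp/cache-archive';   // assumed scratch directory
  const sourceDirectories = ['node_modules'];   // assumed paths to cache
  const method = CompressionMethod.Gzip;

  // Writes manifest.txt into archiveFolder and shells out to tar.
  await createTar(archiveFolder, sourceDirectories, method);

  // The real file name comes from utils.getCacheFileName(method); assumed here.
  const archivePath = `${archiveFolder}/cache.tgz`;
  await listTar(archivePath, method);      // prints the archive contents
  await extractTar(archivePath, method);   // restores into GITHUB_WORKSPACE (or cwd)
}

roundTrip().catch(err => console.error(err));
```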
1
node_modules/@actions/cache/lib/internal/tar.js.map
generated
vendored
Normal file
File diff suppressed because one or more lines are too long
75
node_modules/@actions/cache/lib/options.d.ts
generated
vendored
Normal file
@ -0,0 +1,75 @@
|
||||
/**
|
||||
* Options to control cache upload
|
||||
*/
|
||||
export interface UploadOptions {
|
||||
/**
|
||||
* Number of parallel cache uploads
|
||||
*
|
||||
* @default 4
|
||||
*/
|
||||
uploadConcurrency?: number;
|
||||
/**
|
||||
* Maximum chunk size in bytes for cache upload
|
||||
*
|
||||
* @default 32MB
|
||||
*/
|
||||
uploadChunkSize?: number;
|
||||
}
|
||||
/**
|
||||
* Options to control cache download
|
||||
*/
|
||||
export interface DownloadOptions {
|
||||
/**
|
||||
* Indicates whether to use the Azure Blob SDK to download caches
|
||||
* that are stored on Azure Blob Storage to improve reliability and
|
||||
* performance
|
||||
*
|
||||
* @default true
|
||||
*/
|
||||
useAzureSdk?: boolean;
|
||||
/**
|
||||
* Number of parallel downloads (this option only applies when using
|
||||
* the Azure SDK)
|
||||
*
|
||||
* @default 8
|
||||
*/
|
||||
downloadConcurrency?: number;
|
||||
/**
|
||||
* Indicates whether to use Actions HttpClient with concurrency
|
||||
* for Azure Blob Storage
|
||||
*/
|
||||
concurrentBlobDownloads?: boolean;
|
||||
/**
|
||||
* Maximum time for each download request, in milliseconds (this
|
||||
* option only applies when using the Azure SDK)
|
||||
*
|
||||
* @default 30000
|
||||
*/
|
||||
timeoutInMs?: number;
|
||||
/**
|
||||
* Time after which a segment download should be aborted if stuck
|
||||
*
|
||||
* @default 3600000
|
||||
*/
|
||||
segmentTimeoutInMs?: number;
|
||||
/**
|
||||
* Whether to skip downloading the cache entry.
|
||||
* If lookupOnly is set to true, the restore function will only check if
|
||||
* a matching cache entry exists and return the cache key if it does.
|
||||
*
|
||||
* @default false
|
||||
*/
|
||||
lookupOnly?: boolean;
|
||||
}
|
||||
/**
|
||||
* Returns a copy of the upload options with defaults filled in.
|
||||
*
|
||||
* @param copy the original upload options
|
||||
*/
|
||||
export declare function getUploadOptions(copy?: UploadOptions): UploadOptions;
|
||||
/**
|
||||
* Returns a copy of the download options with defaults filled in.
|
||||
*
|
||||
* @param copy the original download options
|
||||
*/
|
||||
export declare function getDownloadOptions(copy?: DownloadOptions): DownloadOptions;
|
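To make the interface above concrete, a small hedged example that feeds a partial DownloadOptions into getDownloadOptions (defined in the options.js that follows); the field values are arbitrary.

```javascript
// Illustrative only: unspecified fields fall back to the defaults set in options.js.
const { getDownloadOptions } = require('@actions/cache/lib/options');

const opts = getDownloadOptions({
  lookupOnly: true,        // just check whether a matching cache entry exists
  downloadConcurrency: 4,  // only used on the Azure SDK download path
  timeoutInMs: 10000
});

console.log(opts.useAzureSdk);        // false (default)
console.log(opts.segmentTimeoutInMs); // 600000 (default, unless SEGMENT_DOWNLOAD_TIMEOUT_MINS overrides it)
```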
100
node_modules/@actions/cache/lib/options.js
generated
vendored
Normal file
@ -0,0 +1,100 @@
|
||||
"use strict";
|
||||
var __createBinding = (this && this.__createBinding) || (Object.create ? (function(o, m, k, k2) {
|
||||
if (k2 === undefined) k2 = k;
|
||||
var desc = Object.getOwnPropertyDescriptor(m, k);
|
||||
if (!desc || ("get" in desc ? !m.__esModule : desc.writable || desc.configurable)) {
|
||||
desc = { enumerable: true, get: function() { return m[k]; } };
|
||||
}
|
||||
Object.defineProperty(o, k2, desc);
|
||||
}) : (function(o, m, k, k2) {
|
||||
if (k2 === undefined) k2 = k;
|
||||
o[k2] = m[k];
|
||||
}));
|
||||
var __setModuleDefault = (this && this.__setModuleDefault) || (Object.create ? (function(o, v) {
|
||||
Object.defineProperty(o, "default", { enumerable: true, value: v });
|
||||
}) : function(o, v) {
|
||||
o["default"] = v;
|
||||
});
|
||||
var __importStar = (this && this.__importStar) || function (mod) {
|
||||
if (mod && mod.__esModule) return mod;
|
||||
var result = {};
|
||||
if (mod != null) for (var k in mod) if (k !== "default" && Object.prototype.hasOwnProperty.call(mod, k)) __createBinding(result, mod, k);
|
||||
__setModuleDefault(result, mod);
|
||||
return result;
|
||||
};
|
||||
Object.defineProperty(exports, "__esModule", { value: true });
|
||||
exports.getDownloadOptions = exports.getUploadOptions = void 0;
|
||||
const core = __importStar(require("@actions/core"));
|
||||
/**
|
||||
* Returns a copy of the upload options with defaults filled in.
|
||||
*
|
||||
* @param copy the original upload options
|
||||
*/
|
||||
function getUploadOptions(copy) {
|
||||
const result = {
|
||||
uploadConcurrency: 4,
|
||||
uploadChunkSize: 32 * 1024 * 1024
|
||||
};
|
||||
if (copy) {
|
||||
if (typeof copy.uploadConcurrency === 'number') {
|
||||
result.uploadConcurrency = copy.uploadConcurrency;
|
||||
}
|
||||
if (typeof copy.uploadChunkSize === 'number') {
|
||||
result.uploadChunkSize = copy.uploadChunkSize;
|
||||
}
|
||||
}
|
||||
core.debug(`Upload concurrency: ${result.uploadConcurrency}`);
|
||||
core.debug(`Upload chunk size: ${result.uploadChunkSize}`);
|
||||
return result;
|
||||
}
|
||||
exports.getUploadOptions = getUploadOptions;
|
||||
/**
|
||||
* Returns a copy of the download options with defaults filled in.
|
||||
*
|
||||
* @param copy the original download options
|
||||
*/
|
||||
function getDownloadOptions(copy) {
|
||||
const result = {
|
||||
useAzureSdk: false,
|
||||
concurrentBlobDownloads: true,
|
||||
downloadConcurrency: 8,
|
||||
timeoutInMs: 30000,
|
||||
segmentTimeoutInMs: 600000,
|
||||
lookupOnly: false
|
||||
};
|
||||
if (copy) {
|
||||
if (typeof copy.useAzureSdk === 'boolean') {
|
||||
result.useAzureSdk = copy.useAzureSdk;
|
||||
}
|
||||
if (typeof copy.concurrentBlobDownloads === 'boolean') {
|
||||
result.concurrentBlobDownloads = copy.concurrentBlobDownloads;
|
||||
}
|
||||
if (typeof copy.downloadConcurrency === 'number') {
|
||||
result.downloadConcurrency = copy.downloadConcurrency;
|
||||
}
|
||||
if (typeof copy.timeoutInMs === 'number') {
|
||||
result.timeoutInMs = copy.timeoutInMs;
|
||||
}
|
||||
if (typeof copy.segmentTimeoutInMs === 'number') {
|
||||
result.segmentTimeoutInMs = copy.segmentTimeoutInMs;
|
||||
}
|
||||
if (typeof copy.lookupOnly === 'boolean') {
|
||||
result.lookupOnly = copy.lookupOnly;
|
||||
}
|
||||
}
|
||||
const segmentDownloadTimeoutMins = process.env['SEGMENT_DOWNLOAD_TIMEOUT_MINS'];
|
||||
if (segmentDownloadTimeoutMins &&
|
||||
!isNaN(Number(segmentDownloadTimeoutMins)) &&
|
||||
isFinite(Number(segmentDownloadTimeoutMins))) {
|
||||
result.segmentTimeoutInMs = Number(segmentDownloadTimeoutMins) * 60 * 1000;
|
||||
}
|
||||
core.debug(`Use Azure SDK: ${result.useAzureSdk}`);
|
||||
core.debug(`Download concurrency: ${result.downloadConcurrency}`);
|
||||
core.debug(`Request timeout (ms): ${result.timeoutInMs}`);
|
||||
core.debug(`Cache segment download timeout mins env var: ${process.env['SEGMENT_DOWNLOAD_TIMEOUT_MINS']}`);
|
||||
core.debug(`Segment download timeout (ms): ${result.segmentTimeoutInMs}`);
|
||||
core.debug(`Lookup only: ${result.lookupOnly}`);
|
||||
return result;
|
||||
}
|
||||
exports.getDownloadOptions = getDownloadOptions;
|
||||
//# sourceMappingURL=options.js.map
|
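One behaviour of getDownloadOptions above that is easy to miss: a finite SEGMENT_DOWNLOAD_TIMEOUT_MINS environment variable overrides segmentTimeoutInMs even when a value was passed in, with minutes converted to milliseconds. A hedged sketch:

```javascript
// Sketch: the environment variable wins over both the default and the caller's value.
process.env['SEGMENT_DOWNLOAD_TIMEOUT_MINS'] = '5';

const { getDownloadOptions } = require('@actions/cache/lib/options');
const opts = getDownloadOptions({ segmentTimeoutInMs: 120000 });

console.log(opts.segmentTimeoutInMs); // 300000 = 5 * 60 * 1000
```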
1
node_modules/@actions/cache/lib/options.js.map
generated
vendored
Normal file
@ -0,0 +1 @@
|
||||
{"version":3,"file":"options.js","sourceRoot":"","sources":["../src/options.ts"],"names":[],"mappings":";;;;;;;;;;;;;;;;;;;;;;;;;;AAAA,oDAAqC;AAwErC;;;;GAIG;AACH,SAAgB,gBAAgB,CAAC,IAAoB;IACnD,MAAM,MAAM,GAAkB;QAC5B,iBAAiB,EAAE,CAAC;QACpB,eAAe,EAAE,EAAE,GAAG,IAAI,GAAG,IAAI;KAClC,CAAA;IAED,IAAI,IAAI,EAAE;QACR,IAAI,OAAO,IAAI,CAAC,iBAAiB,KAAK,QAAQ,EAAE;YAC9C,MAAM,CAAC,iBAAiB,GAAG,IAAI,CAAC,iBAAiB,CAAA;SAClD;QAED,IAAI,OAAO,IAAI,CAAC,eAAe,KAAK,QAAQ,EAAE;YAC5C,MAAM,CAAC,eAAe,GAAG,IAAI,CAAC,eAAe,CAAA;SAC9C;KACF;IAED,IAAI,CAAC,KAAK,CAAC,uBAAuB,MAAM,CAAC,iBAAiB,EAAE,CAAC,CAAA;IAC7D,IAAI,CAAC,KAAK,CAAC,sBAAsB,MAAM,CAAC,eAAe,EAAE,CAAC,CAAA;IAE1D,OAAO,MAAM,CAAA;AACf,CAAC;AApBD,4CAoBC;AAED;;;;GAIG;AACH,SAAgB,kBAAkB,CAAC,IAAsB;IACvD,MAAM,MAAM,GAAoB;QAC9B,WAAW,EAAE,KAAK;QAClB,uBAAuB,EAAE,IAAI;QAC7B,mBAAmB,EAAE,CAAC;QACtB,WAAW,EAAE,KAAK;QAClB,kBAAkB,EAAE,MAAM;QAC1B,UAAU,EAAE,KAAK;KAClB,CAAA;IAED,IAAI,IAAI,EAAE;QACR,IAAI,OAAO,IAAI,CAAC,WAAW,KAAK,SAAS,EAAE;YACzC,MAAM,CAAC,WAAW,GAAG,IAAI,CAAC,WAAW,CAAA;SACtC;QAED,IAAI,OAAO,IAAI,CAAC,uBAAuB,KAAK,SAAS,EAAE;YACrD,MAAM,CAAC,uBAAuB,GAAG,IAAI,CAAC,uBAAuB,CAAA;SAC9D;QAED,IAAI,OAAO,IAAI,CAAC,mBAAmB,KAAK,QAAQ,EAAE;YAChD,MAAM,CAAC,mBAAmB,GAAG,IAAI,CAAC,mBAAmB,CAAA;SACtD;QAED,IAAI,OAAO,IAAI,CAAC,WAAW,KAAK,QAAQ,EAAE;YACxC,MAAM,CAAC,WAAW,GAAG,IAAI,CAAC,WAAW,CAAA;SACtC;QAED,IAAI,OAAO,IAAI,CAAC,kBAAkB,KAAK,QAAQ,EAAE;YAC/C,MAAM,CAAC,kBAAkB,GAAG,IAAI,CAAC,kBAAkB,CAAA;SACpD;QAED,IAAI,OAAO,IAAI,CAAC,UAAU,KAAK,SAAS,EAAE;YACxC,MAAM,CAAC,UAAU,GAAG,IAAI,CAAC,UAAU,CAAA;SACpC;KACF;IACD,MAAM,0BAA0B,GAC9B,OAAO,CAAC,GAAG,CAAC,+BAA+B,CAAC,CAAA;IAE9C,IACE,0BAA0B;QAC1B,CAAC,KAAK,CAAC,MAAM,CAAC,0BAA0B,CAAC,CAAC;QAC1C,QAAQ,CAAC,MAAM,CAAC,0BAA0B,CAAC,CAAC,EAC5C;QACA,MAAM,CAAC,kBAAkB,GAAG,MAAM,CAAC,0BAA0B,CAAC,GAAG,EAAE,GAAG,IAAI,CAAA;KAC3E;IACD,IAAI,CAAC,KAAK,CAAC,kBAAkB,MAAM,CAAC,WAAW,EAAE,CAAC,CAAA;IAClD,IAAI,CAAC,KAAK,CAAC,yBAAyB,MAAM,CAAC,mBAAmB,EAAE,CAAC,CAAA;IACjE,IAAI,CAAC,KAAK,CAAC,yBAAyB,MAAM,CAAC,WAAW,EAAE,CAAC,CAAA;IACzD,IAAI,CAAC,KAAK,CACR,gDAAgD,OAAO,CAAC,GAAG,CAAC,+BAA+B,CAAC,EAAE,CAC/F,CAAA;IACD,IAAI,CAAC,KAAK,CAAC,kCAAkC,MAAM,CAAC,kBAAkB,EAAE,CAAC,CAAA;IACzE,IAAI,CAAC,KAAK,CAAC,gBAAgB,MAAM,CAAC,UAAU,EAAE,CAAC,CAAA;IAE/C,OAAO,MAAM,CAAA;AACf,CAAC;AAvDD,gDAuDC"}
|
1
node_modules/@actions/cache/node_modules/.bin/uuid
generated
vendored
Symbolic link
@ -0,0 +1 @@
../uuid/bin/uuid
5
node_modules/@actions/cache/node_modules/uuid/AUTHORS
generated
vendored
Normal file
@ -0,0 +1,5 @@
Robert Kieffer <robert@broofa.com>
Christoph Tavan <dev@tavan.de>
AJ ONeal <coolaj86@gmail.com>
Vincent Voyer <vincent@zeroload.net>
Roman Shtylman <shtylman@gmail.com>
119
node_modules/@actions/cache/node_modules/uuid/CHANGELOG.md
generated
vendored
Normal file
@ -0,0 +1,119 @@
|
||||
# Changelog
|
||||
|
||||
All notable changes to this project will be documented in this file. See [standard-version](https://github.com/conventional-changelog/standard-version) for commit guidelines.
|
||||
|
||||
## [3.4.0](https://github.com/uuidjs/uuid/compare/v3.3.3...v3.4.0) (2020-01-16)
|
||||
|
||||
|
||||
### Features
|
||||
|
||||
* rename repository to github:uuidjs/uuid ([#351](https://github.com/uuidjs/uuid/issues/351)) ([e2d7314](https://github.com/uuidjs/uuid/commit/e2d7314)), closes [#338](https://github.com/uuidjs/uuid/issues/338)
|
||||
|
||||
### [3.3.3](https://github.com/uuidjs/uuid/compare/v3.3.2...v3.3.3) (2019-08-19)
|
||||
|
||||
<a name="3.3.2"></a>
|
||||
## [3.3.2](https://github.com/uuidjs/uuid/compare/v3.3.1...v3.3.2) (2018-06-28)
|
||||
|
||||
|
||||
### Bug Fixes
|
||||
|
||||
* typo ([305d877](https://github.com/uuidjs/uuid/commit/305d877))
|
||||
|
||||
|
||||
|
||||
<a name="3.3.1"></a>
|
||||
## [3.3.1](https://github.com/uuidjs/uuid/compare/v3.3.0...v3.3.1) (2018-06-28)
|
||||
|
||||
|
||||
### Bug Fixes
|
||||
|
||||
* fix [#284](https://github.com/uuidjs/uuid/issues/284) by setting function name in try-catch ([f2a60f2](https://github.com/uuidjs/uuid/commit/f2a60f2))
|
||||
|
||||
|
||||
|
||||
<a name="3.3.0"></a>
|
||||
# [3.3.0](https://github.com/uuidjs/uuid/compare/v3.2.1...v3.3.0) (2018-06-22)
|
||||
|
||||
|
||||
### Bug Fixes
|
||||
|
||||
* assignment to readonly property to allow running in strict mode ([#270](https://github.com/uuidjs/uuid/issues/270)) ([d062fdc](https://github.com/uuidjs/uuid/commit/d062fdc))
|
||||
* fix [#229](https://github.com/uuidjs/uuid/issues/229) ([c9684d4](https://github.com/uuidjs/uuid/commit/c9684d4))
|
||||
* Get correct version of IE11 crypto ([#274](https://github.com/uuidjs/uuid/issues/274)) ([153d331](https://github.com/uuidjs/uuid/commit/153d331))
|
||||
* mem issue when generating uuid ([#267](https://github.com/uuidjs/uuid/issues/267)) ([c47702c](https://github.com/uuidjs/uuid/commit/c47702c))
|
||||
|
||||
### Features
|
||||
|
||||
* enforce Conventional Commit style commit messages ([#282](https://github.com/uuidjs/uuid/issues/282)) ([cc9a182](https://github.com/uuidjs/uuid/commit/cc9a182))
|
||||
|
||||
|
||||
<a name="3.2.1"></a>
|
||||
## [3.2.1](https://github.com/uuidjs/uuid/compare/v3.2.0...v3.2.1) (2018-01-16)
|
||||
|
||||
|
||||
### Bug Fixes
|
||||
|
||||
* use msCrypto if available. Fixes [#241](https://github.com/uuidjs/uuid/issues/241) ([#247](https://github.com/uuidjs/uuid/issues/247)) ([1fef18b](https://github.com/uuidjs/uuid/commit/1fef18b))
|
||||
|
||||
|
||||
|
||||
<a name="3.2.0"></a>
|
||||
# [3.2.0](https://github.com/uuidjs/uuid/compare/v3.1.0...v3.2.0) (2018-01-16)
|
||||
|
||||
|
||||
### Bug Fixes
|
||||
|
||||
* remove mistakenly added typescript dependency, rollback version (standard-version will auto-increment) ([09fa824](https://github.com/uuidjs/uuid/commit/09fa824))
|
||||
* use msCrypto if available. Fixes [#241](https://github.com/uuidjs/uuid/issues/241) ([#247](https://github.com/uuidjs/uuid/issues/247)) ([1fef18b](https://github.com/uuidjs/uuid/commit/1fef18b))
|
||||
|
||||
|
||||
### Features
|
||||
|
||||
* Add v3 Support ([#217](https://github.com/uuidjs/uuid/issues/217)) ([d94f726](https://github.com/uuidjs/uuid/commit/d94f726))
|
||||
|
||||
|
||||
# [3.1.0](https://github.com/uuidjs/uuid/compare/v3.1.0...v3.0.1) (2017-06-17)
|
||||
|
||||
### Bug Fixes
|
||||
|
||||
* (fix) Add .npmignore file to exclude test/ and other non-essential files from packing. (#183)
|
||||
* Fix typo (#178)
|
||||
* Simple typo fix (#165)
|
||||
|
||||
### Features
|
||||
* v5 support in CLI (#197)
|
||||
* V5 support (#188)
|
||||
|
||||
|
||||
# 3.0.1 (2016-11-28)
|
||||
|
||||
* split uuid versions into separate files
|
||||
|
||||
|
||||
# 3.0.0 (2016-11-17)
|
||||
|
||||
* remove .parse and .unparse
|
||||
|
||||
|
||||
# 2.0.0
|
||||
|
||||
* Removed uuid.BufferClass
|
||||
|
||||
|
||||
# 1.4.0
|
||||
|
||||
* Improved module context detection
|
||||
* Removed public RNG functions
|
||||
|
||||
|
||||
# 1.3.2
|
||||
|
||||
* Improve tests and handling of v1() options (Issue #24)
|
||||
* Expose RNG option to allow for perf testing with different generators
|
||||
|
||||
|
||||
# 1.3.0
|
||||
|
||||
* Support for version 1 ids, thanks to [@ctavan](https://github.com/ctavan)!
|
||||
* Support for node.js crypto API
|
||||
* De-emphasizing performance in favor of a) cryptographic quality PRNGs where available and b) more manageable code
|
21
node_modules/@actions/cache/node_modules/uuid/LICENSE.md
generated
vendored
Normal file
@ -0,0 +1,21 @@
|
||||
The MIT License (MIT)
|
||||
|
||||
Copyright (c) 2010-2016 Robert Kieffer and other contributors
|
||||
|
||||
Permission is hereby granted, free of charge, to any person obtaining a copy
|
||||
of this software and associated documentation files (the "Software"), to deal
|
||||
in the Software without restriction, including without limitation the rights
|
||||
to use, copy, modify, merge, publish, distribute, sublicense, and/or sell
|
||||
copies of the Software, and to permit persons to whom the Software is
|
||||
furnished to do so, subject to the following conditions:
|
||||
|
||||
The above copyright notice and this permission notice shall be included in all
|
||||
copies or substantial portions of the Software.
|
||||
|
||||
THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
|
||||
IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
|
||||
FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
|
||||
AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
|
||||
LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
|
||||
OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE
|
||||
SOFTWARE.
|
276
node_modules/@actions/cache/node_modules/uuid/README.md
generated
vendored
Normal file
@ -0,0 +1,276 @@
|
||||
<!--
|
||||
-- This file is auto-generated from README_js.md. Changes should be made there.
|
||||
-->
|
||||
|
||||
# uuid [](http://travis-ci.org/kelektiv/node-uuid) #
|
||||
|
||||
Simple, fast generation of [RFC4122](http://www.ietf.org/rfc/rfc4122.txt) UUIDS.
|
||||
|
||||
Features:
|
||||
|
||||
* Support for version 1, 3, 4 and 5 UUIDs
|
||||
* Cross-platform
|
||||
* Uses cryptographically-strong random number APIs (when available)
|
||||
* Zero-dependency, small footprint (... but not [this small](https://gist.github.com/982883))
|
||||
|
||||
[**Deprecation warning**: The use of `require('uuid')` is deprecated and will not be
|
||||
supported after version 3.x of this module. Instead, use `require('uuid/[v1|v3|v4|v5]')` as shown in the examples below.]
|
||||
|
||||
## Quickstart - CommonJS (Recommended)
|
||||
|
||||
```shell
|
||||
npm install uuid
|
||||
```
|
||||
|
||||
Then generate your uuid version of choice ...
|
||||
|
||||
Version 1 (timestamp):
|
||||
|
||||
```javascript
|
||||
const uuidv1 = require('uuid/v1');
|
||||
uuidv1(); // ⇨ '2c5ea4c0-4067-11e9-8bad-9b1deb4d3b7d'
|
||||
|
||||
```
|
||||
|
||||
Version 3 (namespace):
|
||||
|
||||
```javascript
|
||||
const uuidv3 = require('uuid/v3');
|
||||
|
||||
// ... using predefined DNS namespace (for domain names)
|
||||
uuidv3('hello.example.com', uuidv3.DNS); // ⇨ '9125a8dc-52ee-365b-a5aa-81b0b3681cf6'
|
||||
|
||||
// ... using predefined URL namespace (for, well, URLs)
|
||||
uuidv3('http://example.com/hello', uuidv3.URL); // ⇨ 'c6235813-3ba4-3801-ae84-e0a6ebb7d138'
|
||||
|
||||
// ... using a custom namespace
|
||||
//
|
||||
// Note: Custom namespaces should be a UUID string specific to your application!
|
||||
// E.g. the one here was generated using this module's `uuid` CLI.
|
||||
const MY_NAMESPACE = '1b671a64-40d5-491e-99b0-da01ff1f3341';
|
||||
uuidv3('Hello, World!', MY_NAMESPACE); // ⇨ 'e8b5a51d-11c8-3310-a6ab-367563f20686'
|
||||
|
||||
```
|
||||
|
||||
Version 4 (random):
|
||||
|
||||
```javascript
|
||||
const uuidv4 = require('uuid/v4');
|
||||
uuidv4(); // ⇨ '1b9d6bcd-bbfd-4b2d-9b5d-ab8dfbbd4bed'
|
||||
|
||||
```
|
||||
|
||||
Version 5 (namespace):
|
||||
|
||||
```javascript
|
||||
const uuidv5 = require('uuid/v5');
|
||||
|
||||
// ... using predefined DNS namespace (for domain names)
|
||||
uuidv5('hello.example.com', uuidv5.DNS); // ⇨ 'fdda765f-fc57-5604-a269-52a7df8164ec'
|
||||
|
||||
// ... using predefined URL namespace (for, well, URLs)
|
||||
uuidv5('http://example.com/hello', uuidv5.URL); // ⇨ '3bbcee75-cecc-5b56-8031-b6641c1ed1f1'
|
||||
|
||||
// ... using a custom namespace
|
||||
//
|
||||
// Note: Custom namespaces should be a UUID string specific to your application!
|
||||
// E.g. the one here was generated using this module's `uuid` CLI.
|
||||
const MY_NAMESPACE = '1b671a64-40d5-491e-99b0-da01ff1f3341';
|
||||
uuidv5('Hello, World!', MY_NAMESPACE); // ⇨ '630eb68f-e0fa-5ecc-887a-7c7a62614681'
|
||||
|
||||
```
|
||||
|
||||
## API
|
||||
|
||||
### Version 1
|
||||
|
||||
```javascript
|
||||
const uuidv1 = require('uuid/v1');
|
||||
|
||||
// Incantations
|
||||
uuidv1();
|
||||
uuidv1(options);
|
||||
uuidv1(options, buffer, offset);
|
||||
```
|
||||
|
||||
Generate and return an RFC4122 v1 (timestamp-based) UUID.
|
||||
|
||||
* `options` - (Object) Optional uuid state to apply. Properties may include:
|
||||
|
||||
* `node` - (Array) Node id as Array of 6 bytes (per 4.1.6). Default: Randomly generated ID. See note 1.
|
||||
* `clockseq` - (Number between 0 - 0x3fff) RFC clock sequence. Default: An internally maintained clockseq is used.
|
||||
* `msecs` - (Number) Time in milliseconds since unix Epoch. Default: The current time is used.
|
||||
* `nsecs` - (Number between 0-9999) additional time, in 100-nanosecond units. Ignored if `msecs` is unspecified. Default: internal uuid counter is used, as per 4.2.1.2.
|
||||
|
||||
* `buffer` - (Array | Buffer) Array or buffer where UUID bytes are to be written.
|
||||
* `offset` - (Number) Starting index in `buffer` at which to begin writing.
|
||||
|
||||
Returns `buffer`, if specified, otherwise the string form of the UUID
|
||||
|
||||
Note: The default [node id](https://tools.ietf.org/html/rfc4122#section-4.1.6) (the last 12 digits in the UUID) is generated once, randomly, on process startup, and then remains unchanged for the duration of the process.
|
||||
|
||||
Example: Generate string UUID with fully-specified options
|
||||
|
||||
```javascript
|
||||
const v1options = {
|
||||
node: [0x01, 0x23, 0x45, 0x67, 0x89, 0xab],
|
||||
clockseq: 0x1234,
|
||||
msecs: new Date('2011-11-01').getTime(),
|
||||
nsecs: 5678
|
||||
};
|
||||
uuidv1(v1options); // ⇨ '710b962e-041c-11e1-9234-0123456789ab'
|
||||
|
||||
```
|
||||
|
||||
Example: In-place generation of two binary IDs
|
||||
|
||||
```javascript
|
||||
// Generate two ids in an array
|
||||
const arr = new Array();
|
||||
uuidv1(null, arr, 0); // ⇨
|
||||
// [
|
||||
// 44, 94, 164, 192, 64, 103,
|
||||
// 17, 233, 146, 52, 155, 29,
|
||||
// 235, 77, 59, 125
|
||||
// ]
|
||||
uuidv1(null, arr, 16); // ⇨
|
||||
// [
|
||||
// 44, 94, 164, 192, 64, 103, 17, 233,
|
||||
// 146, 52, 155, 29, 235, 77, 59, 125,
|
||||
// 44, 94, 164, 193, 64, 103, 17, 233,
|
||||
// 146, 52, 155, 29, 235, 77, 59, 125
|
||||
// ]
|
||||
|
||||
```
|
||||
|
||||
### Version 3
|
||||
|
||||
```javascript
|
||||
const uuidv3 = require('uuid/v3');
|
||||
|
||||
// Incantations
|
||||
uuidv3(name, namespace);
|
||||
uuidv3(name, namespace, buffer);
|
||||
uuidv3(name, namespace, buffer, offset);
|
||||
```
|
||||
|
||||
Generate and return an RFC4122 v3 UUID.
|
||||
|
||||
* `name` - (String | Array[]) "name" to create UUID with
|
||||
* `namespace` - (String | Array[]) "namespace" UUID either as a String or Array[16] of byte values
|
||||
* `buffer` - (Array | Buffer) Array or buffer where UUID bytes are to be written.
|
||||
* `offset` - (Number) Starting index in `buffer` at which to begin writing. Default = 0
|
||||
|
||||
Returns `buffer`, if specified, otherwise the string form of the UUID
|
||||
|
||||
Example:
|
||||
|
||||
```javascript
|
||||
uuidv3('hello world', MY_NAMESPACE); // ⇨ '042ffd34-d989-321c-ad06-f60826172424'
|
||||
|
||||
```
|
||||
|
||||
### Version 4
|
||||
|
||||
```javascript
|
||||
const uuidv4 = require('uuid/v4')
|
||||
|
||||
// Incantations
|
||||
uuidv4();
|
||||
uuidv4(options);
|
||||
uuidv4(options, buffer, offset);
|
||||
```
|
||||
|
||||
Generate and return an RFC4122 v4 UUID.
|
||||
|
||||
* `options` - (Object) Optional uuid state to apply. Properties may include:
|
||||
* `random` - (Number[16]) Array of 16 numbers (0-255) to use in place of randomly generated values
|
||||
* `rng` - (Function) Random # generator function that returns an Array[16] of byte values (0-255)
|
||||
* `buffer` - (Array | Buffer) Array or buffer where UUID bytes are to be written.
|
||||
* `offset` - (Number) Starting index in `buffer` at which to begin writing.
|
||||
|
||||
Returns `buffer`, if specified, otherwise the string form of the UUID
|
||||
|
||||
Example: Generate string UUID with predefined `random` values
|
||||
|
||||
```javascript
|
||||
const v4options = {
|
||||
random: [
|
||||
0x10, 0x91, 0x56, 0xbe, 0xc4, 0xfb, 0xc1, 0xea,
|
||||
0x71, 0xb4, 0xef, 0xe1, 0x67, 0x1c, 0x58, 0x36
|
||||
]
|
||||
};
|
||||
uuidv4(v4options); // ⇨ '109156be-c4fb-41ea-b1b4-efe1671c5836'
|
||||
|
||||
```
|
||||
|
||||
Example: Generate two IDs in a single buffer
|
||||
|
||||
```javascript
|
||||
const buffer = new Array();
|
||||
uuidv4(null, buffer, 0); // ⇨
|
||||
// [
|
||||
// 155, 29, 235, 77, 59,
|
||||
// 125, 75, 173, 155, 221,
|
||||
// 43, 13, 123, 61, 203,
|
||||
// 109
|
||||
// ]
|
||||
uuidv4(null, buffer, 16); // ⇨
|
||||
// [
|
||||
// 155, 29, 235, 77, 59, 125, 75, 173,
|
||||
// 155, 221, 43, 13, 123, 61, 203, 109,
|
||||
// 27, 157, 107, 205, 187, 253, 75, 45,
|
||||
// 155, 93, 171, 141, 251, 189, 75, 237
|
||||
// ]
|
||||
|
||||
```
|
||||
|
||||
### Version 5
|
||||
|
||||
```javascript
|
||||
const uuidv5 = require('uuid/v5');
|
||||
|
||||
// Incantations
|
||||
uuidv5(name, namespace);
|
||||
uuidv5(name, namespace, buffer);
|
||||
uuidv5(name, namespace, buffer, offset);
|
||||
```
|
||||
|
||||
Generate and return an RFC4122 v5 UUID.
|
||||
|
||||
* `name` - (String | Array[]) "name" to create UUID with
|
||||
* `namespace` - (String | Array[]) "namespace" UUID either as a String or Array[16] of byte values
|
||||
* `buffer` - (Array | Buffer) Array or buffer where UUID bytes are to be written.
|
||||
* `offset` - (Number) Starting index in `buffer` at which to begin writing. Default = 0
|
||||
|
||||
Returns `buffer`, if specified, otherwise the string form of the UUID
|
||||
|
||||
Example:
|
||||
|
||||
```javascript
|
||||
uuidv5('hello world', MY_NAMESPACE); // ⇨ '9f282611-e0fd-5650-8953-89c8e342da0b'
|
||||
|
||||
```
|
||||
|
||||
## Command Line
|
||||
|
||||
UUIDs can be generated from the command line with the `uuid` command.
|
||||
|
||||
```shell
|
||||
$ uuid
|
||||
ddeb27fb-d9a0-4624-be4d-4615062daed4
|
||||
|
||||
$ uuid v1
|
||||
02d37060-d446-11e7-a9fa-7bdae751ebe1
|
||||
```
|
||||
|
||||
Type `uuid --help` for usage details
|
||||
|
||||
## Testing
|
||||
|
||||
```shell
|
||||
npm test
|
||||
```
|
||||
|
||||
----
|
||||
Markdown generated from [README_js.md](README_js.md) by [](https://github.com/broofa/runmd)
|
65
node_modules/@actions/cache/node_modules/uuid/bin/uuid
generated
vendored
Executable file
@ -0,0 +1,65 @@
|
||||
#!/usr/bin/env node
|
||||
var assert = require('assert');
|
||||
|
||||
function usage() {
|
||||
console.log('Usage:');
|
||||
console.log(' uuid');
|
||||
console.log(' uuid v1');
|
||||
console.log(' uuid v3 <name> <namespace uuid>');
|
||||
console.log(' uuid v4');
|
||||
console.log(' uuid v5 <name> <namespace uuid>');
|
||||
console.log(' uuid --help');
|
||||
console.log('\nNote: <namespace uuid> may be "URL" or "DNS" to use the corresponding UUIDs defined by RFC4122');
|
||||
}
|
||||
|
||||
var args = process.argv.slice(2);
|
||||
|
||||
if (args.indexOf('--help') >= 0) {
|
||||
usage();
|
||||
process.exit(0);
|
||||
}
|
||||
var version = args.shift() || 'v4';
|
||||
|
||||
switch (version) {
|
||||
case 'v1':
|
||||
var uuidV1 = require('../v1');
|
||||
console.log(uuidV1());
|
||||
break;
|
||||
|
||||
case 'v3':
|
||||
var uuidV3 = require('../v3');
|
||||
|
||||
var name = args.shift();
|
||||
var namespace = args.shift();
|
||||
assert(name != null, 'v3 name not specified');
|
||||
assert(namespace != null, 'v3 namespace not specified');
|
||||
|
||||
if (namespace == 'URL') namespace = uuidV3.URL;
|
||||
if (namespace == 'DNS') namespace = uuidV3.DNS;
|
||||
|
||||
console.log(uuidV3(name, namespace));
|
||||
break;
|
||||
|
||||
case 'v4':
|
||||
var uuidV4 = require('../v4');
|
||||
console.log(uuidV4());
|
||||
break;
|
||||
|
||||
case 'v5':
|
||||
var uuidV5 = require('../v5');
|
||||
|
||||
var name = args.shift();
|
||||
var namespace = args.shift();
|
||||
assert(name != null, 'v5 name not specified');
|
||||
assert(namespace != null, 'v5 namespace not specified');
|
||||
|
||||
if (namespace == 'URL') namespace = uuidV5.URL;
|
||||
if (namespace == 'DNS') namespace = uuidV5.DNS;
|
||||
|
||||
console.log(uuidV5(name, namespace));
|
||||
break;
|
||||
|
||||
default:
|
||||
usage();
|
||||
process.exit(1);
|
||||
}
|
8
node_modules/@actions/cache/node_modules/uuid/index.js
generated
vendored
Normal file
@ -0,0 +1,8 @@
var v1 = require('./v1');
var v4 = require('./v4');

var uuid = v4;
uuid.v1 = v1;
uuid.v4 = v4;

module.exports = uuid;
26
node_modules/@actions/cache/node_modules/uuid/lib/bytesToUuid.js
generated
vendored
Normal file
@ -0,0 +1,26 @@
|
||||
/**
|
||||
* Convert array of 16 byte values to UUID string format of the form:
|
||||
* XXXXXXXX-XXXX-XXXX-XXXX-XXXXXXXXXXXX
|
||||
*/
|
||||
var byteToHex = [];
|
||||
for (var i = 0; i < 256; ++i) {
|
||||
byteToHex[i] = (i + 0x100).toString(16).substr(1);
|
||||
}
|
||||
|
||||
function bytesToUuid(buf, offset) {
|
||||
var i = offset || 0;
|
||||
var bth = byteToHex;
|
||||
// join used to fix memory issue caused by concatenation: https://bugs.chromium.org/p/v8/issues/detail?id=3175#c4
|
||||
return ([
|
||||
bth[buf[i++]], bth[buf[i++]],
|
||||
bth[buf[i++]], bth[buf[i++]], '-',
|
||||
bth[buf[i++]], bth[buf[i++]], '-',
|
||||
bth[buf[i++]], bth[buf[i++]], '-',
|
||||
bth[buf[i++]], bth[buf[i++]], '-',
|
||||
bth[buf[i++]], bth[buf[i++]],
|
||||
bth[buf[i++]], bth[buf[i++]],
|
||||
bth[buf[i++]], bth[buf[i++]]
|
||||
]).join('');
|
||||
}
|
||||
|
||||
module.exports = bytesToUuid;
|
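A tiny illustrative call of the helper above; the byte values are arbitrary and only show the 8-4-4-4-12 grouping.

```javascript
var bytesToUuid = require('./bytesToUuid'); // relative path as used inside this package

var bytes = [0x12, 0x34, 0x56, 0x78, 0x9a, 0xbc, 0xde, 0xf0,
             0x01, 0x23, 0x45, 0x67, 0x89, 0xab, 0xcd, 0xef];

console.log(bytesToUuid(bytes)); // '12345678-9abc-def0-0123-456789abcdef'
```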
216
node_modules/@actions/cache/node_modules/uuid/lib/md5-browser.js
generated
vendored
Normal file
@ -0,0 +1,216 @@
|
||||
/*
|
||||
* Browser-compatible JavaScript MD5
|
||||
*
|
||||
* Modification of JavaScript MD5
|
||||
* https://github.com/blueimp/JavaScript-MD5
|
||||
*
|
||||
* Copyright 2011, Sebastian Tschan
|
||||
* https://blueimp.net
|
||||
*
|
||||
* Licensed under the MIT license:
|
||||
* https://opensource.org/licenses/MIT
|
||||
*
|
||||
* Based on
|
||||
* A JavaScript implementation of the RSA Data Security, Inc. MD5 Message
|
||||
* Digest Algorithm, as defined in RFC 1321.
|
||||
* Version 2.2 Copyright (C) Paul Johnston 1999 - 2009
|
||||
* Other contributors: Greg Holt, Andrew Kepert, Ydnar, Lostinet
|
||||
* Distributed under the BSD License
|
||||
* See http://pajhome.org.uk/crypt/md5 for more info.
|
||||
*/
|
||||
|
||||
'use strict';
|
||||
|
||||
function md5(bytes) {
|
||||
if (typeof(bytes) == 'string') {
|
||||
var msg = unescape(encodeURIComponent(bytes)); // UTF8 escape
|
||||
bytes = new Array(msg.length);
|
||||
for (var i = 0; i < msg.length; i++) bytes[i] = msg.charCodeAt(i);
|
||||
}
|
||||
|
||||
return md5ToHexEncodedArray(
|
||||
wordsToMd5(
|
||||
bytesToWords(bytes)
|
||||
, bytes.length * 8)
|
||||
);
|
||||
}
|
||||
|
||||
|
||||
/*
|
||||
* Convert an array of little-endian words to an array of bytes
|
||||
*/
|
||||
function md5ToHexEncodedArray(input) {
|
||||
var i;
|
||||
var x;
|
||||
var output = [];
|
||||
var length32 = input.length * 32;
|
||||
var hexTab = '0123456789abcdef';
|
||||
var hex;
|
||||
|
||||
for (i = 0; i < length32; i += 8) {
|
||||
x = (input[i >> 5] >>> (i % 32)) & 0xFF;
|
||||
|
||||
hex = parseInt(hexTab.charAt((x >>> 4) & 0x0F) + hexTab.charAt(x & 0x0F), 16);
|
||||
|
||||
output.push(hex);
|
||||
}
|
||||
return output;
|
||||
}
|
||||
|
||||
/*
|
||||
* Calculate the MD5 of an array of little-endian words, and a bit length.
|
||||
*/
|
||||
function wordsToMd5(x, len) {
|
||||
/* append padding */
|
||||
x[len >> 5] |= 0x80 << (len % 32);
|
||||
x[(((len + 64) >>> 9) << 4) + 14] = len;
|
||||
|
||||
var i;
|
||||
var olda;
|
||||
var oldb;
|
||||
var oldc;
|
||||
var oldd;
|
||||
var a = 1732584193;
|
||||
var b = -271733879;
|
||||
var c = -1732584194;
|
||||
|
||||
var d = 271733878;
|
||||
|
||||
for (i = 0; i < x.length; i += 16) {
|
||||
olda = a;
|
||||
oldb = b;
|
||||
oldc = c;
|
||||
oldd = d;
|
||||
|
||||
a = md5ff(a, b, c, d, x[i], 7, -680876936);
|
||||
d = md5ff(d, a, b, c, x[i + 1], 12, -389564586);
|
||||
c = md5ff(c, d, a, b, x[i + 2], 17, 606105819);
|
||||
b = md5ff(b, c, d, a, x[i + 3], 22, -1044525330);
|
||||
a = md5ff(a, b, c, d, x[i + 4], 7, -176418897);
|
||||
d = md5ff(d, a, b, c, x[i + 5], 12, 1200080426);
|
||||
c = md5ff(c, d, a, b, x[i + 6], 17, -1473231341);
|
||||
b = md5ff(b, c, d, a, x[i + 7], 22, -45705983);
|
||||
a = md5ff(a, b, c, d, x[i + 8], 7, 1770035416);
|
||||
d = md5ff(d, a, b, c, x[i + 9], 12, -1958414417);
|
||||
c = md5ff(c, d, a, b, x[i + 10], 17, -42063);
|
||||
b = md5ff(b, c, d, a, x[i + 11], 22, -1990404162);
|
||||
a = md5ff(a, b, c, d, x[i + 12], 7, 1804603682);
|
||||
d = md5ff(d, a, b, c, x[i + 13], 12, -40341101);
|
||||
c = md5ff(c, d, a, b, x[i + 14], 17, -1502002290);
|
||||
b = md5ff(b, c, d, a, x[i + 15], 22, 1236535329);
|
||||
|
||||
a = md5gg(a, b, c, d, x[i + 1], 5, -165796510);
|
||||
d = md5gg(d, a, b, c, x[i + 6], 9, -1069501632);
|
||||
c = md5gg(c, d, a, b, x[i + 11], 14, 643717713);
|
||||
b = md5gg(b, c, d, a, x[i], 20, -373897302);
|
||||
a = md5gg(a, b, c, d, x[i + 5], 5, -701558691);
|
||||
d = md5gg(d, a, b, c, x[i + 10], 9, 38016083);
|
||||
c = md5gg(c, d, a, b, x[i + 15], 14, -660478335);
|
||||
b = md5gg(b, c, d, a, x[i + 4], 20, -405537848);
|
||||
a = md5gg(a, b, c, d, x[i + 9], 5, 568446438);
|
||||
d = md5gg(d, a, b, c, x[i + 14], 9, -1019803690);
|
||||
c = md5gg(c, d, a, b, x[i + 3], 14, -187363961);
|
||||
b = md5gg(b, c, d, a, x[i + 8], 20, 1163531501);
|
||||
a = md5gg(a, b, c, d, x[i + 13], 5, -1444681467);
|
||||
d = md5gg(d, a, b, c, x[i + 2], 9, -51403784);
|
||||
c = md5gg(c, d, a, b, x[i + 7], 14, 1735328473);
|
||||
b = md5gg(b, c, d, a, x[i + 12], 20, -1926607734);
|
||||
|
||||
a = md5hh(a, b, c, d, x[i + 5], 4, -378558);
|
||||
d = md5hh(d, a, b, c, x[i + 8], 11, -2022574463);
|
||||
c = md5hh(c, d, a, b, x[i + 11], 16, 1839030562);
|
||||
b = md5hh(b, c, d, a, x[i + 14], 23, -35309556);
|
||||
a = md5hh(a, b, c, d, x[i + 1], 4, -1530992060);
|
||||
d = md5hh(d, a, b, c, x[i + 4], 11, 1272893353);
|
||||
c = md5hh(c, d, a, b, x[i + 7], 16, -155497632);
|
||||
b = md5hh(b, c, d, a, x[i + 10], 23, -1094730640);
|
||||
a = md5hh(a, b, c, d, x[i + 13], 4, 681279174);
|
||||
d = md5hh(d, a, b, c, x[i], 11, -358537222);
|
||||
c = md5hh(c, d, a, b, x[i + 3], 16, -722521979);
|
||||
b = md5hh(b, c, d, a, x[i + 6], 23, 76029189);
|
||||
a = md5hh(a, b, c, d, x[i + 9], 4, -640364487);
|
||||
d = md5hh(d, a, b, c, x[i + 12], 11, -421815835);
|
||||
c = md5hh(c, d, a, b, x[i + 15], 16, 530742520);
|
||||
b = md5hh(b, c, d, a, x[i + 2], 23, -995338651);
|
||||
|
||||
a = md5ii(a, b, c, d, x[i], 6, -198630844);
|
||||
d = md5ii(d, a, b, c, x[i + 7], 10, 1126891415);
|
||||
c = md5ii(c, d, a, b, x[i + 14], 15, -1416354905);
|
||||
b = md5ii(b, c, d, a, x[i + 5], 21, -57434055);
|
||||
a = md5ii(a, b, c, d, x[i + 12], 6, 1700485571);
|
||||
d = md5ii(d, a, b, c, x[i + 3], 10, -1894986606);
|
||||
c = md5ii(c, d, a, b, x[i + 10], 15, -1051523);
|
||||
b = md5ii(b, c, d, a, x[i + 1], 21, -2054922799);
|
||||
a = md5ii(a, b, c, d, x[i + 8], 6, 1873313359);
|
||||
d = md5ii(d, a, b, c, x[i + 15], 10, -30611744);
|
||||
c = md5ii(c, d, a, b, x[i + 6], 15, -1560198380);
|
||||
b = md5ii(b, c, d, a, x[i + 13], 21, 1309151649);
|
||||
a = md5ii(a, b, c, d, x[i + 4], 6, -145523070);
|
||||
d = md5ii(d, a, b, c, x[i + 11], 10, -1120210379);
|
||||
c = md5ii(c, d, a, b, x[i + 2], 15, 718787259);
|
||||
b = md5ii(b, c, d, a, x[i + 9], 21, -343485551);
|
||||
|
||||
a = safeAdd(a, olda);
|
||||
b = safeAdd(b, oldb);
|
||||
c = safeAdd(c, oldc);
|
||||
d = safeAdd(d, oldd);
|
||||
}
|
||||
return [a, b, c, d];
|
||||
}
|
||||
|
||||
/*
|
||||
* Convert an array bytes to an array of little-endian words
|
||||
* Characters >255 have their high-byte silently ignored.
|
||||
*/
|
||||
function bytesToWords(input) {
|
||||
var i;
|
||||
var output = [];
|
||||
output[(input.length >> 2) - 1] = undefined;
|
||||
for (i = 0; i < output.length; i += 1) {
|
||||
output[i] = 0;
|
||||
}
|
||||
var length8 = input.length * 8;
|
||||
for (i = 0; i < length8; i += 8) {
|
||||
output[i >> 5] |= (input[(i / 8)] & 0xFF) << (i % 32);
|
||||
}
|
||||
|
||||
return output;
|
||||
}
|
||||
|
||||
/*
|
||||
* Add integers, wrapping at 2^32. This uses 16-bit operations internally
|
||||
* to work around bugs in some JS interpreters.
|
||||
*/
|
||||
function safeAdd(x, y) {
|
||||
var lsw = (x & 0xFFFF) + (y & 0xFFFF);
|
||||
var msw = (x >> 16) + (y >> 16) + (lsw >> 16);
|
||||
return (msw << 16) | (lsw & 0xFFFF);
|
||||
}
|
||||
|
||||
/*
|
||||
* Bitwise rotate a 32-bit number to the left.
|
||||
*/
|
||||
function bitRotateLeft(num, cnt) {
|
||||
return (num << cnt) | (num >>> (32 - cnt));
|
||||
}
|
||||
|
||||
/*
|
||||
* These functions implement the four basic operations the algorithm uses.
|
||||
*/
|
||||
function md5cmn(q, a, b, x, s, t) {
|
||||
return safeAdd(bitRotateLeft(safeAdd(safeAdd(a, q), safeAdd(x, t)), s), b);
|
||||
}
|
||||
function md5ff(a, b, c, d, x, s, t) {
|
||||
return md5cmn((b & c) | ((~b) & d), a, b, x, s, t);
|
||||
}
|
||||
function md5gg(a, b, c, d, x, s, t) {
|
||||
return md5cmn((b & d) | (c & (~d)), a, b, x, s, t);
|
||||
}
|
||||
function md5hh(a, b, c, d, x, s, t) {
|
||||
return md5cmn(b ^ c ^ d, a, b, x, s, t);
|
||||
}
|
||||
function md5ii(a, b, c, d, x, s, t) {
|
||||
return md5cmn(c ^ (b | (~d)), a, b, x, s, t);
|
||||
}
|
||||
|
||||
module.exports = md5;
|
25
node_modules/@actions/cache/node_modules/uuid/lib/md5.js
generated
vendored
Normal file
@ -0,0 +1,25 @@
|
||||
'use strict';
|
||||
|
||||
var crypto = require('crypto');
|
||||
|
||||
function md5(bytes) {
|
||||
if (typeof Buffer.from === 'function') {
|
||||
// Modern Buffer API
|
||||
if (Array.isArray(bytes)) {
|
||||
bytes = Buffer.from(bytes);
|
||||
} else if (typeof bytes === 'string') {
|
||||
bytes = Buffer.from(bytes, 'utf8');
|
||||
}
|
||||
} else {
|
||||
// Pre-v4 Buffer API
|
||||
if (Array.isArray(bytes)) {
|
||||
bytes = new Buffer(bytes);
|
||||
} else if (typeof bytes === 'string') {
|
||||
bytes = new Buffer(bytes, 'utf8');
|
||||
}
|
||||
}
|
||||
|
||||
return crypto.createHash('md5').update(bytes).digest();
|
||||
}
|
||||
|
||||
module.exports = md5;
|
34
node_modules/@actions/cache/node_modules/uuid/lib/rng-browser.js
generated
vendored
Normal file
@ -0,0 +1,34 @@
|
||||
// Unique ID creation requires a high quality random # generator. In the
|
||||
// browser this is a little complicated due to unknown quality of Math.random()
|
||||
// and inconsistent support for the `crypto` API. We do the best we can via
|
||||
// feature-detection
|
||||
|
||||
// getRandomValues needs to be invoked in a context where "this" is a Crypto
|
||||
// implementation. Also, find the complete implementation of crypto on IE11.
|
||||
var getRandomValues = (typeof(crypto) != 'undefined' && crypto.getRandomValues && crypto.getRandomValues.bind(crypto)) ||
|
||||
(typeof(msCrypto) != 'undefined' && typeof window.msCrypto.getRandomValues == 'function' && msCrypto.getRandomValues.bind(msCrypto));
|
||||
|
||||
if (getRandomValues) {
|
||||
// WHATWG crypto RNG - http://wiki.whatwg.org/wiki/Crypto
|
||||
var rnds8 = new Uint8Array(16); // eslint-disable-line no-undef
|
||||
|
||||
module.exports = function whatwgRNG() {
|
||||
getRandomValues(rnds8);
|
||||
return rnds8;
|
||||
};
|
||||
} else {
|
||||
// Math.random()-based (RNG)
|
||||
//
|
||||
// If all else fails, use Math.random(). It's fast, but is of unspecified
|
||||
// quality.
|
||||
var rnds = new Array(16);
|
||||
|
||||
module.exports = function mathRNG() {
|
||||
for (var i = 0, r; i < 16; i++) {
|
||||
if ((i & 0x03) === 0) r = Math.random() * 0x100000000;
|
||||
rnds[i] = r >>> ((i & 0x03) << 3) & 0xff;
|
||||
}
|
||||
|
||||
return rnds;
|
||||
};
|
||||
}
|
8
node_modules/@actions/cache/node_modules/uuid/lib/rng.js
generated
vendored
Normal file
@ -0,0 +1,8 @@
// Unique ID creation requires a high quality random # generator. In node.js
// this is pretty straight-forward - we use the crypto API.

var crypto = require('crypto');

module.exports = function nodeRNG() {
  return crypto.randomBytes(16);
};
89
node_modules/@actions/cache/node_modules/uuid/lib/sha1-browser.js
generated
vendored
Normal file
@ -0,0 +1,89 @@
|
||||
// Adapted from Chris Veness' SHA1 code at
|
||||
// http://www.movable-type.co.uk/scripts/sha1.html
|
||||
'use strict';
|
||||
|
||||
function f(s, x, y, z) {
|
||||
switch (s) {
|
||||
case 0: return (x & y) ^ (~x & z);
|
||||
case 1: return x ^ y ^ z;
|
||||
case 2: return (x & y) ^ (x & z) ^ (y & z);
|
||||
case 3: return x ^ y ^ z;
|
||||
}
|
||||
}
|
||||
|
||||
function ROTL(x, n) {
|
||||
return (x << n) | (x>>> (32 - n));
|
||||
}
|
||||
|
||||
function sha1(bytes) {
|
||||
var K = [0x5a827999, 0x6ed9eba1, 0x8f1bbcdc, 0xca62c1d6];
|
||||
var H = [0x67452301, 0xefcdab89, 0x98badcfe, 0x10325476, 0xc3d2e1f0];
|
||||
|
||||
if (typeof(bytes) == 'string') {
|
||||
var msg = unescape(encodeURIComponent(bytes)); // UTF8 escape
|
||||
bytes = new Array(msg.length);
|
||||
for (var i = 0; i < msg.length; i++) bytes[i] = msg.charCodeAt(i);
|
||||
}
|
||||
|
||||
bytes.push(0x80);
|
||||
|
||||
var l = bytes.length/4 + 2;
|
||||
var N = Math.ceil(l/16);
|
||||
var M = new Array(N);
|
||||
|
||||
for (var i=0; i<N; i++) {
|
||||
M[i] = new Array(16);
|
||||
for (var j=0; j<16; j++) {
|
||||
M[i][j] =
|
||||
bytes[i * 64 + j * 4] << 24 |
|
||||
bytes[i * 64 + j * 4 + 1] << 16 |
|
||||
bytes[i * 64 + j * 4 + 2] << 8 |
|
||||
bytes[i * 64 + j * 4 + 3];
|
||||
}
|
||||
}
|
||||
|
||||
M[N - 1][14] = ((bytes.length - 1) * 8) /
|
||||
Math.pow(2, 32); M[N - 1][14] = Math.floor(M[N - 1][14]);
|
||||
M[N - 1][15] = ((bytes.length - 1) * 8) & 0xffffffff;
|
||||
|
||||
for (var i=0; i<N; i++) {
|
||||
var W = new Array(80);
|
||||
|
||||
for (var t=0; t<16; t++) W[t] = M[i][t];
|
||||
for (var t=16; t<80; t++) {
|
||||
W[t] = ROTL(W[t - 3] ^ W[t - 8] ^ W[t - 14] ^ W[t - 16], 1);
|
||||
}
|
||||
|
||||
var a = H[0];
|
||||
var b = H[1];
|
||||
var c = H[2];
|
||||
var d = H[3];
|
||||
var e = H[4];
|
||||
|
||||
for (var t=0; t<80; t++) {
|
||||
var s = Math.floor(t/20);
|
||||
var T = ROTL(a, 5) + f(s, b, c, d) + e + K[s] + W[t] >>> 0;
|
||||
e = d;
|
||||
d = c;
|
||||
c = ROTL(b, 30) >>> 0;
|
||||
b = a;
|
||||
a = T;
|
||||
}
|
||||
|
||||
H[0] = (H[0] + a) >>> 0;
|
||||
H[1] = (H[1] + b) >>> 0;
|
||||
H[2] = (H[2] + c) >>> 0;
|
||||
H[3] = (H[3] + d) >>> 0;
|
||||
H[4] = (H[4] + e) >>> 0;
|
||||
}
|
||||
|
||||
return [
|
||||
H[0] >> 24 & 0xff, H[0] >> 16 & 0xff, H[0] >> 8 & 0xff, H[0] & 0xff,
|
||||
H[1] >> 24 & 0xff, H[1] >> 16 & 0xff, H[1] >> 8 & 0xff, H[1] & 0xff,
|
||||
H[2] >> 24 & 0xff, H[2] >> 16 & 0xff, H[2] >> 8 & 0xff, H[2] & 0xff,
|
||||
H[3] >> 24 & 0xff, H[3] >> 16 & 0xff, H[3] >> 8 & 0xff, H[3] & 0xff,
|
||||
H[4] >> 24 & 0xff, H[4] >> 16 & 0xff, H[4] >> 8 & 0xff, H[4] & 0xff
|
||||
];
|
||||
}
|
||||
|
||||
module.exports = sha1;
|
25
node_modules/@actions/cache/node_modules/uuid/lib/sha1.js
generated
vendored
Normal file
@ -0,0 +1,25 @@
|
||||
'use strict';
|
||||
|
||||
var crypto = require('crypto');
|
||||
|
||||
function sha1(bytes) {
|
||||
if (typeof Buffer.from === 'function') {
|
||||
// Modern Buffer API
|
||||
if (Array.isArray(bytes)) {
|
||||
bytes = Buffer.from(bytes);
|
||||
} else if (typeof bytes === 'string') {
|
||||
bytes = Buffer.from(bytes, 'utf8');
|
||||
}
|
||||
} else {
|
||||
// Pre-v4 Buffer API
|
||||
if (Array.isArray(bytes)) {
|
||||
bytes = new Buffer(bytes);
|
||||
} else if (typeof bytes === 'string') {
|
||||
bytes = new Buffer(bytes, 'utf8');
|
||||
}
|
||||
}
|
||||
|
||||
return crypto.createHash('sha1').update(bytes).digest();
|
||||
}
|
||||
|
||||
module.exports = sha1;
|
57
node_modules/@actions/cache/node_modules/uuid/lib/v35.js
generated
vendored
Normal file
@ -0,0 +1,57 @@
|
||||
var bytesToUuid = require('./bytesToUuid');
|
||||
|
||||
function uuidToBytes(uuid) {
|
||||
// Note: We assume we're being passed a valid uuid string
|
||||
var bytes = [];
|
||||
uuid.replace(/[a-fA-F0-9]{2}/g, function(hex) {
|
||||
bytes.push(parseInt(hex, 16));
|
||||
});
|
||||
|
||||
return bytes;
|
||||
}
|
||||
|
||||
function stringToBytes(str) {
|
||||
str = unescape(encodeURIComponent(str)); // UTF8 escape
|
||||
var bytes = new Array(str.length);
|
||||
for (var i = 0; i < str.length; i++) {
|
||||
bytes[i] = str.charCodeAt(i);
|
||||
}
|
||||
return bytes;
|
||||
}
|
||||
|
||||
module.exports = function(name, version, hashfunc) {
|
||||
var generateUUID = function(value, namespace, buf, offset) {
|
||||
var off = buf && offset || 0;
|
||||
|
||||
if (typeof(value) == 'string') value = stringToBytes(value);
|
||||
if (typeof(namespace) == 'string') namespace = uuidToBytes(namespace);
|
||||
|
||||
if (!Array.isArray(value)) throw TypeError('value must be an array of bytes');
|
||||
if (!Array.isArray(namespace) || namespace.length !== 16) throw TypeError('namespace must be uuid string or an Array of 16 byte values');
|
||||
|
||||
// Per 4.3
|
||||
var bytes = hashfunc(namespace.concat(value));
|
||||
bytes[6] = (bytes[6] & 0x0f) | version;
|
||||
bytes[8] = (bytes[8] & 0x3f) | 0x80;
|
||||
|
||||
if (buf) {
|
||||
for (var idx = 0; idx < 16; ++idx) {
|
||||
buf[off+idx] = bytes[idx];
|
||||
}
|
||||
}
|
||||
|
||||
return buf || bytesToUuid(bytes);
|
||||
};
|
||||
|
||||
// Function#name is not settable on some platforms (#270)
|
||||
try {
|
||||
generateUUID.name = name;
|
||||
} catch (err) {
|
||||
}
|
||||
|
||||
// Pre-defined namespaces, per Appendix C
|
||||
generateUUID.DNS = '6ba7b810-9dad-11d1-80b4-00c04fd430c8';
|
||||
generateUUID.URL = '6ba7b811-9dad-11d1-80b4-00c04fd430c8';
|
||||
|
||||
return generateUUID;
|
||||
};
|
49
node_modules/@actions/cache/node_modules/uuid/package.json
generated
vendored
Normal file
@ -0,0 +1,49 @@
|
||||
{
|
||||
"name": "uuid",
|
||||
"version": "3.4.0",
|
||||
"description": "RFC4122 (v1, v4, and v5) UUIDs",
|
||||
"commitlint": {
|
||||
"extends": [
|
||||
"@commitlint/config-conventional"
|
||||
]
|
||||
},
|
||||
"keywords": [
|
||||
"uuid",
|
||||
"guid",
|
||||
"rfc4122"
|
||||
],
|
||||
"license": "MIT",
|
||||
"bin": {
|
||||
"uuid": "./bin/uuid"
|
||||
},
|
||||
"devDependencies": {
|
||||
"@commitlint/cli": "~8.2.0",
|
||||
"@commitlint/config-conventional": "~8.2.0",
|
||||
"eslint": "~6.4.0",
|
||||
"husky": "~3.0.5",
|
||||
"mocha": "6.2.0",
|
||||
"runmd": "1.2.1",
|
||||
"standard-version": "7.0.0"
|
||||
},
|
||||
"scripts": {
|
||||
"lint": "eslint .",
|
||||
"test": "npm run lint && mocha test/test.js",
|
||||
"md": "runmd --watch --output=README.md README_js.md",
|
||||
"release": "standard-version",
|
||||
"prepare": "runmd --output=README.md README_js.md"
|
||||
},
|
||||
"browser": {
|
||||
"./lib/rng.js": "./lib/rng-browser.js",
|
||||
"./lib/sha1.js": "./lib/sha1-browser.js",
|
||||
"./lib/md5.js": "./lib/md5-browser.js"
|
||||
},
|
||||
"repository": {
|
||||
"type": "git",
|
||||
"url": "https://github.com/uuidjs/uuid.git"
|
||||
},
|
||||
"husky": {
|
||||
"hooks": {
|
||||
"commit-msg": "commitlint -E HUSKY_GIT_PARAMS"
|
||||
}
|
||||
}
|
||||
}
|
109
node_modules/@actions/cache/node_modules/uuid/v1.js
generated
vendored
Normal file
@ -0,0 +1,109 @@
|
||||
var rng = require('./lib/rng');
|
||||
var bytesToUuid = require('./lib/bytesToUuid');
|
||||
|
||||
// **`v1()` - Generate time-based UUID**
|
||||
//
|
||||
// Inspired by https://github.com/LiosK/UUID.js
|
||||
// and http://docs.python.org/library/uuid.html
|
||||
|
||||
var _nodeId;
|
||||
var _clockseq;
|
||||
|
||||
// Previous uuid creation time
|
||||
var _lastMSecs = 0;
|
||||
var _lastNSecs = 0;
|
||||
|
||||
// See https://github.com/uuidjs/uuid for API details
|
||||
function v1(options, buf, offset) {
|
||||
var i = buf && offset || 0;
|
||||
var b = buf || [];
|
||||
|
||||
options = options || {};
|
||||
var node = options.node || _nodeId;
|
||||
var clockseq = options.clockseq !== undefined ? options.clockseq : _clockseq;
|
||||
|
||||
// node and clockseq need to be initialized to random values if they're not
|
||||
// specified. We do this lazily to minimize issues related to insufficient
|
||||
// system entropy. See #189
|
||||
if (node == null || clockseq == null) {
|
||||
var seedBytes = rng();
|
||||
if (node == null) {
|
||||
// Per 4.5, create a 48-bit node id (47 random bits + multicast bit = 1)
|
||||
node = _nodeId = [
|
||||
seedBytes[0] | 0x01,
|
||||
seedBytes[1], seedBytes[2], seedBytes[3], seedBytes[4], seedBytes[5]
|
||||
];
|
||||
}
|
||||
if (clockseq == null) {
|
||||
// Per 4.2.2, randomize (14 bit) clockseq
|
||||
clockseq = _clockseq = (seedBytes[6] << 8 | seedBytes[7]) & 0x3fff;
|
||||
}
|
||||
}
|
||||
|
||||
// UUID timestamps are 100 nano-second units since the Gregorian epoch,
|
||||
// (1582-10-15 00:00). JSNumbers aren't precise enough for this, so
|
||||
// time is handled internally as 'msecs' (integer milliseconds) and 'nsecs'
|
||||
// (100-nanoseconds offset from msecs) since unix epoch, 1970-01-01 00:00.
|
||||
var msecs = options.msecs !== undefined ? options.msecs : new Date().getTime();
|
||||
|
||||
// Per 4.2.1.2, use count of uuid's generated during the current clock
|
||||
// cycle to simulate higher resolution clock
|
||||
var nsecs = options.nsecs !== undefined ? options.nsecs : _lastNSecs + 1;
|
||||
|
||||
// Time since last uuid creation (in msecs)
|
||||
var dt = (msecs - _lastMSecs) + (nsecs - _lastNSecs)/10000;
|
||||
|
||||
// Per 4.2.1.2, Bump clockseq on clock regression
|
||||
if (dt < 0 && options.clockseq === undefined) {
|
||||
clockseq = clockseq + 1 & 0x3fff;
|
||||
}
|
||||
|
||||
// Reset nsecs if clock regresses (new clockseq) or we've moved onto a new
|
||||
// time interval
|
||||
if ((dt < 0 || msecs > _lastMSecs) && options.nsecs === undefined) {
|
||||
nsecs = 0;
|
||||
}
|
||||
|
||||
// Per 4.2.1.2 Throw error if too many uuids are requested
|
||||
if (nsecs >= 10000) {
|
||||
throw new Error('uuid.v1(): Can\'t create more than 10M uuids/sec');
|
||||
}
|
||||
|
||||
_lastMSecs = msecs;
|
||||
_lastNSecs = nsecs;
|
||||
_clockseq = clockseq;
|
||||
|
||||
// Per 4.1.4 - Convert from unix epoch to Gregorian epoch
|
||||
msecs += 12219292800000;
|
||||
|
||||
// `time_low`
|
||||
var tl = ((msecs & 0xfffffff) * 10000 + nsecs) % 0x100000000;
|
||||
b[i++] = tl >>> 24 & 0xff;
|
||||
b[i++] = tl >>> 16 & 0xff;
|
||||
b[i++] = tl >>> 8 & 0xff;
|
||||
b[i++] = tl & 0xff;
|
||||
|
||||
// `time_mid`
|
||||
var tmh = (msecs / 0x100000000 * 10000) & 0xfffffff;
|
||||
b[i++] = tmh >>> 8 & 0xff;
|
||||
b[i++] = tmh & 0xff;
|
||||
|
||||
// `time_high_and_version`
|
||||
b[i++] = tmh >>> 24 & 0xf | 0x10; // include version
|
||||
b[i++] = tmh >>> 16 & 0xff;
|
||||
|
||||
// `clock_seq_hi_and_reserved` (Per 4.2.2 - include variant)
|
||||
b[i++] = clockseq >>> 8 | 0x80;
|
||||
|
||||
// `clock_seq_low`
|
||||
b[i++] = clockseq & 0xff;
|
||||
|
||||
// `node`
|
||||
for (var n = 0; n < 6; ++n) {
|
||||
b[i + n] = node[n];
|
||||
}
|
||||
|
||||
return buf ? buf : bytesToUuid(b);
|
||||
}
|
||||
|
||||
module.exports = v1;
|
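The time-based generator above accepts the options described in its comments. A minimal usage sketch of how those options drive the output, assuming the vendored uuid 3.4.0 API (the `require` path and the literal values are illustrative only):

```js
const v1 = require('uuid/v1');

// Default call: node id and clockseq are seeded lazily from the RNG.
const id = v1();

// Explicit options, per the comments above: msecs/nsecs fix the timestamp,
// clockseq and node pin the remaining bytes, so the result is deterministic.
const fixed = v1({
  msecs: new Date('2024-01-01T00:00:00Z').getTime(),
  nsecs: 1234,                                // 100-ns units within the msec (< 10000)
  clockseq: 0x1234 & 0x3fff,                  // 14-bit clock sequence
  node: [0x01, 0x23, 0x45, 0x67, 0x89, 0xab]  // 48-bit node id
});

// Writing into a preallocated 16-byte array instead of returning a string.
const bytes = new Array(16);
v1(null, bytes, 0);
```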
4
node_modules/@actions/cache/node_modules/uuid/v3.js
generated
vendored
Normal file
4
node_modules/@actions/cache/node_modules/uuid/v3.js
generated
vendored
Normal file
@ -0,0 +1,4 @@
var v35 = require('./lib/v35.js');
var md5 = require('./lib/md5');

module.exports = v35('v3', 0x30, md5);
29
node_modules/@actions/cache/node_modules/uuid/v4.js
generated
vendored
Normal file
29
node_modules/@actions/cache/node_modules/uuid/v4.js
generated
vendored
Normal file
@ -0,0 +1,29 @@
var rng = require('./lib/rng');
var bytesToUuid = require('./lib/bytesToUuid');

function v4(options, buf, offset) {
  var i = buf && offset || 0;

  if (typeof(options) == 'string') {
    buf = options === 'binary' ? new Array(16) : null;
    options = null;
  }
  options = options || {};

  var rnds = options.random || (options.rng || rng)();

  // Per 4.4, set bits for version and `clock_seq_hi_and_reserved`
  rnds[6] = (rnds[6] & 0x0f) | 0x40;
  rnds[8] = (rnds[8] & 0x3f) | 0x80;

  // Copy bytes to buffer, if provided
  if (buf) {
    for (var ii = 0; ii < 16; ++ii) {
      buf[i + ii] = rnds[ii];
    }
  }

  return buf || bytesToUuid(rnds);
}

module.exports = v4;
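For the random generator above, the `buf`/`offset` parameters fill a caller-provided array instead of returning a string. A small sketch (the `require` path assumes the vendored uuid 3.4.0 layout; the printed value is illustrative):

```js
const v4 = require('uuid/v4');

// String form: sixteen random bytes with version/variant bits set, then hex-encoded.
console.log(v4()); // e.g. '109156be-c4fb-41ea-b1b4-efe1671c5836'

// Buffer form: bytes are written into `buf` starting at `offset`, and `buf` is returned.
const buf = new Array(32);
v4(null, buf, 0);   // first UUID occupies bytes 0..15
v4(null, buf, 16);  // second UUID occupies bytes 16..31
```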
3
node_modules/@actions/cache/node_modules/uuid/v5.js
generated
vendored
Normal file
3
node_modules/@actions/cache/node_modules/uuid/v5.js
generated
vendored
Normal file
@ -0,0 +1,3 @@
var v35 = require('./lib/v35.js');
var sha1 = require('./lib/sha1');
module.exports = v35('v5', 0x50, sha1);
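`v3` and `v5` are both produced by the `v35` factory shown earlier, which also attaches the RFC 4122 namespace constants. A hedged usage sketch (uuid 3.4.0 API assumed):

```js
const v3 = require('uuid/v3');
const v5 = require('uuid/v5');

// Name-based UUIDs are deterministic: same name + namespace => same UUID.
const a = v5('example.com', v5.DNS);
const b = v5('example.com', v5.DNS);
console.log(a === b); // true

// The URL namespace constant comes from the same factory.
const fromUrl = v3('https://example.com/page', v3.URL);
```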
56
node_modules/@actions/cache/package.json
generated
vendored
Normal file
56
node_modules/@actions/cache/package.json
generated
vendored
Normal file
@ -0,0 +1,56 @@
{
  "name": "@actions/cache",
  "version": "3.2.4",
  "preview": true,
  "description": "Actions cache lib",
  "keywords": [
    "github",
    "actions",
    "cache"
  ],
  "homepage": "https://github.com/actions/toolkit/tree/main/packages/cache",
  "license": "MIT",
  "main": "lib/cache.js",
  "types": "lib/cache.d.ts",
  "directories": {
    "lib": "lib",
    "test": "__tests__"
  },
  "files": [
    "lib",
    "!.DS_Store"
  ],
  "publishConfig": {
    "access": "public"
  },
  "repository": {
    "type": "git",
    "url": "git+https://github.com/actions/toolkit.git",
    "directory": "packages/cache"
  },
  "scripts": {
    "audit-moderate": "npm install && npm audit --json --audit-level=moderate > audit.json",
    "test": "echo \"Error: run tests from root\" && exit 1",
    "tsc": "tsc"
  },
  "bugs": {
    "url": "https://github.com/actions/toolkit/issues"
  },
  "dependencies": {
    "@actions/core": "^1.10.0",
    "@actions/exec": "^1.0.1",
    "@actions/glob": "^0.1.0",
    "@actions/http-client": "^2.1.1",
    "@actions/io": "^1.0.1",
    "@azure/abort-controller": "^1.1.0",
    "@azure/ms-rest-js": "^2.6.0",
    "@azure/storage-blob": "^12.13.0",
    "semver": "^6.3.1",
    "uuid": "^3.3.3"
  },
  "devDependencies": {
    "@types/semver": "^6.0.0",
    "@types/uuid": "^3.4.5",
    "typescript": "^5.2.2"
  }
}
9
node_modules/@actions/glob/LICENSE.md
generated
vendored
Normal file
9
node_modules/@actions/glob/LICENSE.md
generated
vendored
Normal file
@ -0,0 +1,9 @@
The MIT License (MIT)

Copyright 2019 GitHub

Permission is hereby granted, free of charge, to any person obtaining a copy of this software and associated documentation files (the "Software"), to deal in the Software without restriction, including without limitation the rights to use, copy, modify, merge, publish, distribute, sublicense, and/or sell copies of the Software, and to permit persons to whom the Software is furnished to do so, subject to the following conditions:

The above copyright notice and this permission notice shall be included in all copies or substantial portions of the Software.

THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE SOFTWARE.
113
node_modules/@actions/glob/README.md
generated
vendored
Normal file
113
node_modules/@actions/glob/README.md
generated
vendored
Normal file
@ -0,0 +1,113 @@
# `@actions/glob`

## Usage

### Basic

You can use this package to search for files matching glob patterns.

Relative paths and absolute paths are both allowed. Relative paths are rooted against the current working directory.

```js
const glob = require('@actions/glob');

const patterns = ['**/tar.gz', '**/tar.bz']
const globber = await glob.create(patterns.join('\n'))
const files = await globber.glob()
```

### Opt out of following symbolic links

```js
const glob = require('@actions/glob');

const globber = await glob.create('**', {followSymbolicLinks: false})
const files = await globber.glob()
```

### Iterator

When dealing with a large amount of results, consider iterating the results as they are returned:

```js
const glob = require('@actions/glob');

const globber = await glob.create('**')
for await (const file of globber.globGenerator()) {
  console.log(file)
}
```

## Recommended action inputs

Glob follows symbolic links by default. Following is often appropriate unless deleting files.

Users may want to opt-out from following symbolic links for other reasons. For example,
excessive amounts of symbolic links can create the appearance of very, very many files
and slow the search.

When an action allows a user to specify input patterns, it is generally recommended to
allow users to opt-out from following symbolic links.

Snippet from `action.yml`:

```yaml
inputs:
  files:
    description: 'Files to print'
    required: true
  follow-symbolic-links:
    description: 'Indicates whether to follow symbolic links'
    default: true
```

And corresponding toolkit consumption:

```js
const core = require('@actions/core')
const glob = require('@actions/glob')

const globOptions = {
  followSymbolicLinks: core.getInput('follow-symbolic-links').toUpper() !== 'FALSE'
}
const globber = glob.create(core.getInput('files'), globOptions)
for await (const file of globber.globGenerator()) {
  console.log(file)
}
```

## Patterns

### Glob behavior

Patterns `*`, `?`, `[...]`, `**` (globstar) are supported.

With the following behaviors:
- File names that begin with `.` may be included in the results
- Case insensitive on Windows
- Directory separator `/` and `\` both supported on Windows

### Tilde expansion

Supports basic tilde expansion, for current user HOME replacement only.

Example:
- `~` may expand to /Users/johndoe
- `~/foo` may expand to /Users/johndoe/foo

### Comments

Patterns that begin with `#` are treated as comments.

### Exclude patterns

Leading `!` changes the meaning of an include pattern to exclude.

Multiple leading `!` flips the meaning.

### Escaping

Wrapping special characters in `[]` can be used to escape literal glob characters
in a file name. For example the literal file name `hello[a-z]` can be escaped as `hello[[]a-z]`.

On Linux/macOS `\` is also treated as an escape character.
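The pattern rules above (comments, `!` excludes, `[]` escaping) can be combined in a single newline-separated pattern string. A minimal sketch using the documented `glob.create` API; the file names are hypothetical:

```js
const glob = require('@actions/glob');

async function run() {
  const patterns = [
    '# build outputs, excluding source maps',  // comment line, ignored
    'dist/**',
    '!dist/**/*.map',                          // leading ! turns an include into an exclude
    'hello[[]a-z]'                             // matches the literal file name "hello[a-z]"
  ].join('\n');

  const globber = await glob.create(patterns, {followSymbolicLinks: false});
  for (const file of await globber.glob()) {
    console.log(file);
  }
}

run();
```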
10
node_modules/@actions/glob/lib/glob.d.ts
generated
vendored
Normal file
10
node_modules/@actions/glob/lib/glob.d.ts
generated
vendored
Normal file
@ -0,0 +1,10 @@
import { Globber } from './internal-globber';
import { GlobOptions } from './internal-glob-options';
export { Globber, GlobOptions };
/**
 * Constructs a globber
 *
 * @param patterns Patterns separated by newlines
 * @param options Glob options
 */
export declare function create(patterns: string, options?: GlobOptions): Promise<Globber>;
26
node_modules/@actions/glob/lib/glob.js
generated
vendored
Normal file
26
node_modules/@actions/glob/lib/glob.js
generated
vendored
Normal file
@ -0,0 +1,26 @@
"use strict";
var __awaiter = (this && this.__awaiter) || function (thisArg, _arguments, P, generator) {
    function adopt(value) { return value instanceof P ? value : new P(function (resolve) { resolve(value); }); }
    return new (P || (P = Promise))(function (resolve, reject) {
        function fulfilled(value) { try { step(generator.next(value)); } catch (e) { reject(e); } }
        function rejected(value) { try { step(generator["throw"](value)); } catch (e) { reject(e); } }
        function step(result) { result.done ? resolve(result.value) : adopt(result.value).then(fulfilled, rejected); }
        step((generator = generator.apply(thisArg, _arguments || [])).next());
    });
};
Object.defineProperty(exports, "__esModule", { value: true });
exports.create = void 0;
const internal_globber_1 = require("./internal-globber");
/**
 * Constructs a globber
 *
 * @param patterns Patterns separated by newlines
 * @param options Glob options
 */
function create(patterns, options) {
    return __awaiter(this, void 0, void 0, function* () {
        return yield internal_globber_1.DefaultGlobber.create(patterns, options);
    });
}
exports.create = create;
//# sourceMappingURL=glob.js.map
1
node_modules/@actions/glob/lib/glob.js.map
generated
vendored
Normal file
1
node_modules/@actions/glob/lib/glob.js.map
generated
vendored
Normal file
@ -0,0 +1 @@
{"version":3,"file":"glob.js","sourceRoot":"","sources":["../src/glob.ts"],"names":[],"mappings":";;;;;;;;;;;;AAAA,yDAA0D;AAK1D;;;;;GAKG;AACH,SAAsB,MAAM,CAC1B,QAAgB,EAChB,OAAqB;;QAErB,OAAO,MAAM,iCAAc,CAAC,MAAM,CAAC,QAAQ,EAAE,OAAO,CAAC,CAAA;IACvD,CAAC;CAAA;AALD,wBAKC"}
5
node_modules/@actions/glob/lib/internal-glob-options-helper.d.ts
generated
vendored
Normal file
5
node_modules/@actions/glob/lib/internal-glob-options-helper.d.ts
generated
vendored
Normal file
@ -0,0 +1,5 @@
import { GlobOptions } from './internal-glob-options';
/**
 * Returns a copy with defaults filled in.
 */
export declare function getOptions(copy?: GlobOptions): GlobOptions;
50
node_modules/@actions/glob/lib/internal-glob-options-helper.js
generated
vendored
Normal file
50
node_modules/@actions/glob/lib/internal-glob-options-helper.js
generated
vendored
Normal file
@ -0,0 +1,50 @@
|
||||
"use strict";
|
||||
var __createBinding = (this && this.__createBinding) || (Object.create ? (function(o, m, k, k2) {
|
||||
if (k2 === undefined) k2 = k;
|
||||
Object.defineProperty(o, k2, { enumerable: true, get: function() { return m[k]; } });
|
||||
}) : (function(o, m, k, k2) {
|
||||
if (k2 === undefined) k2 = k;
|
||||
o[k2] = m[k];
|
||||
}));
|
||||
var __setModuleDefault = (this && this.__setModuleDefault) || (Object.create ? (function(o, v) {
|
||||
Object.defineProperty(o, "default", { enumerable: true, value: v });
|
||||
}) : function(o, v) {
|
||||
o["default"] = v;
|
||||
});
|
||||
var __importStar = (this && this.__importStar) || function (mod) {
|
||||
if (mod && mod.__esModule) return mod;
|
||||
var result = {};
|
||||
if (mod != null) for (var k in mod) if (k !== "default" && Object.hasOwnProperty.call(mod, k)) __createBinding(result, mod, k);
|
||||
__setModuleDefault(result, mod);
|
||||
return result;
|
||||
};
|
||||
Object.defineProperty(exports, "__esModule", { value: true });
|
||||
exports.getOptions = void 0;
|
||||
const core = __importStar(require("@actions/core"));
|
||||
/**
|
||||
* Returns a copy with defaults filled in.
|
||||
*/
|
||||
function getOptions(copy) {
|
||||
const result = {
|
||||
followSymbolicLinks: true,
|
||||
implicitDescendants: true,
|
||||
omitBrokenSymbolicLinks: true
|
||||
};
|
||||
if (copy) {
|
||||
if (typeof copy.followSymbolicLinks === 'boolean') {
|
||||
result.followSymbolicLinks = copy.followSymbolicLinks;
|
||||
core.debug(`followSymbolicLinks '${result.followSymbolicLinks}'`);
|
||||
}
|
||||
if (typeof copy.implicitDescendants === 'boolean') {
|
||||
result.implicitDescendants = copy.implicitDescendants;
|
||||
core.debug(`implicitDescendants '${result.implicitDescendants}'`);
|
||||
}
|
||||
if (typeof copy.omitBrokenSymbolicLinks === 'boolean') {
|
||||
result.omitBrokenSymbolicLinks = copy.omitBrokenSymbolicLinks;
|
||||
core.debug(`omitBrokenSymbolicLinks '${result.omitBrokenSymbolicLinks}'`);
|
||||
}
|
||||
}
|
||||
return result;
|
||||
}
|
||||
exports.getOptions = getOptions;
|
||||
//# sourceMappingURL=internal-glob-options-helper.js.map
|
1
node_modules/@actions/glob/lib/internal-glob-options-helper.js.map
generated
vendored
Normal file
1
node_modules/@actions/glob/lib/internal-glob-options-helper.js.map
generated
vendored
Normal file
@ -0,0 +1 @@
{"version":3,"file":"internal-glob-options-helper.js","sourceRoot":"","sources":["../src/internal-glob-options-helper.ts"],"names":[],"mappings":";;;;;;;;;;;;;;;;;;;;;;AAAA,oDAAqC;AAGrC;;GAEG;AACH,SAAgB,UAAU,CAAC,IAAkB;IAC3C,MAAM,MAAM,GAAgB;QAC1B,mBAAmB,EAAE,IAAI;QACzB,mBAAmB,EAAE,IAAI;QACzB,uBAAuB,EAAE,IAAI;KAC9B,CAAA;IAED,IAAI,IAAI,EAAE;QACR,IAAI,OAAO,IAAI,CAAC,mBAAmB,KAAK,SAAS,EAAE;YACjD,MAAM,CAAC,mBAAmB,GAAG,IAAI,CAAC,mBAAmB,CAAA;YACrD,IAAI,CAAC,KAAK,CAAC,wBAAwB,MAAM,CAAC,mBAAmB,GAAG,CAAC,CAAA;SAClE;QAED,IAAI,OAAO,IAAI,CAAC,mBAAmB,KAAK,SAAS,EAAE;YACjD,MAAM,CAAC,mBAAmB,GAAG,IAAI,CAAC,mBAAmB,CAAA;YACrD,IAAI,CAAC,KAAK,CAAC,wBAAwB,MAAM,CAAC,mBAAmB,GAAG,CAAC,CAAA;SAClE;QAED,IAAI,OAAO,IAAI,CAAC,uBAAuB,KAAK,SAAS,EAAE;YACrD,MAAM,CAAC,uBAAuB,GAAG,IAAI,CAAC,uBAAuB,CAAA;YAC7D,IAAI,CAAC,KAAK,CAAC,4BAA4B,MAAM,CAAC,uBAAuB,GAAG,CAAC,CAAA;SAC1E;KACF;IAED,OAAO,MAAM,CAAA;AACf,CAAC;AAzBD,gCAyBC"}
29
node_modules/@actions/glob/lib/internal-glob-options.d.ts
generated
vendored
Normal file
29
node_modules/@actions/glob/lib/internal-glob-options.d.ts
generated
vendored
Normal file
@ -0,0 +1,29 @@
/**
 * Options to control globbing behavior
 */
export interface GlobOptions {
    /**
     * Indicates whether to follow symbolic links. Generally should set to false
     * when deleting files.
     *
     * @default true
     */
    followSymbolicLinks?: boolean;
    /**
     * Indicates whether directories that match a glob pattern, should implicitly
     * cause all descendant paths to be matched.
     *
     * For example, given the directory `my-dir`, the following glob patterns
     * would produce the same results: `my-dir/**`, `my-dir/`, `my-dir`
     *
     * @default true
     */
    implicitDescendants?: boolean;
    /**
     * Indicates whether broken symbolic should be ignored and omitted from the
     * result set. Otherwise an error will be thrown.
     *
     * @default true
     */
    omitBrokenSymbolicLinks?: boolean;
}
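Of the options declared above, `implicitDescendants` is the easiest to miss: with the default `true`, a pattern that names a directory matches everything under it. A sketch of the difference through the public `glob.create` entry point (the directory name is hypothetical):

```js
const glob = require('@actions/glob');

async function compare() {
  // Default options: 'my-dir' implicitly behaves like 'my-dir/**'.
  const withDescendants = await glob.create('my-dir');

  // Opting out: only the directory entry itself can match.
  const directoryOnly = await glob.create('my-dir', {implicitDescendants: false});

  console.log((await withDescendants.glob()).length);
  console.log((await directoryOnly.glob()).length);
}
```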
3
node_modules/@actions/glob/lib/internal-glob-options.js
generated
vendored
Normal file
3
node_modules/@actions/glob/lib/internal-glob-options.js
generated
vendored
Normal file
@ -0,0 +1,3 @@
"use strict";
Object.defineProperty(exports, "__esModule", { value: true });
//# sourceMappingURL=internal-glob-options.js.map
1
node_modules/@actions/glob/lib/internal-glob-options.js.map
generated
vendored
Normal file
1
node_modules/@actions/glob/lib/internal-glob-options.js.map
generated
vendored
Normal file
@ -0,0 +1 @@
{"version":3,"file":"internal-glob-options.js","sourceRoot":"","sources":["../src/internal-glob-options.ts"],"names":[],"mappings":""}
42
node_modules/@actions/glob/lib/internal-globber.d.ts
generated
vendored
Normal file
42
node_modules/@actions/glob/lib/internal-globber.d.ts
generated
vendored
Normal file
@ -0,0 +1,42 @@
import { GlobOptions } from './internal-glob-options';
export { GlobOptions };
/**
 * Used to match files and directories
 */
export interface Globber {
    /**
     * Returns the search path preceding the first glob segment, from each pattern.
     * Duplicates and descendants of other paths are filtered out.
     *
     * Example 1: The patterns `/foo/*` and `/bar/*` returns `/foo` and `/bar`.
     *
     * Example 2: The patterns `/foo/*` and `/foo/bar/*` returns `/foo`.
     */
    getSearchPaths(): string[];
    /**
     * Returns files and directories matching the glob patterns.
     *
     * Order of the results is not guaranteed.
     */
    glob(): Promise<string[]>;
    /**
     * Returns files and directories matching the glob patterns.
     *
     * Order of the results is not guaranteed.
     */
    globGenerator(): AsyncGenerator<string, void>;
}
export declare class DefaultGlobber implements Globber {
    private readonly options;
    private readonly patterns;
    private readonly searchPaths;
    private constructor();
    getSearchPaths(): string[];
    glob(): Promise<string[]>;
    globGenerator(): AsyncGenerator<string, void>;
    /**
     * Constructs a DefaultGlobber
     */
    static create(patterns: string, options?: GlobOptions): Promise<DefaultGlobber>;
    private static stat;
}
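A sketch of the `Globber` surface declared above: `getSearchPaths()` reports the fixed prefixes that will be walked, while `globGenerator()` streams matches instead of buffering them. The paths are hypothetical:

```js
const glob = require('@actions/glob');

async function walk() {
  const globber = await glob.create(['/tmp/cache/**', '/tmp/cache/restore/**'].join('\n'));

  // Descendant search paths are filtered out, so this logs just ['/tmp/cache'].
  console.log(globber.getSearchPaths());

  // Stream results as they are found.
  for await (const item of globber.globGenerator()) {
    console.log(item);
  }
}
```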
235
node_modules/@actions/glob/lib/internal-globber.js
generated
vendored
Normal file
235
node_modules/@actions/glob/lib/internal-globber.js
generated
vendored
Normal file
@ -0,0 +1,235 @@
|
||||
"use strict";
|
||||
var __createBinding = (this && this.__createBinding) || (Object.create ? (function(o, m, k, k2) {
|
||||
if (k2 === undefined) k2 = k;
|
||||
Object.defineProperty(o, k2, { enumerable: true, get: function() { return m[k]; } });
|
||||
}) : (function(o, m, k, k2) {
|
||||
if (k2 === undefined) k2 = k;
|
||||
o[k2] = m[k];
|
||||
}));
|
||||
var __setModuleDefault = (this && this.__setModuleDefault) || (Object.create ? (function(o, v) {
|
||||
Object.defineProperty(o, "default", { enumerable: true, value: v });
|
||||
}) : function(o, v) {
|
||||
o["default"] = v;
|
||||
});
|
||||
var __importStar = (this && this.__importStar) || function (mod) {
|
||||
if (mod && mod.__esModule) return mod;
|
||||
var result = {};
|
||||
if (mod != null) for (var k in mod) if (k !== "default" && Object.hasOwnProperty.call(mod, k)) __createBinding(result, mod, k);
|
||||
__setModuleDefault(result, mod);
|
||||
return result;
|
||||
};
|
||||
var __awaiter = (this && this.__awaiter) || function (thisArg, _arguments, P, generator) {
|
||||
function adopt(value) { return value instanceof P ? value : new P(function (resolve) { resolve(value); }); }
|
||||
return new (P || (P = Promise))(function (resolve, reject) {
|
||||
function fulfilled(value) { try { step(generator.next(value)); } catch (e) { reject(e); } }
|
||||
function rejected(value) { try { step(generator["throw"](value)); } catch (e) { reject(e); } }
|
||||
function step(result) { result.done ? resolve(result.value) : adopt(result.value).then(fulfilled, rejected); }
|
||||
step((generator = generator.apply(thisArg, _arguments || [])).next());
|
||||
});
|
||||
};
|
||||
var __asyncValues = (this && this.__asyncValues) || function (o) {
|
||||
if (!Symbol.asyncIterator) throw new TypeError("Symbol.asyncIterator is not defined.");
|
||||
var m = o[Symbol.asyncIterator], i;
|
||||
return m ? m.call(o) : (o = typeof __values === "function" ? __values(o) : o[Symbol.iterator](), i = {}, verb("next"), verb("throw"), verb("return"), i[Symbol.asyncIterator] = function () { return this; }, i);
|
||||
function verb(n) { i[n] = o[n] && function (v) { return new Promise(function (resolve, reject) { v = o[n](v), settle(resolve, reject, v.done, v.value); }); }; }
|
||||
function settle(resolve, reject, d, v) { Promise.resolve(v).then(function(v) { resolve({ value: v, done: d }); }, reject); }
|
||||
};
|
||||
var __await = (this && this.__await) || function (v) { return this instanceof __await ? (this.v = v, this) : new __await(v); }
|
||||
var __asyncGenerator = (this && this.__asyncGenerator) || function (thisArg, _arguments, generator) {
|
||||
if (!Symbol.asyncIterator) throw new TypeError("Symbol.asyncIterator is not defined.");
|
||||
var g = generator.apply(thisArg, _arguments || []), i, q = [];
|
||||
return i = {}, verb("next"), verb("throw"), verb("return"), i[Symbol.asyncIterator] = function () { return this; }, i;
|
||||
function verb(n) { if (g[n]) i[n] = function (v) { return new Promise(function (a, b) { q.push([n, v, a, b]) > 1 || resume(n, v); }); }; }
|
||||
function resume(n, v) { try { step(g[n](v)); } catch (e) { settle(q[0][3], e); } }
|
||||
function step(r) { r.value instanceof __await ? Promise.resolve(r.value.v).then(fulfill, reject) : settle(q[0][2], r); }
|
||||
function fulfill(value) { resume("next", value); }
|
||||
function reject(value) { resume("throw", value); }
|
||||
function settle(f, v) { if (f(v), q.shift(), q.length) resume(q[0][0], q[0][1]); }
|
||||
};
|
||||
Object.defineProperty(exports, "__esModule", { value: true });
|
||||
exports.DefaultGlobber = void 0;
|
||||
const core = __importStar(require("@actions/core"));
|
||||
const fs = __importStar(require("fs"));
|
||||
const globOptionsHelper = __importStar(require("./internal-glob-options-helper"));
|
||||
const path = __importStar(require("path"));
|
||||
const patternHelper = __importStar(require("./internal-pattern-helper"));
|
||||
const internal_match_kind_1 = require("./internal-match-kind");
|
||||
const internal_pattern_1 = require("./internal-pattern");
|
||||
const internal_search_state_1 = require("./internal-search-state");
|
||||
const IS_WINDOWS = process.platform === 'win32';
|
||||
class DefaultGlobber {
|
||||
constructor(options) {
|
||||
this.patterns = [];
|
||||
this.searchPaths = [];
|
||||
this.options = globOptionsHelper.getOptions(options);
|
||||
}
|
||||
getSearchPaths() {
|
||||
// Return a copy
|
||||
return this.searchPaths.slice();
|
||||
}
|
||||
glob() {
|
||||
var e_1, _a;
|
||||
return __awaiter(this, void 0, void 0, function* () {
|
||||
const result = [];
|
||||
try {
|
||||
for (var _b = __asyncValues(this.globGenerator()), _c; _c = yield _b.next(), !_c.done;) {
|
||||
const itemPath = _c.value;
|
||||
result.push(itemPath);
|
||||
}
|
||||
}
|
||||
catch (e_1_1) { e_1 = { error: e_1_1 }; }
|
||||
finally {
|
||||
try {
|
||||
if (_c && !_c.done && (_a = _b.return)) yield _a.call(_b);
|
||||
}
|
||||
finally { if (e_1) throw e_1.error; }
|
||||
}
|
||||
return result;
|
||||
});
|
||||
}
|
||||
globGenerator() {
|
||||
return __asyncGenerator(this, arguments, function* globGenerator_1() {
|
||||
// Fill in defaults options
|
||||
const options = globOptionsHelper.getOptions(this.options);
|
||||
// Implicit descendants?
|
||||
const patterns = [];
|
||||
for (const pattern of this.patterns) {
|
||||
patterns.push(pattern);
|
||||
if (options.implicitDescendants &&
|
||||
(pattern.trailingSeparator ||
|
||||
pattern.segments[pattern.segments.length - 1] !== '**')) {
|
||||
patterns.push(new internal_pattern_1.Pattern(pattern.negate, true, pattern.segments.concat('**')));
|
||||
}
|
||||
}
|
||||
// Push the search paths
|
||||
const stack = [];
|
||||
for (const searchPath of patternHelper.getSearchPaths(patterns)) {
|
||||
core.debug(`Search path '${searchPath}'`);
|
||||
// Exists?
|
||||
try {
|
||||
// Intentionally using lstat. Detection for broken symlink
|
||||
// will be performed later (if following symlinks).
|
||||
yield __await(fs.promises.lstat(searchPath));
|
||||
}
|
||||
catch (err) {
|
||||
if (err.code === 'ENOENT') {
|
||||
continue;
|
||||
}
|
||||
throw err;
|
||||
}
|
||||
stack.unshift(new internal_search_state_1.SearchState(searchPath, 1));
|
||||
}
|
||||
// Search
|
||||
const traversalChain = []; // used to detect cycles
|
||||
while (stack.length) {
|
||||
// Pop
|
||||
const item = stack.pop();
|
||||
// Match?
|
||||
const match = patternHelper.match(patterns, item.path);
|
||||
const partialMatch = !!match || patternHelper.partialMatch(patterns, item.path);
|
||||
if (!match && !partialMatch) {
|
||||
continue;
|
||||
}
|
||||
// Stat
|
||||
const stats = yield __await(DefaultGlobber.stat(item, options, traversalChain)
|
||||
// Broken symlink, or symlink cycle detected, or no longer exists
|
||||
);
|
||||
// Broken symlink, or symlink cycle detected, or no longer exists
|
||||
if (!stats) {
|
||||
continue;
|
||||
}
|
||||
// Directory
|
||||
if (stats.isDirectory()) {
|
||||
// Matched
|
||||
if (match & internal_match_kind_1.MatchKind.Directory) {
|
||||
yield yield __await(item.path);
|
||||
}
|
||||
// Descend?
|
||||
else if (!partialMatch) {
|
||||
continue;
|
||||
}
|
||||
// Push the child items in reverse
|
||||
const childLevel = item.level + 1;
|
||||
const childItems = (yield __await(fs.promises.readdir(item.path))).map(x => new internal_search_state_1.SearchState(path.join(item.path, x), childLevel));
|
||||
stack.push(...childItems.reverse());
|
||||
}
|
||||
// File
|
||||
else if (match & internal_match_kind_1.MatchKind.File) {
|
||||
yield yield __await(item.path);
|
||||
}
|
||||
}
|
||||
});
|
||||
}
|
||||
/**
|
||||
* Constructs a DefaultGlobber
|
||||
*/
|
||||
static create(patterns, options) {
|
||||
return __awaiter(this, void 0, void 0, function* () {
|
||||
const result = new DefaultGlobber(options);
|
||||
if (IS_WINDOWS) {
|
||||
patterns = patterns.replace(/\r\n/g, '\n');
|
||||
patterns = patterns.replace(/\r/g, '\n');
|
||||
}
|
||||
const lines = patterns.split('\n').map(x => x.trim());
|
||||
for (const line of lines) {
|
||||
// Empty or comment
|
||||
if (!line || line.startsWith('#')) {
|
||||
continue;
|
||||
}
|
||||
// Pattern
|
||||
else {
|
||||
result.patterns.push(new internal_pattern_1.Pattern(line));
|
||||
}
|
||||
}
|
||||
result.searchPaths.push(...patternHelper.getSearchPaths(result.patterns));
|
||||
return result;
|
||||
});
|
||||
}
|
||||
static stat(item, options, traversalChain) {
|
||||
return __awaiter(this, void 0, void 0, function* () {
|
||||
// Note:
|
||||
// `stat` returns info about the target of a symlink (or symlink chain)
|
||||
// `lstat` returns info about a symlink itself
|
||||
let stats;
|
||||
if (options.followSymbolicLinks) {
|
||||
try {
|
||||
// Use `stat` (following symlinks)
|
||||
stats = yield fs.promises.stat(item.path);
|
||||
}
|
||||
catch (err) {
|
||||
if (err.code === 'ENOENT') {
|
||||
if (options.omitBrokenSymbolicLinks) {
|
||||
core.debug(`Broken symlink '${item.path}'`);
|
||||
return undefined;
|
||||
}
|
||||
throw new Error(`No information found for the path '${item.path}'. This may indicate a broken symbolic link.`);
|
||||
}
|
||||
throw err;
|
||||
}
|
||||
}
|
||||
else {
|
||||
// Use `lstat` (not following symlinks)
|
||||
stats = yield fs.promises.lstat(item.path);
|
||||
}
|
||||
// Note, isDirectory() returns false for the lstat of a symlink
|
||||
if (stats.isDirectory() && options.followSymbolicLinks) {
|
||||
// Get the realpath
|
||||
const realPath = yield fs.promises.realpath(item.path);
|
||||
// Fixup the traversal chain to match the item level
|
||||
while (traversalChain.length >= item.level) {
|
||||
traversalChain.pop();
|
||||
}
|
||||
// Test for a cycle
|
||||
if (traversalChain.some((x) => x === realPath)) {
|
||||
core.debug(`Symlink cycle detected for path '${item.path}' and realpath '${realPath}'`);
|
||||
return undefined;
|
||||
}
|
||||
// Update the traversal chain
|
||||
traversalChain.push(realPath);
|
||||
}
|
||||
return stats;
|
||||
});
|
||||
}
|
||||
}
|
||||
exports.DefaultGlobber = DefaultGlobber;
|
||||
//# sourceMappingURL=internal-globber.js.map
|
1
node_modules/@actions/glob/lib/internal-globber.js.map
generated
vendored
Normal file
1
node_modules/@actions/glob/lib/internal-globber.js.map
generated
vendored
Normal file
@ -0,0 +1 @@
|
||||
{"version":3,"file":"internal-globber.js","sourceRoot":"","sources":["../src/internal-globber.ts"],"names":[],"mappings":";;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;AAAA,oDAAqC;AACrC,uCAAwB;AACxB,kFAAmE;AACnE,2CAA4B;AAC5B,yEAA0D;AAE1D,+DAA+C;AAC/C,yDAA0C;AAC1C,mEAAmD;AAEnD,MAAM,UAAU,GAAG,OAAO,CAAC,QAAQ,KAAK,OAAO,CAAA;AAiC/C,MAAa,cAAc;IAKzB,YAAoB,OAAqB;QAHxB,aAAQ,GAAc,EAAE,CAAA;QACxB,gBAAW,GAAa,EAAE,CAAA;QAGzC,IAAI,CAAC,OAAO,GAAG,iBAAiB,CAAC,UAAU,CAAC,OAAO,CAAC,CAAA;IACtD,CAAC;IAED,cAAc;QACZ,gBAAgB;QAChB,OAAO,IAAI,CAAC,WAAW,CAAC,KAAK,EAAE,CAAA;IACjC,CAAC;IAEK,IAAI;;;YACR,MAAM,MAAM,GAAa,EAAE,CAAA;;gBAC3B,KAA6B,IAAA,KAAA,cAAA,IAAI,CAAC,aAAa,EAAE,CAAA,IAAA;oBAAtC,MAAM,QAAQ,WAAA,CAAA;oBACvB,MAAM,CAAC,IAAI,CAAC,QAAQ,CAAC,CAAA;iBACtB;;;;;;;;;YACD,OAAO,MAAM,CAAA;;KACd;IAEM,aAAa;;YAClB,2BAA2B;YAC3B,MAAM,OAAO,GAAG,iBAAiB,CAAC,UAAU,CAAC,IAAI,CAAC,OAAO,CAAC,CAAA;YAC1D,wBAAwB;YACxB,MAAM,QAAQ,GAAc,EAAE,CAAA;YAC9B,KAAK,MAAM,OAAO,IAAI,IAAI,CAAC,QAAQ,EAAE;gBACnC,QAAQ,CAAC,IAAI,CAAC,OAAO,CAAC,CAAA;gBACtB,IACE,OAAO,CAAC,mBAAmB;oBAC3B,CAAC,OAAO,CAAC,iBAAiB;wBACxB,OAAO,CAAC,QAAQ,CAAC,OAAO,CAAC,QAAQ,CAAC,MAAM,GAAG,CAAC,CAAC,KAAK,IAAI,CAAC,EACzD;oBACA,QAAQ,CAAC,IAAI,CACX,IAAI,0BAAO,CAAC,OAAO,CAAC,MAAM,EAAE,IAAI,EAAE,OAAO,CAAC,QAAQ,CAAC,MAAM,CAAC,IAAI,CAAC,CAAC,CACjE,CAAA;iBACF;aACF;YAED,wBAAwB;YAExB,MAAM,KAAK,GAAkB,EAAE,CAAA;YAC/B,KAAK,MAAM,UAAU,IAAI,aAAa,CAAC,cAAc,CAAC,QAAQ,CAAC,EAAE;gBAC/D,IAAI,CAAC,KAAK,CAAC,gBAAgB,UAAU,GAAG,CAAC,CAAA;gBAEzC,UAAU;gBACV,IAAI;oBACF,0DAA0D;oBAC1D,mDAAmD;oBACnD,cAAM,EAAE,CAAC,QAAQ,CAAC,KAAK,CAAC,UAAU,CAAC,CAAA,CAAA;iBACpC;gBAAC,OAAO,GAAG,EAAE;oBACZ,IAAI,GAAG,CAAC,IAAI,KAAK,QAAQ,EAAE;wBACzB,SAAQ;qBACT;oBACD,MAAM,GAAG,CAAA;iBACV;gBAED,KAAK,CAAC,OAAO,CAAC,IAAI,mCAAW,CAAC,UAAU,EAAE,CAAC,CAAC,CAAC,CAAA;aAC9C;YAED,SAAS;YACT,MAAM,cAAc,GAAa,EAAE,CAAA,CAAC,wBAAwB;YAC5D,OAAO,KAAK,CAAC,MAAM,EAAE;gBACnB,MAAM;gBACN,MAAM,IAAI,GAAG,KAAK,CAAC,GAAG,EAAiB,CAAA;gBAEvC,SAAS;gBACT,MAAM,KAAK,GAAG,aAAa,CAAC,KAAK,CAAC,QAAQ,EAAE,IAAI,CAAC,IAAI,CAAC,CAAA;gBACtD,MAAM,YAAY,GAChB,CAAC,CAAC,KAAK,IAAI,aAAa,CAAC,YAAY,CAAC,QAAQ,EAAE,IAAI,CAAC,IAAI,CAAC,CAAA;gBAC5D,IAAI,CAAC,KAAK,IAAI,CAAC,YAAY,EAAE;oBAC3B,SAAQ;iBACT;gBAED,OAAO;gBACP,MAAM,KAAK,GAAyB,cAAM,cAAc,CAAC,IAAI,CAC3D,IAAI,EACJ,OAAO,EACP,cAAc,CACf;gBAED,iEAAiE;iBAFhE,CAAA;gBAED,iEAAiE;gBACjE,IAAI,CAAC,KAAK,EAAE;oBACV,SAAQ;iBACT;gBAED,YAAY;gBACZ,IAAI,KAAK,CAAC,WAAW,EAAE,EAAE;oBACvB,UAAU;oBACV,IAAI,KAAK,GAAG,+BAAS,CAAC,SAAS,EAAE;wBAC/B,oBAAM,IAAI,CAAC,IAAI,CAAA,CAAA;qBAChB;oBACD,WAAW;yBACN,IAAI,CAAC,YAAY,EAAE;wBACtB,SAAQ;qBACT;oBAED,kCAAkC;oBAClC,MAAM,UAAU,GAAG,IAAI,CAAC,KAAK,GAAG,CAAC,CAAA;oBACjC,MAAM,UAAU,GAAG,CAAC,cAAM,EAAE,CAAC,QAAQ,CAAC,OAAO,CAAC,IAAI,CAAC,IAAI,CAAC,CAAA,CAAC,CAAC,GAAG,CAC3D,CAAC,CAAC,EAAE,CAAC,IAAI,mCAAW,CAAC,IAAI,CAAC,IAAI,CAAC,IAAI,CAAC,IAAI,EAAE,CAAC,CAAC,EAAE,UAAU,CAAC,CAC1D,CAAA;oBACD,KAAK,CAAC,IAAI,CAAC,GAAG,UAAU,CAAC,OAAO,EAAE,CAAC,CAAA;iBACpC;gBACD,OAAO;qBACF,IAAI,KAAK,GAAG,+BAAS,CAAC,IAAI,EAAE;oBAC/B,oBAAM,IAAI,CAAC,IAAI,CAAA,CAAA;iBAChB;aACF;QACH,CAAC;KAAA;IAED;;OAEG;IACH,MAAM,CAAO,MAAM,CACjB,QAAgB,EAChB,OAAqB;;YAErB,MAAM,MAAM,GAAG,IAAI,cAAc,CAAC,OAAO,CAAC,CAAA;YAE1C,IAAI,UAAU,EAAE;gBACd,QAAQ,GAAG,QAAQ,CAAC,OAAO,CAAC,OAAO,EAAE,IAAI,CAAC,CAAA;gBAC1C,QAAQ,GAAG,QAAQ,CAAC,OAAO,CAAC,KAAK,EAAE,IAAI,CAAC,CAAA;aACzC;YAED,MAAM,KAAK,GAAG,QAAQ,CAAC,KAAK,CAAC,IAAI,CAAC,CAAC,GAAG,CAAC,CAAC,CAAC,EAAE,CAAC,CAAC,CAAC,IAAI,EAAE,CAAC,CAAA;YACrD,KAAK,MAAM,IAAI,IAAI,KAAK,EAAE;gBACxB,mBAAmB;gBACnB,IAAI,CAAC,IAAI,IAAI,IAAI,CAAC,UAAU,CAAC,GAAG,CAAC,EAAE;oBACjC,SAAQ;iBACT;gBACD,UAAU;qBACL;oBACH,MAAM,CAAC,QAAQ,CAAC,IAAI,CAAC,IAAI,0BAAO,CAAC,IAA
I,CAAC,CAAC,CAAA;iBACxC;aACF;YAED,MAAM,CAAC,WAAW,CAAC,IAAI,CAAC,GAAG,aAAa,CAAC,cAAc,CAAC,MAAM,CAAC,QAAQ,CAAC,CAAC,CAAA;YAEzE,OAAO,MAAM,CAAA;QACf,CAAC;KAAA;IAEO,MAAM,CAAO,IAAI,CACvB,IAAiB,EACjB,OAAoB,EACpB,cAAwB;;YAExB,QAAQ;YACR,uEAAuE;YACvE,8CAA8C;YAC9C,IAAI,KAAe,CAAA;YACnB,IAAI,OAAO,CAAC,mBAAmB,EAAE;gBAC/B,IAAI;oBACF,kCAAkC;oBAClC,KAAK,GAAG,MAAM,EAAE,CAAC,QAAQ,CAAC,IAAI,CAAC,IAAI,CAAC,IAAI,CAAC,CAAA;iBAC1C;gBAAC,OAAO,GAAG,EAAE;oBACZ,IAAI,GAAG,CAAC,IAAI,KAAK,QAAQ,EAAE;wBACzB,IAAI,OAAO,CAAC,uBAAuB,EAAE;4BACnC,IAAI,CAAC,KAAK,CAAC,mBAAmB,IAAI,CAAC,IAAI,GAAG,CAAC,CAAA;4BAC3C,OAAO,SAAS,CAAA;yBACjB;wBAED,MAAM,IAAI,KAAK,CACb,sCAAsC,IAAI,CAAC,IAAI,8CAA8C,CAC9F,CAAA;qBACF;oBAED,MAAM,GAAG,CAAA;iBACV;aACF;iBAAM;gBACL,uCAAuC;gBACvC,KAAK,GAAG,MAAM,EAAE,CAAC,QAAQ,CAAC,KAAK,CAAC,IAAI,CAAC,IAAI,CAAC,CAAA;aAC3C;YAED,+DAA+D;YAC/D,IAAI,KAAK,CAAC,WAAW,EAAE,IAAI,OAAO,CAAC,mBAAmB,EAAE;gBACtD,mBAAmB;gBACnB,MAAM,QAAQ,GAAW,MAAM,EAAE,CAAC,QAAQ,CAAC,QAAQ,CAAC,IAAI,CAAC,IAAI,CAAC,CAAA;gBAE9D,oDAAoD;gBACpD,OAAO,cAAc,CAAC,MAAM,IAAI,IAAI,CAAC,KAAK,EAAE;oBAC1C,cAAc,CAAC,GAAG,EAAE,CAAA;iBACrB;gBAED,mBAAmB;gBACnB,IAAI,cAAc,CAAC,IAAI,CAAC,CAAC,CAAS,EAAE,EAAE,CAAC,CAAC,KAAK,QAAQ,CAAC,EAAE;oBACtD,IAAI,CAAC,KAAK,CACR,oCAAoC,IAAI,CAAC,IAAI,mBAAmB,QAAQ,GAAG,CAC5E,CAAA;oBACD,OAAO,SAAS,CAAA;iBACjB;gBAED,6BAA6B;gBAC7B,cAAc,CAAC,IAAI,CAAC,QAAQ,CAAC,CAAA;aAC9B;YAED,OAAO,KAAK,CAAA;QACd,CAAC;KAAA;CACF;AAvMD,wCAuMC"}
|
13
node_modules/@actions/glob/lib/internal-match-kind.d.ts
generated
vendored
Normal file
13
node_modules/@actions/glob/lib/internal-match-kind.d.ts
generated
vendored
Normal file
@ -0,0 +1,13 @@
/**
 * Indicates whether a pattern matches a path
 */
export declare enum MatchKind {
    /** Not matched */
    None = 0,
    /** Matched if the path is a directory */
    Directory = 1,
    /** Matched if the path is a regular file */
    File = 2,
    /** Matched */
    All = 3
}
18
node_modules/@actions/glob/lib/internal-match-kind.js
generated
vendored
Normal file
18
node_modules/@actions/glob/lib/internal-match-kind.js
generated
vendored
Normal file
@ -0,0 +1,18 @@
"use strict";
Object.defineProperty(exports, "__esModule", { value: true });
exports.MatchKind = void 0;
/**
 * Indicates whether a pattern matches a path
 */
var MatchKind;
(function (MatchKind) {
    /** Not matched */
    MatchKind[MatchKind["None"] = 0] = "None";
    /** Matched if the path is a directory */
    MatchKind[MatchKind["Directory"] = 1] = "Directory";
    /** Matched if the path is a regular file */
    MatchKind[MatchKind["File"] = 2] = "File";
    /** Matched */
    MatchKind[MatchKind["All"] = 3] = "All";
})(MatchKind = exports.MatchKind || (exports.MatchKind = {}));
//# sourceMappingURL=internal-match-kind.js.map
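`MatchKind` is an internal bit-flag enum: `All` is `Directory | File`, and the globber tests membership with bitwise AND (as in `match & MatchKind.Directory` in the globber above). A small sketch of that arithmetic, requiring the internal module directly for illustration only:

```js
const {MatchKind} = require('@actions/glob/lib/internal-match-kind');

const match = MatchKind.All;                            // 3 === Directory | File
console.log(Boolean(match & MatchKind.Directory));      // true
console.log(Boolean(match & MatchKind.File));           // true
console.log(Boolean(MatchKind.None & MatchKind.File));  // false
```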
1
node_modules/@actions/glob/lib/internal-match-kind.js.map
generated
vendored
Normal file
1
node_modules/@actions/glob/lib/internal-match-kind.js.map
generated
vendored
Normal file
@ -0,0 +1 @@
{"version":3,"file":"internal-match-kind.js","sourceRoot":"","sources":["../src/internal-match-kind.ts"],"names":[],"mappings":";;;AAAA;;GAEG;AACH,IAAY,SAYX;AAZD,WAAY,SAAS;IACnB,kBAAkB;IAClB,yCAAQ,CAAA;IAER,yCAAyC;IACzC,mDAAa,CAAA;IAEb,4CAA4C;IAC5C,yCAAQ,CAAA;IAER,cAAc;IACd,uCAAsB,CAAA;AACxB,CAAC,EAZW,SAAS,GAAT,iBAAS,KAAT,iBAAS,QAYpB"}
42
node_modules/@actions/glob/lib/internal-path-helper.d.ts
generated
vendored
Normal file
42
node_modules/@actions/glob/lib/internal-path-helper.d.ts
generated
vendored
Normal file
@ -0,0 +1,42 @@
/**
 * Similar to path.dirname except normalizes the path separators and slightly better handling for Windows UNC paths.
 *
 * For example, on Linux/macOS:
 * - `/ => /`
 * - `/hello => /`
 *
 * For example, on Windows:
 * - `C:\ => C:\`
 * - `C:\hello => C:\`
 * - `C: => C:`
 * - `C:hello => C:`
 * - `\ => \`
 * - `\hello => \`
 * - `\\hello => \\hello`
 * - `\\hello\world => \\hello\world`
 */
export declare function dirname(p: string): string;
/**
 * Roots the path if not already rooted. On Windows, relative roots like `\`
 * or `C:` are expanded based on the current working directory.
 */
export declare function ensureAbsoluteRoot(root: string, itemPath: string): string;
/**
 * On Linux/macOS, true if path starts with `/`. On Windows, true for paths like:
 * `\\hello\share` and `C:\hello` (and using alternate separator).
 */
export declare function hasAbsoluteRoot(itemPath: string): boolean;
/**
 * On Linux/macOS, true if path starts with `/`. On Windows, true for paths like:
 * `\`, `\hello`, `\\hello\share`, `C:`, and `C:\hello` (and using alternate separator).
 */
export declare function hasRoot(itemPath: string): boolean;
/**
 * Removes redundant slashes and converts `/` to `\` on Windows
 */
export declare function normalizeSeparators(p: string): string;
/**
 * Normalizes the path separators and trims the trailing separator (when safe).
 * For example, `/foo/ => /foo` but `/ => /`
 */
export declare function safeTrimTrailingSeparator(p: string): string;
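These helpers are internal to the globber, but the doc comments above translate directly into calls. A sketch showing the Linux/macOS behavior (the Windows branches follow the drive and UNC examples in the comments); the internal `require` path is for illustration only:

```js
const pathHelper = require('@actions/glob/lib/internal-path-helper');

console.log(pathHelper.dirname('/hello/world'));              // '/hello'
console.log(pathHelper.dirname('/hello'));                    // '/'
console.log(pathHelper.normalizeSeparators('/foo//bar///'));  // '/foo/bar/'
console.log(pathHelper.safeTrimTrailingSeparator('/foo/'));   // '/foo'
console.log(pathHelper.safeTrimTrailingSeparator('/'));       // '/'
console.log(pathHelper.hasAbsoluteRoot('/hello'));            // true
console.log(pathHelper.hasRoot('hello'));                     // false
```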
198
node_modules/@actions/glob/lib/internal-path-helper.js
generated
vendored
Normal file
198
node_modules/@actions/glob/lib/internal-path-helper.js
generated
vendored
Normal file
@ -0,0 +1,198 @@
|
||||
"use strict";
|
||||
var __createBinding = (this && this.__createBinding) || (Object.create ? (function(o, m, k, k2) {
|
||||
if (k2 === undefined) k2 = k;
|
||||
Object.defineProperty(o, k2, { enumerable: true, get: function() { return m[k]; } });
|
||||
}) : (function(o, m, k, k2) {
|
||||
if (k2 === undefined) k2 = k;
|
||||
o[k2] = m[k];
|
||||
}));
|
||||
var __setModuleDefault = (this && this.__setModuleDefault) || (Object.create ? (function(o, v) {
|
||||
Object.defineProperty(o, "default", { enumerable: true, value: v });
|
||||
}) : function(o, v) {
|
||||
o["default"] = v;
|
||||
});
|
||||
var __importStar = (this && this.__importStar) || function (mod) {
|
||||
if (mod && mod.__esModule) return mod;
|
||||
var result = {};
|
||||
if (mod != null) for (var k in mod) if (k !== "default" && Object.hasOwnProperty.call(mod, k)) __createBinding(result, mod, k);
|
||||
__setModuleDefault(result, mod);
|
||||
return result;
|
||||
};
|
||||
var __importDefault = (this && this.__importDefault) || function (mod) {
|
||||
return (mod && mod.__esModule) ? mod : { "default": mod };
|
||||
};
|
||||
Object.defineProperty(exports, "__esModule", { value: true });
|
||||
exports.safeTrimTrailingSeparator = exports.normalizeSeparators = exports.hasRoot = exports.hasAbsoluteRoot = exports.ensureAbsoluteRoot = exports.dirname = void 0;
|
||||
const path = __importStar(require("path"));
|
||||
const assert_1 = __importDefault(require("assert"));
|
||||
const IS_WINDOWS = process.platform === 'win32';
|
||||
/**
|
||||
* Similar to path.dirname except normalizes the path separators and slightly better handling for Windows UNC paths.
|
||||
*
|
||||
* For example, on Linux/macOS:
|
||||
* - `/ => /`
|
||||
* - `/hello => /`
|
||||
*
|
||||
* For example, on Windows:
|
||||
* - `C:\ => C:\`
|
||||
* - `C:\hello => C:\`
|
||||
* - `C: => C:`
|
||||
* - `C:hello => C:`
|
||||
* - `\ => \`
|
||||
* - `\hello => \`
|
||||
* - `\\hello => \\hello`
|
||||
* - `\\hello\world => \\hello\world`
|
||||
*/
|
||||
function dirname(p) {
|
||||
// Normalize slashes and trim unnecessary trailing slash
|
||||
p = safeTrimTrailingSeparator(p);
|
||||
// Windows UNC root, e.g. \\hello or \\hello\world
|
||||
if (IS_WINDOWS && /^\\\\[^\\]+(\\[^\\]+)?$/.test(p)) {
|
||||
return p;
|
||||
}
|
||||
// Get dirname
|
||||
let result = path.dirname(p);
|
||||
// Trim trailing slash for Windows UNC root, e.g. \\hello\world\
|
||||
if (IS_WINDOWS && /^\\\\[^\\]+\\[^\\]+\\$/.test(result)) {
|
||||
result = safeTrimTrailingSeparator(result);
|
||||
}
|
||||
return result;
|
||||
}
|
||||
exports.dirname = dirname;
|
||||
/**
|
||||
* Roots the path if not already rooted. On Windows, relative roots like `\`
|
||||
* or `C:` are expanded based on the current working directory.
|
||||
*/
|
||||
function ensureAbsoluteRoot(root, itemPath) {
|
||||
assert_1.default(root, `ensureAbsoluteRoot parameter 'root' must not be empty`);
|
||||
assert_1.default(itemPath, `ensureAbsoluteRoot parameter 'itemPath' must not be empty`);
|
||||
// Already rooted
|
||||
if (hasAbsoluteRoot(itemPath)) {
|
||||
return itemPath;
|
||||
}
|
||||
// Windows
|
||||
if (IS_WINDOWS) {
|
||||
// Check for itemPath like C: or C:foo
|
||||
if (itemPath.match(/^[A-Z]:[^\\/]|^[A-Z]:$/i)) {
|
||||
let cwd = process.cwd();
|
||||
assert_1.default(cwd.match(/^[A-Z]:\\/i), `Expected current directory to start with an absolute drive root. Actual '${cwd}'`);
|
||||
// Drive letter matches cwd? Expand to cwd
|
||||
if (itemPath[0].toUpperCase() === cwd[0].toUpperCase()) {
|
||||
// Drive only, e.g. C:
|
||||
if (itemPath.length === 2) {
|
||||
// Preserve specified drive letter case (upper or lower)
|
||||
return `${itemPath[0]}:\\${cwd.substr(3)}`;
|
||||
}
|
||||
// Drive + path, e.g. C:foo
|
||||
else {
|
||||
if (!cwd.endsWith('\\')) {
|
||||
cwd += '\\';
|
||||
}
|
||||
// Preserve specified drive letter case (upper or lower)
|
||||
return `${itemPath[0]}:\\${cwd.substr(3)}${itemPath.substr(2)}`;
|
||||
}
|
||||
}
|
||||
// Different drive
|
||||
else {
|
||||
return `${itemPath[0]}:\\${itemPath.substr(2)}`;
|
||||
}
|
||||
}
|
||||
// Check for itemPath like \ or \foo
|
||||
else if (normalizeSeparators(itemPath).match(/^\\$|^\\[^\\]/)) {
|
||||
const cwd = process.cwd();
|
||||
assert_1.default(cwd.match(/^[A-Z]:\\/i), `Expected current directory to start with an absolute drive root. Actual '${cwd}'`);
|
||||
return `${cwd[0]}:\\${itemPath.substr(1)}`;
|
||||
}
|
||||
}
|
||||
assert_1.default(hasAbsoluteRoot(root), `ensureAbsoluteRoot parameter 'root' must have an absolute root`);
|
||||
// Otherwise ensure root ends with a separator
|
||||
if (root.endsWith('/') || (IS_WINDOWS && root.endsWith('\\'))) {
|
||||
// Intentionally empty
|
||||
}
|
||||
else {
|
||||
// Append separator
|
||||
root += path.sep;
|
||||
}
|
||||
return root + itemPath;
|
||||
}
|
||||
exports.ensureAbsoluteRoot = ensureAbsoluteRoot;
|
||||
/**
|
||||
* On Linux/macOS, true if path starts with `/`. On Windows, true for paths like:
|
||||
* `\\hello\share` and `C:\hello` (and using alternate separator).
|
||||
*/
|
||||
function hasAbsoluteRoot(itemPath) {
|
||||
assert_1.default(itemPath, `hasAbsoluteRoot parameter 'itemPath' must not be empty`);
|
||||
// Normalize separators
|
||||
itemPath = normalizeSeparators(itemPath);
|
||||
// Windows
|
||||
if (IS_WINDOWS) {
|
||||
// E.g. \\hello\share or C:\hello
|
||||
return itemPath.startsWith('\\\\') || /^[A-Z]:\\/i.test(itemPath);
|
||||
}
|
||||
// E.g. /hello
|
||||
return itemPath.startsWith('/');
|
||||
}
|
||||
exports.hasAbsoluteRoot = hasAbsoluteRoot;
|
||||
/**
|
||||
* On Linux/macOS, true if path starts with `/`. On Windows, true for paths like:
|
||||
* `\`, `\hello`, `\\hello\share`, `C:`, and `C:\hello` (and using alternate separator).
|
||||
*/
|
||||
function hasRoot(itemPath) {
|
||||
assert_1.default(itemPath, `isRooted parameter 'itemPath' must not be empty`);
|
||||
// Normalize separators
|
||||
itemPath = normalizeSeparators(itemPath);
|
||||
// Windows
|
||||
if (IS_WINDOWS) {
|
||||
// E.g. \ or \hello or \\hello
|
||||
// E.g. C: or C:\hello
|
||||
return itemPath.startsWith('\\') || /^[A-Z]:/i.test(itemPath);
|
||||
}
|
||||
// E.g. /hello
|
||||
return itemPath.startsWith('/');
|
||||
}
|
||||
exports.hasRoot = hasRoot;
|
||||
/**
|
||||
* Removes redundant slashes and converts `/` to `\` on Windows
|
||||
*/
|
||||
function normalizeSeparators(p) {
|
||||
p = p || '';
|
||||
// Windows
|
||||
if (IS_WINDOWS) {
|
||||
// Convert slashes on Windows
|
||||
p = p.replace(/\//g, '\\');
|
||||
// Remove redundant slashes
|
||||
const isUnc = /^\\\\+[^\\]/.test(p); // e.g. \\hello
|
||||
return (isUnc ? '\\' : '') + p.replace(/\\\\+/g, '\\'); // preserve leading \\ for UNC
|
||||
}
|
||||
// Remove redundant slashes
|
||||
return p.replace(/\/\/+/g, '/');
|
||||
}
|
||||
exports.normalizeSeparators = normalizeSeparators;
|
||||
/**
|
||||
* Normalizes the path separators and trims the trailing separator (when safe).
|
||||
* For example, `/foo/ => /foo` but `/ => /`
|
||||
*/
|
||||
function safeTrimTrailingSeparator(p) {
|
||||
// Short-circuit if empty
|
||||
if (!p) {
|
||||
return '';
|
||||
}
|
||||
// Normalize separators
|
||||
p = normalizeSeparators(p);
|
||||
// No trailing slash
|
||||
if (!p.endsWith(path.sep)) {
|
||||
return p;
|
||||
}
|
||||
// Check '/' on Linux/macOS and '\' on Windows
|
||||
if (p === path.sep) {
|
||||
return p;
|
||||
}
|
||||
// On Windows check if drive root. E.g. C:\
|
||||
if (IS_WINDOWS && /^[A-Z]:\\$/i.test(p)) {
|
||||
return p;
|
||||
}
|
||||
// Otherwise trim trailing slash
|
||||
return p.substr(0, p.length - 1);
|
||||
}
|
||||
exports.safeTrimTrailingSeparator = safeTrimTrailingSeparator;
|
||||
//# sourceMappingURL=internal-path-helper.js.map
|
1
node_modules/@actions/glob/lib/internal-path-helper.js.map
generated
vendored
Normal file
1
node_modules/@actions/glob/lib/internal-path-helper.js.map
generated
vendored
Normal file
@ -0,0 +1 @@
|
||||
{"version":3,"file":"internal-path-helper.js","sourceRoot":"","sources":["../src/internal-path-helper.ts"],"names":[],"mappings":";;;;;;;;;;;;;;;;;;;;;;;;;AAAA,2CAA4B;AAC5B,oDAA2B;AAE3B,MAAM,UAAU,GAAG,OAAO,CAAC,QAAQ,KAAK,OAAO,CAAA;AAE/C;;;;;;;;;;;;;;;;GAgBG;AACH,SAAgB,OAAO,CAAC,CAAS;IAC/B,wDAAwD;IACxD,CAAC,GAAG,yBAAyB,CAAC,CAAC,CAAC,CAAA;IAEhC,kDAAkD;IAClD,IAAI,UAAU,IAAI,yBAAyB,CAAC,IAAI,CAAC,CAAC,CAAC,EAAE;QACnD,OAAO,CAAC,CAAA;KACT;IAED,cAAc;IACd,IAAI,MAAM,GAAG,IAAI,CAAC,OAAO,CAAC,CAAC,CAAC,CAAA;IAE5B,gEAAgE;IAChE,IAAI,UAAU,IAAI,wBAAwB,CAAC,IAAI,CAAC,MAAM,CAAC,EAAE;QACvD,MAAM,GAAG,yBAAyB,CAAC,MAAM,CAAC,CAAA;KAC3C;IAED,OAAO,MAAM,CAAA;AACf,CAAC;AAlBD,0BAkBC;AAED;;;GAGG;AACH,SAAgB,kBAAkB,CAAC,IAAY,EAAE,QAAgB;IAC/D,gBAAM,CAAC,IAAI,EAAE,uDAAuD,CAAC,CAAA;IACrE,gBAAM,CAAC,QAAQ,EAAE,2DAA2D,CAAC,CAAA;IAE7E,iBAAiB;IACjB,IAAI,eAAe,CAAC,QAAQ,CAAC,EAAE;QAC7B,OAAO,QAAQ,CAAA;KAChB;IAED,UAAU;IACV,IAAI,UAAU,EAAE;QACd,sCAAsC;QACtC,IAAI,QAAQ,CAAC,KAAK,CAAC,yBAAyB,CAAC,EAAE;YAC7C,IAAI,GAAG,GAAG,OAAO,CAAC,GAAG,EAAE,CAAA;YACvB,gBAAM,CACJ,GAAG,CAAC,KAAK,CAAC,YAAY,CAAC,EACvB,4EAA4E,GAAG,GAAG,CACnF,CAAA;YAED,0CAA0C;YAC1C,IAAI,QAAQ,CAAC,CAAC,CAAC,CAAC,WAAW,EAAE,KAAK,GAAG,CAAC,CAAC,CAAC,CAAC,WAAW,EAAE,EAAE;gBACtD,sBAAsB;gBACtB,IAAI,QAAQ,CAAC,MAAM,KAAK,CAAC,EAAE;oBACzB,wDAAwD;oBACxD,OAAO,GAAG,QAAQ,CAAC,CAAC,CAAC,MAAM,GAAG,CAAC,MAAM,CAAC,CAAC,CAAC,EAAE,CAAA;iBAC3C;gBACD,2BAA2B;qBACtB;oBACH,IAAI,CAAC,GAAG,CAAC,QAAQ,CAAC,IAAI,CAAC,EAAE;wBACvB,GAAG,IAAI,IAAI,CAAA;qBACZ;oBACD,wDAAwD;oBACxD,OAAO,GAAG,QAAQ,CAAC,CAAC,CAAC,MAAM,GAAG,CAAC,MAAM,CAAC,CAAC,CAAC,GAAG,QAAQ,CAAC,MAAM,CAAC,CAAC,CAAC,EAAE,CAAA;iBAChE;aACF;YACD,kBAAkB;iBACb;gBACH,OAAO,GAAG,QAAQ,CAAC,CAAC,CAAC,MAAM,QAAQ,CAAC,MAAM,CAAC,CAAC,CAAC,EAAE,CAAA;aAChD;SACF;QACD,oCAAoC;aAC/B,IAAI,mBAAmB,CAAC,QAAQ,CAAC,CAAC,KAAK,CAAC,eAAe,CAAC,EAAE;YAC7D,MAAM,GAAG,GAAG,OAAO,CAAC,GAAG,EAAE,CAAA;YACzB,gBAAM,CACJ,GAAG,CAAC,KAAK,CAAC,YAAY,CAAC,EACvB,4EAA4E,GAAG,GAAG,CACnF,CAAA;YAED,OAAO,GAAG,GAAG,CAAC,CAAC,CAAC,MAAM,QAAQ,CAAC,MAAM,CAAC,CAAC,CAAC,EAAE,CAAA;SAC3C;KACF;IAED,gBAAM,CACJ,eAAe,CAAC,IAAI,CAAC,EACrB,gEAAgE,CACjE,CAAA;IAED,8CAA8C;IAC9C,IAAI,IAAI,CAAC,QAAQ,CAAC,GAAG,CAAC,IAAI,CAAC,UAAU,IAAI,IAAI,CAAC,QAAQ,CAAC,IAAI,CAAC,CAAC,EAAE;QAC7D,sBAAsB;KACvB;SAAM;QACL,mBAAmB;QACnB,IAAI,IAAI,IAAI,CAAC,GAAG,CAAA;KACjB;IAED,OAAO,IAAI,GAAG,QAAQ,CAAA;AACxB,CAAC;AAlED,gDAkEC;AAED;;;GAGG;AACH,SAAgB,eAAe,CAAC,QAAgB;IAC9C,gBAAM,CAAC,QAAQ,EAAE,wDAAwD,CAAC,CAAA;IAE1E,uBAAuB;IACvB,QAAQ,GAAG,mBAAmB,CAAC,QAAQ,CAAC,CAAA;IAExC,UAAU;IACV,IAAI,UAAU,EAAE;QACd,iCAAiC;QACjC,OAAO,QAAQ,CAAC,UAAU,CAAC,MAAM,CAAC,IAAI,YAAY,CAAC,IAAI,CAAC,QAAQ,CAAC,CAAA;KAClE;IAED,cAAc;IACd,OAAO,QAAQ,CAAC,UAAU,CAAC,GAAG,CAAC,CAAA;AACjC,CAAC;AAdD,0CAcC;AAED;;;GAGG;AACH,SAAgB,OAAO,CAAC,QAAgB;IACtC,gBAAM,CAAC,QAAQ,EAAE,iDAAiD,CAAC,CAAA;IAEnE,uBAAuB;IACvB,QAAQ,GAAG,mBAAmB,CAAC,QAAQ,CAAC,CAAA;IAExC,UAAU;IACV,IAAI,UAAU,EAAE;QACd,8BAA8B;QAC9B,sBAAsB;QACtB,OAAO,QAAQ,CAAC,UAAU,CAAC,IAAI,CAAC,IAAI,UAAU,CAAC,IAAI,CAAC,QAAQ,CAAC,CAAA;KAC9D;IAED,cAAc;IACd,OAAO,QAAQ,CAAC,UAAU,CAAC,GAAG,CAAC,CAAA;AACjC,CAAC;AAfD,0BAeC;AAED;;GAEG;AACH,SAAgB,mBAAmB,CAAC,CAAS;IAC3C,CAAC,GAAG,CAAC,IAAI,EAAE,CAAA;IAEX,UAAU;IACV,IAAI,UAAU,EAAE;QACd,6BAA6B;QAC7B,CAAC,GAAG,CAAC,CAAC,OAAO,CAAC,KAAK,EAAE,IAAI,CAAC,CAAA;QAE1B,2BAA2B;QAC3B,MAAM,KAAK,GAAG,aAAa,CAAC,IAAI,CAAC,CAAC,CAAC,CAAA,CAAC,eAAe;QACnD,OAAO,CAAC,KAAK,CAAC,CAAC,CAAC,IAAI,CAAC,CAAC,CAAC,EAAE,CAAC,GAAG,CAAC,CAAC,OAAO,CAAC,QAAQ,EAAE,IAAI,CAAC,CAAA,CAAC,8BAA8B;KACtF;IAED,2BAA2B;IAC3B,OAAO,CAAC,CAAC,OAAO,CAAC,QAAQ,EAAE,GAAG,CAAC,CAAA;AACjC,CAAC;AAfD,kDAeC;AAED;;;GAGG;AACH,SAAgB,yBAAyB,CAAC,CAAS;IACjD,yBAAyB;IAC
zB,IAAI,CAAC,CAAC,EAAE;QACN,OAAO,EAAE,CAAA;KACV;IAED,uBAAuB;IACvB,CAAC,GAAG,mBAAmB,CAAC,CAAC,CAAC,CAAA;IAE1B,oBAAoB;IACpB,IAAI,CAAC,CAAC,CAAC,QAAQ,CAAC,IAAI,CAAC,GAAG,CAAC,EAAE;QACzB,OAAO,CAAC,CAAA;KACT;IAED,8CAA8C;IAC9C,IAAI,CAAC,KAAK,IAAI,CAAC,GAAG,EAAE;QAClB,OAAO,CAAC,CAAA;KACT;IAED,2CAA2C;IAC3C,IAAI,UAAU,IAAI,aAAa,CAAC,IAAI,CAAC,CAAC,CAAC,EAAE;QACvC,OAAO,CAAC,CAAA;KACT;IAED,gCAAgC;IAChC,OAAO,CAAC,CAAC,MAAM,CAAC,CAAC,EAAE,CAAC,CAAC,MAAM,GAAG,CAAC,CAAC,CAAA;AAClC,CAAC;AA1BD,8DA0BC"}
|
15
node_modules/@actions/glob/lib/internal-path.d.ts
generated
vendored
Normal file
15
node_modules/@actions/glob/lib/internal-path.d.ts
generated
vendored
Normal file
@ -0,0 +1,15 @@
/**
 * Helper class for parsing paths into segments
 */
export declare class Path {
    segments: string[];
    /**
     * Constructs a Path
     * @param itemPath Path or array of segments
     */
    constructor(itemPath: string | string[]);
    /**
     * Converts the path to it's string representation
     */
    toString(): string;
}
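The internal `Path` helper splits a path into segments and reassembles it. A sketch assuming POSIX separators; the internal `require` path and file names are illustrative:

```js
const {Path} = require('@actions/glob/lib/internal-path');

// From a string: the root becomes the first segment.
const fromString = new Path('/foo/bar/baz.txt');
console.log(fromString.segments);   // ['/', 'foo', 'bar', 'baz.txt']
console.log(fromString.toString()); // '/foo/bar/baz.txt'

// From segments: the same path can be rebuilt piece by piece.
const fromSegments = new Path(['/', 'foo', 'bar', 'baz.txt']);
console.log(fromSegments.toString()); // '/foo/bar/baz.txt'
```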
113
node_modules/@actions/glob/lib/internal-path.js
generated
vendored
Normal file
113
node_modules/@actions/glob/lib/internal-path.js
generated
vendored
Normal file
@ -0,0 +1,113 @@
|
||||
"use strict";
|
||||
var __createBinding = (this && this.__createBinding) || (Object.create ? (function(o, m, k, k2) {
|
||||
if (k2 === undefined) k2 = k;
|
||||
Object.defineProperty(o, k2, { enumerable: true, get: function() { return m[k]; } });
|
||||
}) : (function(o, m, k, k2) {
|
||||
if (k2 === undefined) k2 = k;
|
||||
o[k2] = m[k];
|
||||
}));
|
||||
var __setModuleDefault = (this && this.__setModuleDefault) || (Object.create ? (function(o, v) {
|
||||
Object.defineProperty(o, "default", { enumerable: true, value: v });
|
||||
}) : function(o, v) {
|
||||
o["default"] = v;
|
||||
});
|
||||
var __importStar = (this && this.__importStar) || function (mod) {
|
||||
if (mod && mod.__esModule) return mod;
|
||||
var result = {};
|
||||
if (mod != null) for (var k in mod) if (k !== "default" && Object.hasOwnProperty.call(mod, k)) __createBinding(result, mod, k);
|
||||
__setModuleDefault(result, mod);
|
||||
return result;
|
||||
};
|
||||
var __importDefault = (this && this.__importDefault) || function (mod) {
|
||||
return (mod && mod.__esModule) ? mod : { "default": mod };
|
||||
};
|
||||
Object.defineProperty(exports, "__esModule", { value: true });
|
||||
exports.Path = void 0;
|
||||
const path = __importStar(require("path"));
|
||||
const pathHelper = __importStar(require("./internal-path-helper"));
|
||||
const assert_1 = __importDefault(require("assert"));
|
||||
const IS_WINDOWS = process.platform === 'win32';
|
||||
/**
|
||||
* Helper class for parsing paths into segments
|
||||
*/
|
||||
class Path {
|
||||
/**
|
||||
* Constructs a Path
|
||||
* @param itemPath Path or array of segments
|
||||
*/
|
||||
constructor(itemPath) {
|
||||
this.segments = [];
|
||||
// String
|
||||
if (typeof itemPath === 'string') {
|
||||
assert_1.default(itemPath, `Parameter 'itemPath' must not be empty`);
|
||||
// Normalize slashes and trim unnecessary trailing slash
|
||||
itemPath = pathHelper.safeTrimTrailingSeparator(itemPath);
|
||||
// Not rooted
|
||||
if (!pathHelper.hasRoot(itemPath)) {
|
||||
this.segments = itemPath.split(path.sep);
|
||||
}
|
||||
// Rooted
|
||||
else {
|
||||
// Add all segments, while not at the root
|
||||
let remaining = itemPath;
|
||||
let dir = pathHelper.dirname(remaining);
|
||||
while (dir !== remaining) {
|
||||
// Add the segment
|
||||
const basename = path.basename(remaining);
|
||||
this.segments.unshift(basename);
|
||||
// Truncate the last segment
|
||||
remaining = dir;
|
||||
dir = pathHelper.dirname(remaining);
|
||||
}
|
||||
// Remainder is the root
|
||||
this.segments.unshift(remaining);
|
||||
}
|
||||
}
|
||||
// Array
|
||||
else {
|
||||
// Must not be empty
|
||||
assert_1.default(itemPath.length > 0, `Parameter 'itemPath' must not be an empty array`);
|
||||
// Each segment
|
||||
for (let i = 0; i < itemPath.length; i++) {
|
||||
let segment = itemPath[i];
|
||||
// Must not be empty
|
||||
assert_1.default(segment, `Parameter 'itemPath' must not contain any empty segments`);
|
||||
// Normalize slashes
|
||||
segment = pathHelper.normalizeSeparators(itemPath[i]);
|
||||
// Root segment
|
||||
if (i === 0 && pathHelper.hasRoot(segment)) {
|
||||
segment = pathHelper.safeTrimTrailingSeparator(segment);
|
||||
assert_1.default(segment === pathHelper.dirname(segment), `Parameter 'itemPath' root segment contains information for multiple segments`);
|
||||
this.segments.push(segment);
|
||||
}
|
||||
// All other segments
|
||||
else {
|
||||
// Must not contain slash
|
||||
assert_1.default(!segment.includes(path.sep), `Parameter 'itemPath' contains unexpected path separators`);
|
||||
this.segments.push(segment);
|
||||
}
|
||||
}
|
||||
}
|
||||
}
|
||||
/**
|
||||
* Converts the path to it's string representation
|
||||
*/
|
||||
toString() {
|
||||
// First segment
|
||||
let result = this.segments[0];
|
||||
// All others
|
||||
let skipSlash = result.endsWith(path.sep) || (IS_WINDOWS && /^[A-Z]:$/i.test(result));
|
||||
for (let i = 1; i < this.segments.length; i++) {
|
||||
if (skipSlash) {
|
||||
skipSlash = false;
|
||||
}
|
||||
else {
|
||||
result += path.sep;
|
||||
}
|
||||
result += this.segments[i];
|
||||
}
|
||||
return result;
|
||||
}
|
||||
}
|
||||
exports.Path = Path;
|
||||
//# sourceMappingURL=internal-path.js.map
|
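For reference, the `Path` helper above splits a path into segments and can reassemble it with `toString()`. A minimal usage sketch (illustrative only: `internal-path` is an implementation detail of `@actions/glob`, not public API, and the example assumes Linux/macOS separators):

```js
// Illustrative sketch; requires @actions/glob to be installed locally.
const { Path } = require('@actions/glob/lib/internal-path');

const p = new Path('/foo/bar/');          // trailing slash is trimmed during parsing
console.log(p.segments);                  // [ '/', 'foo', 'bar' ]
console.log(p.toString());                // '/foo/bar'

const fromSegments = new Path(['/', 'foo', 'bar']);
console.log(fromSegments.toString());     // '/foo/bar'
```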
1
node_modules/@actions/glob/lib/internal-path.js.map
generated
vendored
Normal file
1
node_modules/@actions/glob/lib/internal-path.js.map
generated
vendored
Normal file
@ -0,0 +1 @@
|
||||
{"version":3,"file":"internal-path.js","sourceRoot":"","sources":["../src/internal-path.ts"],"names":[],"mappings":";;;;;;;;;;;;;;;;;;;;;;;;;AAAA,2CAA4B;AAC5B,mEAAoD;AACpD,oDAA2B;AAE3B,MAAM,UAAU,GAAG,OAAO,CAAC,QAAQ,KAAK,OAAO,CAAA;AAE/C;;GAEG;AACH,MAAa,IAAI;IAGf;;;OAGG;IACH,YAAY,QAA2B;QANvC,aAAQ,GAAa,EAAE,CAAA;QAOrB,SAAS;QACT,IAAI,OAAO,QAAQ,KAAK,QAAQ,EAAE;YAChC,gBAAM,CAAC,QAAQ,EAAE,wCAAwC,CAAC,CAAA;YAE1D,wDAAwD;YACxD,QAAQ,GAAG,UAAU,CAAC,yBAAyB,CAAC,QAAQ,CAAC,CAAA;YAEzD,aAAa;YACb,IAAI,CAAC,UAAU,CAAC,OAAO,CAAC,QAAQ,CAAC,EAAE;gBACjC,IAAI,CAAC,QAAQ,GAAG,QAAQ,CAAC,KAAK,CAAC,IAAI,CAAC,GAAG,CAAC,CAAA;aACzC;YACD,SAAS;iBACJ;gBACH,0CAA0C;gBAC1C,IAAI,SAAS,GAAG,QAAQ,CAAA;gBACxB,IAAI,GAAG,GAAG,UAAU,CAAC,OAAO,CAAC,SAAS,CAAC,CAAA;gBACvC,OAAO,GAAG,KAAK,SAAS,EAAE;oBACxB,kBAAkB;oBAClB,MAAM,QAAQ,GAAG,IAAI,CAAC,QAAQ,CAAC,SAAS,CAAC,CAAA;oBACzC,IAAI,CAAC,QAAQ,CAAC,OAAO,CAAC,QAAQ,CAAC,CAAA;oBAE/B,4BAA4B;oBAC5B,SAAS,GAAG,GAAG,CAAA;oBACf,GAAG,GAAG,UAAU,CAAC,OAAO,CAAC,SAAS,CAAC,CAAA;iBACpC;gBAED,wBAAwB;gBACxB,IAAI,CAAC,QAAQ,CAAC,OAAO,CAAC,SAAS,CAAC,CAAA;aACjC;SACF;QACD,QAAQ;aACH;YACH,oBAAoB;YACpB,gBAAM,CACJ,QAAQ,CAAC,MAAM,GAAG,CAAC,EACnB,iDAAiD,CAClD,CAAA;YAED,eAAe;YACf,KAAK,IAAI,CAAC,GAAG,CAAC,EAAE,CAAC,GAAG,QAAQ,CAAC,MAAM,EAAE,CAAC,EAAE,EAAE;gBACxC,IAAI,OAAO,GAAG,QAAQ,CAAC,CAAC,CAAC,CAAA;gBAEzB,oBAAoB;gBACpB,gBAAM,CACJ,OAAO,EACP,0DAA0D,CAC3D,CAAA;gBAED,oBAAoB;gBACpB,OAAO,GAAG,UAAU,CAAC,mBAAmB,CAAC,QAAQ,CAAC,CAAC,CAAC,CAAC,CAAA;gBAErD,eAAe;gBACf,IAAI,CAAC,KAAK,CAAC,IAAI,UAAU,CAAC,OAAO,CAAC,OAAO,CAAC,EAAE;oBAC1C,OAAO,GAAG,UAAU,CAAC,yBAAyB,CAAC,OAAO,CAAC,CAAA;oBACvD,gBAAM,CACJ,OAAO,KAAK,UAAU,CAAC,OAAO,CAAC,OAAO,CAAC,EACvC,8EAA8E,CAC/E,CAAA;oBACD,IAAI,CAAC,QAAQ,CAAC,IAAI,CAAC,OAAO,CAAC,CAAA;iBAC5B;gBACD,qBAAqB;qBAChB;oBACH,yBAAyB;oBACzB,gBAAM,CACJ,CAAC,OAAO,CAAC,QAAQ,CAAC,IAAI,CAAC,GAAG,CAAC,EAC3B,0DAA0D,CAC3D,CAAA;oBACD,IAAI,CAAC,QAAQ,CAAC,IAAI,CAAC,OAAO,CAAC,CAAA;iBAC5B;aACF;SACF;IACH,CAAC;IAED;;OAEG;IACH,QAAQ;QACN,gBAAgB;QAChB,IAAI,MAAM,GAAG,IAAI,CAAC,QAAQ,CAAC,CAAC,CAAC,CAAA;QAE7B,aAAa;QACb,IAAI,SAAS,GACX,MAAM,CAAC,QAAQ,CAAC,IAAI,CAAC,GAAG,CAAC,IAAI,CAAC,UAAU,IAAI,WAAW,CAAC,IAAI,CAAC,MAAM,CAAC,CAAC,CAAA;QACvE,KAAK,IAAI,CAAC,GAAG,CAAC,EAAE,CAAC,GAAG,IAAI,CAAC,QAAQ,CAAC,MAAM,EAAE,CAAC,EAAE,EAAE;YAC7C,IAAI,SAAS,EAAE;gBACb,SAAS,GAAG,KAAK,CAAA;aAClB;iBAAM;gBACL,MAAM,IAAI,IAAI,CAAC,GAAG,CAAA;aACnB;YAED,MAAM,IAAI,IAAI,CAAC,QAAQ,CAAC,CAAC,CAAC,CAAA;SAC3B;QAED,OAAO,MAAM,CAAA;IACf,CAAC;CACF;AAvGD,oBAuGC"}
|
15
node_modules/@actions/glob/lib/internal-pattern-helper.d.ts
generated
vendored
Normal file
15
node_modules/@actions/glob/lib/internal-pattern-helper.d.ts
generated
vendored
Normal file
@ -0,0 +1,15 @@
import { MatchKind } from './internal-match-kind';
import { Pattern } from './internal-pattern';
/**
 * Given an array of patterns, returns an array of paths to search.
 * Duplicates and paths under other included paths are filtered out.
 */
export declare function getSearchPaths(patterns: Pattern[]): string[];
/**
 * Matches the patterns against the path
 */
export declare function match(patterns: Pattern[], itemPath: string): MatchKind;
/**
 * Checks whether to descend further into the directory
 */
export declare function partialMatch(patterns: Pattern[], itemPath: string): boolean;
94
node_modules/@actions/glob/lib/internal-pattern-helper.js
generated
vendored
Normal file
94
node_modules/@actions/glob/lib/internal-pattern-helper.js
generated
vendored
Normal file
@ -0,0 +1,94 @@
|
||||
"use strict";
|
||||
var __createBinding = (this && this.__createBinding) || (Object.create ? (function(o, m, k, k2) {
|
||||
if (k2 === undefined) k2 = k;
|
||||
Object.defineProperty(o, k2, { enumerable: true, get: function() { return m[k]; } });
|
||||
}) : (function(o, m, k, k2) {
|
||||
if (k2 === undefined) k2 = k;
|
||||
o[k2] = m[k];
|
||||
}));
|
||||
var __setModuleDefault = (this && this.__setModuleDefault) || (Object.create ? (function(o, v) {
|
||||
Object.defineProperty(o, "default", { enumerable: true, value: v });
|
||||
}) : function(o, v) {
|
||||
o["default"] = v;
|
||||
});
|
||||
var __importStar = (this && this.__importStar) || function (mod) {
|
||||
if (mod && mod.__esModule) return mod;
|
||||
var result = {};
|
||||
if (mod != null) for (var k in mod) if (k !== "default" && Object.hasOwnProperty.call(mod, k)) __createBinding(result, mod, k);
|
||||
__setModuleDefault(result, mod);
|
||||
return result;
|
||||
};
|
||||
Object.defineProperty(exports, "__esModule", { value: true });
|
||||
exports.partialMatch = exports.match = exports.getSearchPaths = void 0;
|
||||
const pathHelper = __importStar(require("./internal-path-helper"));
|
||||
const internal_match_kind_1 = require("./internal-match-kind");
|
||||
const IS_WINDOWS = process.platform === 'win32';
|
||||
/**
|
||||
* Given an array of patterns, returns an array of paths to search.
|
||||
* Duplicates and paths under other included paths are filtered out.
|
||||
*/
|
||||
function getSearchPaths(patterns) {
|
||||
// Ignore negate patterns
|
||||
patterns = patterns.filter(x => !x.negate);
|
||||
// Create a map of all search paths
|
||||
const searchPathMap = {};
|
||||
for (const pattern of patterns) {
|
||||
const key = IS_WINDOWS
|
||||
? pattern.searchPath.toUpperCase()
|
||||
: pattern.searchPath;
|
||||
searchPathMap[key] = 'candidate';
|
||||
}
|
||||
const result = [];
|
||||
for (const pattern of patterns) {
|
||||
// Check if already included
|
||||
const key = IS_WINDOWS
|
||||
? pattern.searchPath.toUpperCase()
|
||||
: pattern.searchPath;
|
||||
if (searchPathMap[key] === 'included') {
|
||||
continue;
|
||||
}
|
||||
// Check for an ancestor search path
|
||||
let foundAncestor = false;
|
||||
let tempKey = key;
|
||||
let parent = pathHelper.dirname(tempKey);
|
||||
while (parent !== tempKey) {
|
||||
if (searchPathMap[parent]) {
|
||||
foundAncestor = true;
|
||||
break;
|
||||
}
|
||||
tempKey = parent;
|
||||
parent = pathHelper.dirname(tempKey);
|
||||
}
|
||||
// Include the search pattern in the result
|
||||
if (!foundAncestor) {
|
||||
result.push(pattern.searchPath);
|
||||
searchPathMap[key] = 'included';
|
||||
}
|
||||
}
|
||||
return result;
|
||||
}
|
||||
exports.getSearchPaths = getSearchPaths;
|
||||
/**
|
||||
* Matches the patterns against the path
|
||||
*/
|
||||
function match(patterns, itemPath) {
|
||||
let result = internal_match_kind_1.MatchKind.None;
|
||||
for (const pattern of patterns) {
|
||||
if (pattern.negate) {
|
||||
result &= ~pattern.match(itemPath);
|
||||
}
|
||||
else {
|
||||
result |= pattern.match(itemPath);
|
||||
}
|
||||
}
|
||||
return result;
|
||||
}
|
||||
exports.match = match;
|
||||
/**
|
||||
* Checks whether to descend further into the directory
|
||||
*/
|
||||
function partialMatch(patterns, itemPath) {
|
||||
return patterns.some(x => !x.negate && x.partialMatch(itemPath));
|
||||
}
|
||||
exports.partialMatch = partialMatch;
|
||||
//# sourceMappingURL=internal-pattern-helper.js.map
|
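`getSearchPaths` collapses overlapping search roots (negate patterns are ignored for that purpose), while `match` folds each pattern's `MatchKind` into a bit mask: positive patterns OR their result in, negate patterns clear it. A rough sketch of that behavior (illustrative only, these are internal modules of `@actions/glob`; paths assume Linux/macOS):

```js
// Illustrative sketch; requires @actions/glob to be installed locally.
const patternHelper = require('@actions/glob/lib/internal-pattern-helper');
const { Pattern } = require('@actions/glob/lib/internal-pattern');
const { MatchKind } = require('@actions/glob/lib/internal-match-kind');

const patterns = [
  new Pattern('/work/**'),       // include everything under /work
  new Pattern('!/work/tmp/**')   // ...except /work/tmp
];

// Negate patterns are skipped when computing search roots, so one root remains.
console.log(patternHelper.getSearchPaths(patterns));                                 // [ '/work' ]

console.log(patternHelper.match(patterns, '/work/src/app.js') === MatchKind.All);    // true
console.log(patternHelper.match(patterns, '/work/tmp/cache.log') === MatchKind.All); // false
```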
1
node_modules/@actions/glob/lib/internal-pattern-helper.js.map
generated
vendored
Normal file
1
node_modules/@actions/glob/lib/internal-pattern-helper.js.map
generated
vendored
Normal file
@ -0,0 +1 @@
|
||||
{"version":3,"file":"internal-pattern-helper.js","sourceRoot":"","sources":["../src/internal-pattern-helper.ts"],"names":[],"mappings":";;;;;;;;;;;;;;;;;;;;;;AAAA,mEAAoD;AACpD,+DAA+C;AAG/C,MAAM,UAAU,GAAG,OAAO,CAAC,QAAQ,KAAK,OAAO,CAAA;AAE/C;;;GAGG;AACH,SAAgB,cAAc,CAAC,QAAmB;IAChD,yBAAyB;IACzB,QAAQ,GAAG,QAAQ,CAAC,MAAM,CAAC,CAAC,CAAC,EAAE,CAAC,CAAC,CAAC,CAAC,MAAM,CAAC,CAAA;IAE1C,mCAAmC;IACnC,MAAM,aAAa,GAA4B,EAAE,CAAA;IACjD,KAAK,MAAM,OAAO,IAAI,QAAQ,EAAE;QAC9B,MAAM,GAAG,GAAG,UAAU;YACpB,CAAC,CAAC,OAAO,CAAC,UAAU,CAAC,WAAW,EAAE;YAClC,CAAC,CAAC,OAAO,CAAC,UAAU,CAAA;QACtB,aAAa,CAAC,GAAG,CAAC,GAAG,WAAW,CAAA;KACjC;IAED,MAAM,MAAM,GAAa,EAAE,CAAA;IAE3B,KAAK,MAAM,OAAO,IAAI,QAAQ,EAAE;QAC9B,4BAA4B;QAC5B,MAAM,GAAG,GAAG,UAAU;YACpB,CAAC,CAAC,OAAO,CAAC,UAAU,CAAC,WAAW,EAAE;YAClC,CAAC,CAAC,OAAO,CAAC,UAAU,CAAA;QACtB,IAAI,aAAa,CAAC,GAAG,CAAC,KAAK,UAAU,EAAE;YACrC,SAAQ;SACT;QAED,oCAAoC;QACpC,IAAI,aAAa,GAAG,KAAK,CAAA;QACzB,IAAI,OAAO,GAAG,GAAG,CAAA;QACjB,IAAI,MAAM,GAAG,UAAU,CAAC,OAAO,CAAC,OAAO,CAAC,CAAA;QACxC,OAAO,MAAM,KAAK,OAAO,EAAE;YACzB,IAAI,aAAa,CAAC,MAAM,CAAC,EAAE;gBACzB,aAAa,GAAG,IAAI,CAAA;gBACpB,MAAK;aACN;YAED,OAAO,GAAG,MAAM,CAAA;YAChB,MAAM,GAAG,UAAU,CAAC,OAAO,CAAC,OAAO,CAAC,CAAA;SACrC;QAED,2CAA2C;QAC3C,IAAI,CAAC,aAAa,EAAE;YAClB,MAAM,CAAC,IAAI,CAAC,OAAO,CAAC,UAAU,CAAC,CAAA;YAC/B,aAAa,CAAC,GAAG,CAAC,GAAG,UAAU,CAAA;SAChC;KACF;IAED,OAAO,MAAM,CAAA;AACf,CAAC;AA9CD,wCA8CC;AAED;;GAEG;AACH,SAAgB,KAAK,CAAC,QAAmB,EAAE,QAAgB;IACzD,IAAI,MAAM,GAAc,+BAAS,CAAC,IAAI,CAAA;IAEtC,KAAK,MAAM,OAAO,IAAI,QAAQ,EAAE;QAC9B,IAAI,OAAO,CAAC,MAAM,EAAE;YAClB,MAAM,IAAI,CAAC,OAAO,CAAC,KAAK,CAAC,QAAQ,CAAC,CAAA;SACnC;aAAM;YACL,MAAM,IAAI,OAAO,CAAC,KAAK,CAAC,QAAQ,CAAC,CAAA;SAClC;KACF;IAED,OAAO,MAAM,CAAA;AACf,CAAC;AAZD,sBAYC;AAED;;GAEG;AACH,SAAgB,YAAY,CAAC,QAAmB,EAAE,QAAgB;IAChE,OAAO,QAAQ,CAAC,IAAI,CAAC,CAAC,CAAC,EAAE,CAAC,CAAC,CAAC,CAAC,MAAM,IAAI,CAAC,CAAC,YAAY,CAAC,QAAQ,CAAC,CAAC,CAAA;AAClE,CAAC;AAFD,oCAEC"}
|
64
node_modules/@actions/glob/lib/internal-pattern.d.ts
generated
vendored
Normal file
64
node_modules/@actions/glob/lib/internal-pattern.d.ts
generated
vendored
Normal file
@ -0,0 +1,64 @@
|
||||
import { MatchKind } from './internal-match-kind';
|
||||
export declare class Pattern {
|
||||
/**
|
||||
* Indicates whether matches should be excluded from the result set
|
||||
*/
|
||||
readonly negate: boolean;
|
||||
/**
|
||||
* The directory to search. The literal path prior to the first glob segment.
|
||||
*/
|
||||
readonly searchPath: string;
|
||||
/**
|
||||
* The path/pattern segments. Note, only the first segment (the root directory)
|
||||
* may contain a directory separator character. Use the trailingSeparator field
|
||||
* to determine whether the pattern ended with a trailing slash.
|
||||
*/
|
||||
readonly segments: string[];
|
||||
/**
|
||||
* Indicates the pattern should only match directories, not regular files.
|
||||
*/
|
||||
readonly trailingSeparator: boolean;
|
||||
/**
|
||||
* The Minimatch object used for matching
|
||||
*/
|
||||
private readonly minimatch;
|
||||
/**
|
||||
* Used to workaround a limitation with Minimatch when determining a partial
|
||||
* match and the path is a root directory. For example, when the pattern is
|
||||
* `/foo/**` or `C:\foo\**` and the path is `/` or `C:\`.
|
||||
*/
|
||||
private readonly rootRegExp;
|
||||
/**
|
||||
* Indicates that the pattern is implicitly added as opposed to user specified.
|
||||
*/
|
||||
private readonly isImplicitPattern;
|
||||
constructor(pattern: string);
|
||||
constructor(pattern: string, isImplicitPattern: boolean, segments: undefined, homedir: string);
|
||||
constructor(negate: boolean, isImplicitPattern: boolean, segments: string[], homedir?: string);
|
||||
/**
|
||||
* Matches the pattern against the specified path
|
||||
*/
|
||||
match(itemPath: string): MatchKind;
|
||||
/**
|
||||
* Indicates whether the pattern may match descendants of the specified path
|
||||
*/
|
||||
partialMatch(itemPath: string): boolean;
|
||||
/**
|
||||
* Escapes glob patterns within a path
|
||||
*/
|
||||
static globEscape(s: string): string;
|
||||
/**
|
||||
* Normalizes slashes and ensures absolute root
|
||||
*/
|
||||
private static fixupPattern;
|
||||
/**
|
||||
* Attempts to unescape a pattern segment to create a literal path segment.
|
||||
* Otherwise returns empty string.
|
||||
*/
|
||||
private static getLiteral;
|
||||
/**
|
||||
* Escapes regexp special characters
|
||||
* https://javascript.info/regexp-escaping
|
||||
*/
|
||||
private static regExpEscape;
|
||||
}
|
255
node_modules/@actions/glob/lib/internal-pattern.js
generated
vendored
Normal file
255
node_modules/@actions/glob/lib/internal-pattern.js
generated
vendored
Normal file
@ -0,0 +1,255 @@
|
||||
"use strict";
|
||||
var __createBinding = (this && this.__createBinding) || (Object.create ? (function(o, m, k, k2) {
|
||||
if (k2 === undefined) k2 = k;
|
||||
Object.defineProperty(o, k2, { enumerable: true, get: function() { return m[k]; } });
|
||||
}) : (function(o, m, k, k2) {
|
||||
if (k2 === undefined) k2 = k;
|
||||
o[k2] = m[k];
|
||||
}));
|
||||
var __setModuleDefault = (this && this.__setModuleDefault) || (Object.create ? (function(o, v) {
|
||||
Object.defineProperty(o, "default", { enumerable: true, value: v });
|
||||
}) : function(o, v) {
|
||||
o["default"] = v;
|
||||
});
|
||||
var __importStar = (this && this.__importStar) || function (mod) {
|
||||
if (mod && mod.__esModule) return mod;
|
||||
var result = {};
|
||||
if (mod != null) for (var k in mod) if (k !== "default" && Object.hasOwnProperty.call(mod, k)) __createBinding(result, mod, k);
|
||||
__setModuleDefault(result, mod);
|
||||
return result;
|
||||
};
|
||||
var __importDefault = (this && this.__importDefault) || function (mod) {
|
||||
return (mod && mod.__esModule) ? mod : { "default": mod };
|
||||
};
|
||||
Object.defineProperty(exports, "__esModule", { value: true });
|
||||
exports.Pattern = void 0;
|
||||
const os = __importStar(require("os"));
|
||||
const path = __importStar(require("path"));
|
||||
const pathHelper = __importStar(require("./internal-path-helper"));
|
||||
const assert_1 = __importDefault(require("assert"));
|
||||
const minimatch_1 = require("minimatch");
|
||||
const internal_match_kind_1 = require("./internal-match-kind");
|
||||
const internal_path_1 = require("./internal-path");
|
||||
const IS_WINDOWS = process.platform === 'win32';
|
||||
class Pattern {
|
||||
constructor(patternOrNegate, isImplicitPattern = false, segments, homedir) {
|
||||
/**
|
||||
* Indicates whether matches should be excluded from the result set
|
||||
*/
|
||||
this.negate = false;
|
||||
// Pattern overload
|
||||
let pattern;
|
||||
if (typeof patternOrNegate === 'string') {
|
||||
pattern = patternOrNegate.trim();
|
||||
}
|
||||
// Segments overload
|
||||
else {
|
||||
// Convert to pattern
|
||||
segments = segments || [];
|
||||
assert_1.default(segments.length, `Parameter 'segments' must not empty`);
|
||||
const root = Pattern.getLiteral(segments[0]);
|
||||
assert_1.default(root && pathHelper.hasAbsoluteRoot(root), `Parameter 'segments' first element must be a root path`);
|
||||
pattern = new internal_path_1.Path(segments).toString().trim();
|
||||
if (patternOrNegate) {
|
||||
pattern = `!${pattern}`;
|
||||
}
|
||||
}
|
||||
// Negate
|
||||
while (pattern.startsWith('!')) {
|
||||
this.negate = !this.negate;
|
||||
pattern = pattern.substr(1).trim();
|
||||
}
|
||||
// Normalize slashes and ensures absolute root
|
||||
pattern = Pattern.fixupPattern(pattern, homedir);
|
||||
// Segments
|
||||
this.segments = new internal_path_1.Path(pattern).segments;
|
||||
// Trailing slash indicates the pattern should only match directories, not regular files
|
||||
this.trailingSeparator = pathHelper
|
||||
.normalizeSeparators(pattern)
|
||||
.endsWith(path.sep);
|
||||
pattern = pathHelper.safeTrimTrailingSeparator(pattern);
|
||||
// Search path (literal path prior to the first glob segment)
|
||||
let foundGlob = false;
|
||||
const searchSegments = this.segments
|
||||
.map(x => Pattern.getLiteral(x))
|
||||
.filter(x => !foundGlob && !(foundGlob = x === ''));
|
||||
this.searchPath = new internal_path_1.Path(searchSegments).toString();
|
||||
// Root RegExp (required when determining partial match)
|
||||
this.rootRegExp = new RegExp(Pattern.regExpEscape(searchSegments[0]), IS_WINDOWS ? 'i' : '');
|
||||
this.isImplicitPattern = isImplicitPattern;
|
||||
// Create minimatch
|
||||
const minimatchOptions = {
|
||||
dot: true,
|
||||
nobrace: true,
|
||||
nocase: IS_WINDOWS,
|
||||
nocomment: true,
|
||||
noext: true,
|
||||
nonegate: true
|
||||
};
|
||||
pattern = IS_WINDOWS ? pattern.replace(/\\/g, '/') : pattern;
|
||||
this.minimatch = new minimatch_1.Minimatch(pattern, minimatchOptions);
|
||||
}
|
||||
/**
|
||||
* Matches the pattern against the specified path
|
||||
*/
|
||||
match(itemPath) {
|
||||
// Last segment is globstar?
|
||||
if (this.segments[this.segments.length - 1] === '**') {
|
||||
// Normalize slashes
|
||||
itemPath = pathHelper.normalizeSeparators(itemPath);
|
||||
// Append a trailing slash. Otherwise Minimatch will not match the directory immediately
|
||||
// preceding the globstar. For example, given the pattern `/foo/**`, Minimatch returns
|
||||
// false for `/foo` but returns true for `/foo/`. Append a trailing slash to handle that quirk.
|
||||
if (!itemPath.endsWith(path.sep) && this.isImplicitPattern === false) {
|
||||
// Note, this is safe because the constructor ensures the pattern has an absolute root.
|
||||
// For example, formats like C: and C:foo on Windows are resolved to an absolute root.
|
||||
itemPath = `${itemPath}${path.sep}`;
|
||||
}
|
||||
}
|
||||
else {
|
||||
// Normalize slashes and trim unnecessary trailing slash
|
||||
itemPath = pathHelper.safeTrimTrailingSeparator(itemPath);
|
||||
}
|
||||
// Match
|
||||
if (this.minimatch.match(itemPath)) {
|
||||
return this.trailingSeparator ? internal_match_kind_1.MatchKind.Directory : internal_match_kind_1.MatchKind.All;
|
||||
}
|
||||
return internal_match_kind_1.MatchKind.None;
|
||||
}
|
||||
/**
|
||||
* Indicates whether the pattern may match descendants of the specified path
|
||||
*/
|
||||
partialMatch(itemPath) {
|
||||
// Normalize slashes and trim unnecessary trailing slash
|
||||
itemPath = pathHelper.safeTrimTrailingSeparator(itemPath);
|
||||
// matchOne does not handle root path correctly
|
||||
if (pathHelper.dirname(itemPath) === itemPath) {
|
||||
return this.rootRegExp.test(itemPath);
|
||||
}
|
||||
return this.minimatch.matchOne(itemPath.split(IS_WINDOWS ? /\\+/ : /\/+/), this.minimatch.set[0], true);
|
||||
}
|
||||
/**
|
||||
* Escapes glob patterns within a path
|
||||
*/
|
||||
static globEscape(s) {
|
||||
return (IS_WINDOWS ? s : s.replace(/\\/g, '\\\\')) // escape '\' on Linux/macOS
|
||||
.replace(/(\[)(?=[^/]+\])/g, '[[]') // escape '[' when ']' follows within the path segment
|
||||
.replace(/\?/g, '[?]') // escape '?'
|
||||
.replace(/\*/g, '[*]'); // escape '*'
|
||||
}
|
||||
/**
|
||||
* Normalizes slashes and ensures absolute root
|
||||
*/
|
||||
static fixupPattern(pattern, homedir) {
|
||||
// Empty
|
||||
assert_1.default(pattern, 'pattern cannot be empty');
|
||||
// Must not contain `.` segment, unless first segment
|
||||
// Must not contain `..` segment
|
||||
const literalSegments = new internal_path_1.Path(pattern).segments.map(x => Pattern.getLiteral(x));
|
||||
assert_1.default(literalSegments.every((x, i) => (x !== '.' || i === 0) && x !== '..'), `Invalid pattern '${pattern}'. Relative pathing '.' and '..' is not allowed.`);
|
||||
// Must not contain globs in root, e.g. Windows UNC path \\foo\b*r
|
||||
assert_1.default(!pathHelper.hasRoot(pattern) || literalSegments[0], `Invalid pattern '${pattern}'. Root segment must not contain globs.`);
|
||||
// Normalize slashes
|
||||
pattern = pathHelper.normalizeSeparators(pattern);
|
||||
// Replace leading `.` segment
|
||||
if (pattern === '.' || pattern.startsWith(`.${path.sep}`)) {
|
||||
pattern = Pattern.globEscape(process.cwd()) + pattern.substr(1);
|
||||
}
|
||||
// Replace leading `~` segment
|
||||
else if (pattern === '~' || pattern.startsWith(`~${path.sep}`)) {
|
||||
homedir = homedir || os.homedir();
|
||||
assert_1.default(homedir, 'Unable to determine HOME directory');
|
||||
assert_1.default(pathHelper.hasAbsoluteRoot(homedir), `Expected HOME directory to be a rooted path. Actual '${homedir}'`);
|
||||
pattern = Pattern.globEscape(homedir) + pattern.substr(1);
|
||||
}
|
||||
// Replace relative drive root, e.g. pattern is C: or C:foo
|
||||
else if (IS_WINDOWS &&
|
||||
(pattern.match(/^[A-Z]:$/i) || pattern.match(/^[A-Z]:[^\\]/i))) {
|
||||
let root = pathHelper.ensureAbsoluteRoot('C:\\dummy-root', pattern.substr(0, 2));
|
||||
if (pattern.length > 2 && !root.endsWith('\\')) {
|
||||
root += '\\';
|
||||
}
|
||||
pattern = Pattern.globEscape(root) + pattern.substr(2);
|
||||
}
|
||||
// Replace relative root, e.g. pattern is \ or \foo
|
||||
else if (IS_WINDOWS && (pattern === '\\' || pattern.match(/^\\[^\\]/))) {
|
||||
let root = pathHelper.ensureAbsoluteRoot('C:\\dummy-root', '\\');
|
||||
if (!root.endsWith('\\')) {
|
||||
root += '\\';
|
||||
}
|
||||
pattern = Pattern.globEscape(root) + pattern.substr(1);
|
||||
}
|
||||
// Otherwise ensure absolute root
|
||||
else {
|
||||
pattern = pathHelper.ensureAbsoluteRoot(Pattern.globEscape(process.cwd()), pattern);
|
||||
}
|
||||
return pathHelper.normalizeSeparators(pattern);
|
||||
}
|
||||
/**
|
||||
* Attempts to unescape a pattern segment to create a literal path segment.
|
||||
* Otherwise returns empty string.
|
||||
*/
|
||||
static getLiteral(segment) {
|
||||
let literal = '';
|
||||
for (let i = 0; i < segment.length; i++) {
|
||||
const c = segment[i];
|
||||
// Escape
|
||||
if (c === '\\' && !IS_WINDOWS && i + 1 < segment.length) {
|
||||
literal += segment[++i];
|
||||
continue;
|
||||
}
|
||||
// Wildcard
|
||||
else if (c === '*' || c === '?') {
|
||||
return '';
|
||||
}
|
||||
// Character set
|
||||
else if (c === '[' && i + 1 < segment.length) {
|
||||
let set = '';
|
||||
let closed = -1;
|
||||
for (let i2 = i + 1; i2 < segment.length; i2++) {
|
||||
const c2 = segment[i2];
|
||||
// Escape
|
||||
if (c2 === '\\' && !IS_WINDOWS && i2 + 1 < segment.length) {
|
||||
set += segment[++i2];
|
||||
continue;
|
||||
}
|
||||
// Closed
|
||||
else if (c2 === ']') {
|
||||
closed = i2;
|
||||
break;
|
||||
}
|
||||
// Otherwise
|
||||
else {
|
||||
set += c2;
|
||||
}
|
||||
}
|
||||
// Closed?
|
||||
if (closed >= 0) {
|
||||
// Cannot convert
|
||||
if (set.length > 1) {
|
||||
return '';
|
||||
}
|
||||
// Convert to literal
|
||||
if (set) {
|
||||
literal += set;
|
||||
i = closed;
|
||||
continue;
|
||||
}
|
||||
}
|
||||
// Otherwise fall thru
|
||||
}
|
||||
// Append
|
||||
literal += c;
|
||||
}
|
||||
return literal;
|
||||
}
|
||||
/**
|
||||
* Escapes regexp special characters
|
||||
* https://javascript.info/regexp-escaping
|
||||
*/
|
||||
static regExpEscape(s) {
|
||||
return s.replace(/[[\\^$.|?*+()]/g, '\\$&');
|
||||
}
|
||||
}
|
||||
exports.Pattern = Pattern;
|
||||
//# sourceMappingURL=internal-pattern.js.map
|
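A `Pattern`'s `searchPath` is the literal prefix before the first glob segment, and `partialMatch` reports whether descending into a directory could still yield matches, which is what the "descend further" helper above relies on. A small sketch (illustrative only; Linux/macOS paths):

```js
// Illustrative sketch; requires @actions/glob to be installed locally.
const { Pattern } = require('@actions/glob/lib/internal-pattern');

const pattern = new Pattern('/repo/src/**/*.js');

console.log(pattern.searchPath);                    // '/repo/src' -- literal prefix before the first glob
console.log(pattern.partialMatch('/repo/src/lib')); // true  -- descending may still find *.js files
console.log(pattern.partialMatch('/etc'));          // false -- nothing under /etc can ever match
```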
1
node_modules/@actions/glob/lib/internal-pattern.js.map
generated
vendored
Normal file
1
node_modules/@actions/glob/lib/internal-pattern.js.map
generated
vendored
Normal file
File diff suppressed because one or more lines are too long
5
node_modules/@actions/glob/lib/internal-search-state.d.ts
generated
vendored
Normal file
5
node_modules/@actions/glob/lib/internal-search-state.d.ts
generated
vendored
Normal file
@ -0,0 +1,5 @@
export declare class SearchState {
    readonly path: string;
    readonly level: number;
    constructor(path: string, level: number);
}
11
node_modules/@actions/glob/lib/internal-search-state.js
generated
vendored
Normal file
11
node_modules/@actions/glob/lib/internal-search-state.js
generated
vendored
Normal file
@ -0,0 +1,11 @@
"use strict";
Object.defineProperty(exports, "__esModule", { value: true });
exports.SearchState = void 0;
class SearchState {
    constructor(path, level) {
        this.path = path;
        this.level = level;
    }
}
exports.SearchState = SearchState;
//# sourceMappingURL=internal-search-state.js.map
1
node_modules/@actions/glob/lib/internal-search-state.js.map
generated
vendored
Normal file
1
node_modules/@actions/glob/lib/internal-search-state.js.map
generated
vendored
Normal file
@ -0,0 +1 @@
|
||||
{"version":3,"file":"internal-search-state.js","sourceRoot":"","sources":["../src/internal-search-state.ts"],"names":[],"mappings":";;;AAAA,MAAa,WAAW;IAItB,YAAY,IAAY,EAAE,KAAa;QACrC,IAAI,CAAC,IAAI,GAAG,IAAI,CAAA;QAChB,IAAI,CAAC,KAAK,GAAG,KAAK,CAAA;IACpB,CAAC;CACF;AARD,kCAQC"}
|
43
node_modules/@actions/glob/package.json
generated
vendored
Normal file
43
node_modules/@actions/glob/package.json
generated
vendored
Normal file
@ -0,0 +1,43 @@
{
  "name": "@actions/glob",
  "version": "0.1.2",
  "preview": true,
  "description": "Actions glob lib",
  "keywords": [
    "github",
    "actions",
    "glob"
  ],
  "homepage": "https://github.com/actions/toolkit/tree/main/packages/glob",
  "license": "MIT",
  "main": "lib/glob.js",
  "types": "lib/glob.d.ts",
  "directories": {
    "lib": "lib",
    "test": "__tests__"
  },
  "files": [
    "lib",
    "!.DS_Store"
  ],
  "publishConfig": {
    "access": "public"
  },
  "repository": {
    "type": "git",
    "url": "git+https://github.com/actions/toolkit.git",
    "directory": "packages/glob"
  },
  "scripts": {
    "audit-moderate": "npm install && npm audit --json --audit-level=moderate > audit.json",
    "test": "echo \"Error: run tests from root\" && exit 1",
    "tsc": "tsc"
  },
  "bugs": {
    "url": "https://github.com/actions/toolkit/issues"
  },
  "dependencies": {
    "@actions/core": "^1.2.6",
    "minimatch": "^3.0.4"
  }
}
2
node_modules/@actions/http-client/lib/auth.js.map
generated
vendored
2
node_modules/@actions/http-client/lib/auth.js.map
generated
vendored
@ -1 +1 @@
|
||||
{"version":3,"file":"auth.js","sourceRoot":"","sources":["../src/auth.ts"],"names":[],"mappings":";;;;;;;;;;;;AAIA,MAAa,sBAAsB;IAIjC,YAAY,QAAgB,EAAE,QAAgB;QAC5C,IAAI,CAAC,QAAQ,GAAG,QAAQ,CAAA;QACxB,IAAI,CAAC,QAAQ,GAAG,QAAQ,CAAA;IAC1B,CAAC;IAED,cAAc,CAAC,OAA4B;QACzC,IAAI,CAAC,OAAO,CAAC,OAAO,EAAE;YACpB,MAAM,KAAK,CAAC,4BAA4B,CAAC,CAAA;SAC1C;QACD,OAAO,CAAC,OAAO,CAAC,eAAe,CAAC,GAAG,SAAS,MAAM,CAAC,IAAI,CACrD,GAAG,IAAI,CAAC,QAAQ,IAAI,IAAI,CAAC,QAAQ,EAAE,CACpC,CAAC,QAAQ,CAAC,QAAQ,CAAC,EAAE,CAAA;IACxB,CAAC;IAED,iCAAiC;IACjC,uBAAuB;QACrB,OAAO,KAAK,CAAA;IACd,CAAC;IAEK,oBAAoB;;YACxB,MAAM,IAAI,KAAK,CAAC,iBAAiB,CAAC,CAAA;QACpC,CAAC;KAAA;CACF;AA1BD,wDA0BC;AAED,MAAa,uBAAuB;IAGlC,YAAY,KAAa;QACvB,IAAI,CAAC,KAAK,GAAG,KAAK,CAAA;IACpB,CAAC;IAED,yCAAyC;IACzC,sDAAsD;IACtD,cAAc,CAAC,OAA4B;QACzC,IAAI,CAAC,OAAO,CAAC,OAAO,EAAE;YACpB,MAAM,KAAK,CAAC,4BAA4B,CAAC,CAAA;SAC1C;QACD,OAAO,CAAC,OAAO,CAAC,eAAe,CAAC,GAAG,UAAU,IAAI,CAAC,KAAK,EAAE,CAAA;IAC3D,CAAC;IAED,iCAAiC;IACjC,uBAAuB;QACrB,OAAO,KAAK,CAAA;IACd,CAAC;IAEK,oBAAoB;;YACxB,MAAM,IAAI,KAAK,CAAC,iBAAiB,CAAC,CAAA;QACpC,CAAC;KAAA;CACF;AAxBD,0DAwBC;AAED,MAAa,oCAAoC;IAI/C,YAAY,KAAa;QACvB,IAAI,CAAC,KAAK,GAAG,KAAK,CAAA;IACpB,CAAC;IAED,yCAAyC;IACzC,sDAAsD;IACtD,cAAc,CAAC,OAA4B;QACzC,IAAI,CAAC,OAAO,CAAC,OAAO,EAAE;YACpB,MAAM,KAAK,CAAC,4BAA4B,CAAC,CAAA;SAC1C;QACD,OAAO,CAAC,OAAO,CAAC,eAAe,CAAC,GAAG,SAAS,MAAM,CAAC,IAAI,CACrD,OAAO,IAAI,CAAC,KAAK,EAAE,CACpB,CAAC,QAAQ,CAAC,QAAQ,CAAC,EAAE,CAAA;IACxB,CAAC;IAED,iCAAiC;IACjC,uBAAuB;QACrB,OAAO,KAAK,CAAA;IACd,CAAC;IAEK,oBAAoB;;YACxB,MAAM,IAAI,KAAK,CAAC,iBAAiB,CAAC,CAAA;QACpC,CAAC;KAAA;CACF;AA3BD,oFA2BC"}
|
||||
{"version":3,"file":"auth.js","sourceRoot":"","sources":["../src/auth.ts"],"names":[],"mappings":";;;;;;;;;;;;AAIA,MAAa,sBAAsB;IAIjC,YAAY,QAAgB,EAAE,QAAgB;QAC5C,IAAI,CAAC,QAAQ,GAAG,QAAQ,CAAA;QACxB,IAAI,CAAC,QAAQ,GAAG,QAAQ,CAAA;IAC1B,CAAC;IAED,cAAc,CAAC,OAA4B;QACzC,IAAI,CAAC,OAAO,CAAC,OAAO,EAAE;YACpB,MAAM,KAAK,CAAC,4BAA4B,CAAC,CAAA;SAC1C;QACD,OAAO,CAAC,OAAO,CAAC,eAAe,CAAC,GAAG,SAAS,MAAM,CAAC,IAAI,CACrD,GAAG,IAAI,CAAC,QAAQ,IAAI,IAAI,CAAC,QAAQ,EAAE,CACpC,CAAC,QAAQ,CAAC,QAAQ,CAAC,EAAE,CAAA;IACxB,CAAC;IAED,iCAAiC;IACjC,uBAAuB;QACrB,OAAO,KAAK,CAAA;IACd,CAAC;IAEK,oBAAoB;;YACxB,MAAM,IAAI,KAAK,CAAC,iBAAiB,CAAC,CAAA;QACpC,CAAC;KAAA;CACF;AA1BD,wDA0BC;AAED,MAAa,uBAAuB;IAGlC,YAAY,KAAa;QACvB,IAAI,CAAC,KAAK,GAAG,KAAK,CAAA;IACpB,CAAC;IAED,yCAAyC;IACzC,sDAAsD;IACtD,cAAc,CAAC,OAA4B;QACzC,IAAI,CAAC,OAAO,CAAC,OAAO,EAAE;YACpB,MAAM,KAAK,CAAC,4BAA4B,CAAC,CAAA;SAC1C;QACD,OAAO,CAAC,OAAO,CAAC,eAAe,CAAC,GAAG,UAAU,IAAI,CAAC,KAAK,EAAE,CAAA;IAC3D,CAAC;IAED,iCAAiC;IACjC,uBAAuB;QACrB,OAAO,KAAK,CAAA;IACd,CAAC;IAEK,oBAAoB;;YACxB,MAAM,IAAI,KAAK,CAAC,iBAAiB,CAAC,CAAA;QACpC,CAAC;KAAA;CACF;AAxBD,0DAwBC;AAED,MAAa,oCAAoC;IAK/C,YAAY,KAAa;QACvB,IAAI,CAAC,KAAK,GAAG,KAAK,CAAA;IACpB,CAAC;IAED,yCAAyC;IACzC,sDAAsD;IACtD,cAAc,CAAC,OAA4B;QACzC,IAAI,CAAC,OAAO,CAAC,OAAO,EAAE;YACpB,MAAM,KAAK,CAAC,4BAA4B,CAAC,CAAA;SAC1C;QACD,OAAO,CAAC,OAAO,CAAC,eAAe,CAAC,GAAG,SAAS,MAAM,CAAC,IAAI,CACrD,OAAO,IAAI,CAAC,KAAK,EAAE,CACpB,CAAC,QAAQ,CAAC,QAAQ,CAAC,EAAE,CAAA;IACxB,CAAC;IAED,iCAAiC;IACjC,uBAAuB;QACrB,OAAO,KAAK,CAAA;IACd,CAAC;IAEK,oBAAoB;;YACxB,MAAM,IAAI,KAAK,CAAC,iBAAiB,CAAC,CAAA;QACpC,CAAC;KAAA;CACF;AA5BD,oFA4BC"}
|
7
node_modules/@actions/http-client/lib/index.d.ts
generated
vendored
7
node_modules/@actions/http-client/lib/index.d.ts
generated
vendored
@ -1,6 +1,9 @@
/// <reference types="node" />
/// <reference types="node" />
/// <reference types="node" />
import * as http from 'http';
import * as ifm from './interfaces';
import { ProxyAgent } from 'undici';
export declare enum HttpCodes {
    OK = 200,
    MultipleChoices = 300,
@ -51,6 +54,7 @@ export declare class HttpClientResponse {
    constructor(message: http.IncomingMessage);
    message: http.IncomingMessage;
    readBody(): Promise<string>;
    readBodyBuffer?(): Promise<Buffer>;
}
export declare function isHttps(requestUrl: string): boolean;
export declare class HttpClient {
@ -66,6 +70,7 @@ export declare class HttpClient {
    private _maxRetries;
    private _agent;
    private _proxyAgent;
    private _proxyAgentDispatcher;
    private _keepAlive;
    private _disposed;
    constructor(userAgent?: string, handlers?: ifm.RequestHandler[], requestOptions?: ifm.RequestOptions);
@ -114,10 +119,12 @@ export declare class HttpClient {
     * @param serverUrl The server URL where the request will be sent. For example, https://api.github.com
     */
    getAgent(serverUrl: string): http.Agent;
    getAgentDispatcher(serverUrl: string): ProxyAgent | undefined;
    private _prepareRequest;
    private _mergeHeaders;
    private _getExistingOrDefaultHeader;
    private _getAgent;
    private _getProxyAgentDispatcher;
    private _performExponentialBackoff;
    private _processResponse;
}
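The `getAgentDispatcher` method added above exposes an undici `ProxyAgent` when the configured proxy environment variables apply to the target URL. A hedged sketch of how a caller could wire it into an undici request (the user-agent string is arbitrary; `undici` is available because it is added to the dependencies further down):

```js
const { HttpClient } = require('@actions/http-client');
const { request } = require('undici');

async function fetchWithProxy(url) {
  const client = new HttpClient('example-agent');
  // Returns a ProxyAgent when http(s)_proxy covers this URL, otherwise undefined.
  const dispatcher = client.getAgentDispatcher(url);
  const res = await request(url, dispatcher ? { dispatcher } : {});
  return res.body.text();
}
```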
71
node_modules/@actions/http-client/lib/index.js
generated
vendored
71
node_modules/@actions/http-client/lib/index.js
generated
vendored
@ -2,7 +2,11 @@
|
||||
/* eslint-disable @typescript-eslint/no-explicit-any */
|
||||
var __createBinding = (this && this.__createBinding) || (Object.create ? (function(o, m, k, k2) {
|
||||
if (k2 === undefined) k2 = k;
|
||||
Object.defineProperty(o, k2, { enumerable: true, get: function() { return m[k]; } });
|
||||
var desc = Object.getOwnPropertyDescriptor(m, k);
|
||||
if (!desc || ("get" in desc ? !m.__esModule : desc.writable || desc.configurable)) {
|
||||
desc = { enumerable: true, get: function() { return m[k]; } };
|
||||
}
|
||||
Object.defineProperty(o, k2, desc);
|
||||
}) : (function(o, m, k, k2) {
|
||||
if (k2 === undefined) k2 = k;
|
||||
o[k2] = m[k];
|
||||
@ -15,7 +19,7 @@ var __setModuleDefault = (this && this.__setModuleDefault) || (Object.create ? (
|
||||
var __importStar = (this && this.__importStar) || function (mod) {
|
||||
if (mod && mod.__esModule) return mod;
|
||||
var result = {};
|
||||
if (mod != null) for (var k in mod) if (k !== "default" && Object.hasOwnProperty.call(mod, k)) __createBinding(result, mod, k);
|
||||
if (mod != null) for (var k in mod) if (k !== "default" && Object.prototype.hasOwnProperty.call(mod, k)) __createBinding(result, mod, k);
|
||||
__setModuleDefault(result, mod);
|
||||
return result;
|
||||
};
|
||||
@ -34,6 +38,7 @@ const http = __importStar(require("http"));
|
||||
const https = __importStar(require("https"));
|
||||
const pm = __importStar(require("./proxy"));
|
||||
const tunnel = __importStar(require("tunnel"));
|
||||
const undici_1 = require("undici");
|
||||
var HttpCodes;
|
||||
(function (HttpCodes) {
|
||||
HttpCodes[HttpCodes["OK"] = 200] = "OK";
|
||||
@ -63,16 +68,16 @@ var HttpCodes;
|
||||
HttpCodes[HttpCodes["BadGateway"] = 502] = "BadGateway";
|
||||
HttpCodes[HttpCodes["ServiceUnavailable"] = 503] = "ServiceUnavailable";
|
||||
HttpCodes[HttpCodes["GatewayTimeout"] = 504] = "GatewayTimeout";
|
||||
})(HttpCodes = exports.HttpCodes || (exports.HttpCodes = {}));
|
||||
})(HttpCodes || (exports.HttpCodes = HttpCodes = {}));
|
||||
var Headers;
|
||||
(function (Headers) {
|
||||
Headers["Accept"] = "accept";
|
||||
Headers["ContentType"] = "content-type";
|
||||
})(Headers = exports.Headers || (exports.Headers = {}));
|
||||
})(Headers || (exports.Headers = Headers = {}));
|
||||
var MediaTypes;
|
||||
(function (MediaTypes) {
|
||||
MediaTypes["ApplicationJson"] = "application/json";
|
||||
})(MediaTypes = exports.MediaTypes || (exports.MediaTypes = {}));
|
||||
})(MediaTypes || (exports.MediaTypes = MediaTypes = {}));
|
||||
/**
|
||||
* Returns the proxy URL, depending upon the supplied url and proxy environment variables.
|
||||
* @param serverUrl The server URL where the request will be sent. For example, https://api.github.com
|
||||
@ -123,6 +128,19 @@ class HttpClientResponse {
|
||||
}));
|
||||
});
|
||||
}
|
||||
readBodyBuffer() {
|
||||
return __awaiter(this, void 0, void 0, function* () {
|
||||
return new Promise((resolve) => __awaiter(this, void 0, void 0, function* () {
|
||||
const chunks = [];
|
||||
this.message.on('data', (chunk) => {
|
||||
chunks.push(chunk);
|
||||
});
|
||||
this.message.on('end', () => {
|
||||
resolve(Buffer.concat(chunks));
|
||||
});
|
||||
}));
|
||||
});
|
||||
}
|
||||
}
|
||||
exports.HttpClientResponse = HttpClientResponse;
|
||||
function isHttps(requestUrl) {
|
||||
@ -428,6 +446,15 @@ class HttpClient {
|
||||
const parsedUrl = new URL(serverUrl);
|
||||
return this._getAgent(parsedUrl);
|
||||
}
|
||||
getAgentDispatcher(serverUrl) {
|
||||
const parsedUrl = new URL(serverUrl);
|
||||
const proxyUrl = pm.getProxyUrl(parsedUrl);
|
||||
const useProxy = proxyUrl && proxyUrl.hostname;
|
||||
if (!useProxy) {
|
||||
return;
|
||||
}
|
||||
return this._getProxyAgentDispatcher(parsedUrl, proxyUrl);
|
||||
}
|
||||
_prepareRequest(method, requestUrl, headers) {
|
||||
const info = {};
|
||||
info.parsedUrl = requestUrl;
|
||||
@ -475,7 +502,7 @@ class HttpClient {
|
||||
if (this._keepAlive && useProxy) {
|
||||
agent = this._proxyAgent;
|
||||
}
|
||||
if (this._keepAlive && !useProxy) {
|
||||
if (!useProxy) {
|
||||
agent = this._agent;
|
||||
}
|
||||
// if agent is already assigned use that agent.
|
||||
@ -507,16 +534,12 @@ class HttpClient {
|
||||
agent = tunnelAgent(agentOptions);
|
||||
this._proxyAgent = agent;
|
||||
}
|
||||
// if reusing agent across request and tunneling agent isn't assigned create a new agent
|
||||
if (this._keepAlive && !agent) {
|
||||
// if tunneling agent isn't assigned create a new agent
|
||||
if (!agent) {
|
||||
const options = { keepAlive: this._keepAlive, maxSockets };
|
||||
agent = usingSsl ? new https.Agent(options) : new http.Agent(options);
|
||||
this._agent = agent;
|
||||
}
|
||||
// if not using private agent and tunnel agent isn't setup then use global agent
|
||||
if (!agent) {
|
||||
agent = usingSsl ? https.globalAgent : http.globalAgent;
|
||||
}
|
||||
if (usingSsl && this._ignoreSslError) {
|
||||
// we don't want to set NODE_TLS_REJECT_UNAUTHORIZED=0 since that will affect request for entire process
|
||||
// http.RequestOptions doesn't expose a way to modify RequestOptions.agent.options
|
||||
@ -527,6 +550,30 @@ class HttpClient {
|
||||
}
|
||||
return agent;
|
||||
}
|
||||
_getProxyAgentDispatcher(parsedUrl, proxyUrl) {
|
||||
let proxyAgent;
|
||||
if (this._keepAlive) {
|
||||
proxyAgent = this._proxyAgentDispatcher;
|
||||
}
|
||||
// if agent is already assigned use that agent.
|
||||
if (proxyAgent) {
|
||||
return proxyAgent;
|
||||
}
|
||||
const usingSsl = parsedUrl.protocol === 'https:';
|
||||
proxyAgent = new undici_1.ProxyAgent(Object.assign({ uri: proxyUrl.href, pipelining: !this._keepAlive ? 0 : 1 }, ((proxyUrl.username || proxyUrl.password) && {
|
||||
token: `${proxyUrl.username}:${proxyUrl.password}`
|
||||
})));
|
||||
this._proxyAgentDispatcher = proxyAgent;
|
||||
if (usingSsl && this._ignoreSslError) {
|
||||
// we don't want to set NODE_TLS_REJECT_UNAUTHORIZED=0 since that will affect request for entire process
|
||||
// http.RequestOptions doesn't expose a way to modify RequestOptions.agent.options
|
||||
// we have to cast it to any and change it directly
|
||||
proxyAgent.options = Object.assign(proxyAgent.options.requestTls || {}, {
|
||||
rejectUnauthorized: false
|
||||
});
|
||||
}
|
||||
return proxyAgent;
|
||||
}
|
||||
_performExponentialBackoff(retryNumber) {
|
||||
return __awaiter(this, void 0, void 0, function* () {
|
||||
retryNumber = Math.min(ExponentialBackoffCeiling, retryNumber);
|
||||
|
2
node_modules/@actions/http-client/lib/index.js.map
generated
vendored
2
node_modules/@actions/http-client/lib/index.js.map
generated
vendored
File diff suppressed because one or more lines are too long
2
node_modules/@actions/http-client/lib/interfaces.d.ts
generated
vendored
2
node_modules/@actions/http-client/lib/interfaces.d.ts
generated
vendored
@ -1,4 +1,6 @@
/// <reference types="node" />
/// <reference types="node" />
/// <reference types="node" />
import * as http from 'http';
import * as https from 'https';
import { HttpClientResponse } from './index';
8
node_modules/@actions/http-client/lib/proxy.js
generated
vendored
8
node_modules/@actions/http-client/lib/proxy.js
generated
vendored
@ -15,7 +15,13 @@ function getProxyUrl(reqUrl) {
        }
    })();
    if (proxyVar) {
        return new URL(proxyVar);
        try {
            return new URL(proxyVar);
        }
        catch (_a) {
            if (!proxyVar.startsWith('http://') && !proxyVar.startsWith('https://'))
                return new URL(`http://${proxyVar}`);
        }
    }
    else {
        return undefined;
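The change to `getProxyUrl` above makes scheme-less proxy variables usable: when `new URL(proxyVar)` throws, the value is retried with an `http://` prefix instead of propagating the error. A small sketch of the resulting behavior (the proxy address is made up; the module path assumes the package layout shown in this diff):

```js
const { getProxyUrl } = require('@actions/http-client/lib/proxy');

// A bare host:port is not a valid URL on its own (a URL scheme may not start with a digit)...
process.env['https_proxy'] = '10.0.0.5:8080';

// ...so getProxyUrl now falls back to http://10.0.0.5:8080/ instead of throwing.
const proxy = getProxyUrl(new URL('https://registry.npmjs.org'));
console.log(proxy && proxy.href);
```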
2
node_modules/@actions/http-client/lib/proxy.js.map
generated
vendored
2
node_modules/@actions/http-client/lib/proxy.js.map
generated
vendored
@ -1 +1 @@
|
||||
{"version":3,"file":"proxy.js","sourceRoot":"","sources":["../src/proxy.ts"],"names":[],"mappings":";;;AAAA,SAAgB,WAAW,CAAC,MAAW;IACrC,MAAM,QAAQ,GAAG,MAAM,CAAC,QAAQ,KAAK,QAAQ,CAAA;IAE7C,IAAI,WAAW,CAAC,MAAM,CAAC,EAAE;QACvB,OAAO,SAAS,CAAA;KACjB;IAED,MAAM,QAAQ,GAAG,CAAC,GAAG,EAAE;QACrB,IAAI,QAAQ,EAAE;YACZ,OAAO,OAAO,CAAC,GAAG,CAAC,aAAa,CAAC,IAAI,OAAO,CAAC,GAAG,CAAC,aAAa,CAAC,CAAA;SAChE;aAAM;YACL,OAAO,OAAO,CAAC,GAAG,CAAC,YAAY,CAAC,IAAI,OAAO,CAAC,GAAG,CAAC,YAAY,CAAC,CAAA;SAC9D;IACH,CAAC,CAAC,EAAE,CAAA;IAEJ,IAAI,QAAQ,EAAE;QACZ,OAAO,IAAI,GAAG,CAAC,QAAQ,CAAC,CAAA;KACzB;SAAM;QACL,OAAO,SAAS,CAAA;KACjB;AACH,CAAC;AApBD,kCAoBC;AAED,SAAgB,WAAW,CAAC,MAAW;IACrC,IAAI,CAAC,MAAM,CAAC,QAAQ,EAAE;QACpB,OAAO,KAAK,CAAA;KACb;IAED,MAAM,OAAO,GAAG,MAAM,CAAC,QAAQ,CAAA;IAC/B,IAAI,iBAAiB,CAAC,OAAO,CAAC,EAAE;QAC9B,OAAO,IAAI,CAAA;KACZ;IAED,MAAM,OAAO,GAAG,OAAO,CAAC,GAAG,CAAC,UAAU,CAAC,IAAI,OAAO,CAAC,GAAG,CAAC,UAAU,CAAC,IAAI,EAAE,CAAA;IACxE,IAAI,CAAC,OAAO,EAAE;QACZ,OAAO,KAAK,CAAA;KACb;IAED,6BAA6B;IAC7B,IAAI,OAA2B,CAAA;IAC/B,IAAI,MAAM,CAAC,IAAI,EAAE;QACf,OAAO,GAAG,MAAM,CAAC,MAAM,CAAC,IAAI,CAAC,CAAA;KAC9B;SAAM,IAAI,MAAM,CAAC,QAAQ,KAAK,OAAO,EAAE;QACtC,OAAO,GAAG,EAAE,CAAA;KACb;SAAM,IAAI,MAAM,CAAC,QAAQ,KAAK,QAAQ,EAAE;QACvC,OAAO,GAAG,GAAG,CAAA;KACd;IAED,qDAAqD;IACrD,MAAM,aAAa,GAAG,CAAC,MAAM,CAAC,QAAQ,CAAC,WAAW,EAAE,CAAC,CAAA;IACrD,IAAI,OAAO,OAAO,KAAK,QAAQ,EAAE;QAC/B,aAAa,CAAC,IAAI,CAAC,GAAG,aAAa,CAAC,CAAC,CAAC,IAAI,OAAO,EAAE,CAAC,CAAA;KACrD;IAED,uCAAuC;IACvC,KAAK,MAAM,gBAAgB,IAAI,OAAO;SACnC,KAAK,CAAC,GAAG,CAAC;SACV,GAAG,CAAC,CAAC,CAAC,EAAE,CAAC,CAAC,CAAC,IAAI,EAAE,CAAC,WAAW,EAAE,CAAC;SAChC,MAAM,CAAC,CAAC,CAAC,EAAE,CAAC,CAAC,CAAC,EAAE;QACjB,IACE,gBAAgB,KAAK,GAAG;YACxB,aAAa,CAAC,IAAI,CAChB,CAAC,CAAC,EAAE,CACF,CAAC,KAAK,gBAAgB;gBACtB,CAAC,CAAC,QAAQ,CAAC,IAAI,gBAAgB,EAAE,CAAC;gBAClC,CAAC,gBAAgB,CAAC,UAAU,CAAC,GAAG,CAAC;oBAC/B,CAAC,CAAC,QAAQ,CAAC,GAAG,gBAAgB,EAAE,CAAC,CAAC,CACvC,EACD;YACA,OAAO,IAAI,CAAA;SACZ;KACF;IAED,OAAO,KAAK,CAAA;AACd,CAAC;AAnDD,kCAmDC;AAED,SAAS,iBAAiB,CAAC,IAAY;IACrC,MAAM,SAAS,GAAG,IAAI,CAAC,WAAW,EAAE,CAAA;IACpC,OAAO,CACL,SAAS,KAAK,WAAW;QACzB,SAAS,CAAC,UAAU,CAAC,MAAM,CAAC;QAC5B,SAAS,CAAC,UAAU,CAAC,OAAO,CAAC;QAC7B,SAAS,CAAC,UAAU,CAAC,mBAAmB,CAAC,CAC1C,CAAA;AACH,CAAC"}
|
||||
{"version":3,"file":"proxy.js","sourceRoot":"","sources":["../src/proxy.ts"],"names":[],"mappings":";;;AAAA,SAAgB,WAAW,CAAC,MAAW;IACrC,MAAM,QAAQ,GAAG,MAAM,CAAC,QAAQ,KAAK,QAAQ,CAAA;IAE7C,IAAI,WAAW,CAAC,MAAM,CAAC,EAAE;QACvB,OAAO,SAAS,CAAA;KACjB;IAED,MAAM,QAAQ,GAAG,CAAC,GAAG,EAAE;QACrB,IAAI,QAAQ,EAAE;YACZ,OAAO,OAAO,CAAC,GAAG,CAAC,aAAa,CAAC,IAAI,OAAO,CAAC,GAAG,CAAC,aAAa,CAAC,CAAA;SAChE;aAAM;YACL,OAAO,OAAO,CAAC,GAAG,CAAC,YAAY,CAAC,IAAI,OAAO,CAAC,GAAG,CAAC,YAAY,CAAC,CAAA;SAC9D;IACH,CAAC,CAAC,EAAE,CAAA;IAEJ,IAAI,QAAQ,EAAE;QACZ,IAAI;YACF,OAAO,IAAI,GAAG,CAAC,QAAQ,CAAC,CAAA;SACzB;QAAC,WAAM;YACN,IAAI,CAAC,QAAQ,CAAC,UAAU,CAAC,SAAS,CAAC,IAAI,CAAC,QAAQ,CAAC,UAAU,CAAC,UAAU,CAAC;gBACrE,OAAO,IAAI,GAAG,CAAC,UAAU,QAAQ,EAAE,CAAC,CAAA;SACvC;KACF;SAAM;QACL,OAAO,SAAS,CAAA;KACjB;AACH,CAAC;AAzBD,kCAyBC;AAED,SAAgB,WAAW,CAAC,MAAW;IACrC,IAAI,CAAC,MAAM,CAAC,QAAQ,EAAE;QACpB,OAAO,KAAK,CAAA;KACb;IAED,MAAM,OAAO,GAAG,MAAM,CAAC,QAAQ,CAAA;IAC/B,IAAI,iBAAiB,CAAC,OAAO,CAAC,EAAE;QAC9B,OAAO,IAAI,CAAA;KACZ;IAED,MAAM,OAAO,GAAG,OAAO,CAAC,GAAG,CAAC,UAAU,CAAC,IAAI,OAAO,CAAC,GAAG,CAAC,UAAU,CAAC,IAAI,EAAE,CAAA;IACxE,IAAI,CAAC,OAAO,EAAE;QACZ,OAAO,KAAK,CAAA;KACb;IAED,6BAA6B;IAC7B,IAAI,OAA2B,CAAA;IAC/B,IAAI,MAAM,CAAC,IAAI,EAAE;QACf,OAAO,GAAG,MAAM,CAAC,MAAM,CAAC,IAAI,CAAC,CAAA;KAC9B;SAAM,IAAI,MAAM,CAAC,QAAQ,KAAK,OAAO,EAAE;QACtC,OAAO,GAAG,EAAE,CAAA;KACb;SAAM,IAAI,MAAM,CAAC,QAAQ,KAAK,QAAQ,EAAE;QACvC,OAAO,GAAG,GAAG,CAAA;KACd;IAED,qDAAqD;IACrD,MAAM,aAAa,GAAG,CAAC,MAAM,CAAC,QAAQ,CAAC,WAAW,EAAE,CAAC,CAAA;IACrD,IAAI,OAAO,OAAO,KAAK,QAAQ,EAAE;QAC/B,aAAa,CAAC,IAAI,CAAC,GAAG,aAAa,CAAC,CAAC,CAAC,IAAI,OAAO,EAAE,CAAC,CAAA;KACrD;IAED,uCAAuC;IACvC,KAAK,MAAM,gBAAgB,IAAI,OAAO;SACnC,KAAK,CAAC,GAAG,CAAC;SACV,GAAG,CAAC,CAAC,CAAC,EAAE,CAAC,CAAC,CAAC,IAAI,EAAE,CAAC,WAAW,EAAE,CAAC;SAChC,MAAM,CAAC,CAAC,CAAC,EAAE,CAAC,CAAC,CAAC,EAAE;QACjB,IACE,gBAAgB,KAAK,GAAG;YACxB,aAAa,CAAC,IAAI,CAChB,CAAC,CAAC,EAAE,CACF,CAAC,KAAK,gBAAgB;gBACtB,CAAC,CAAC,QAAQ,CAAC,IAAI,gBAAgB,EAAE,CAAC;gBAClC,CAAC,gBAAgB,CAAC,UAAU,CAAC,GAAG,CAAC;oBAC/B,CAAC,CAAC,QAAQ,CAAC,GAAG,gBAAgB,EAAE,CAAC,CAAC,CACvC,EACD;YACA,OAAO,IAAI,CAAA;SACZ;KACF;IAED,OAAO,KAAK,CAAA;AACd,CAAC;AAnDD,kCAmDC;AAED,SAAS,iBAAiB,CAAC,IAAY;IACrC,MAAM,SAAS,GAAG,IAAI,CAAC,WAAW,EAAE,CAAA;IACpC,OAAO,CACL,SAAS,KAAK,WAAW;QACzB,SAAS,CAAC,UAAU,CAAC,MAAM,CAAC;QAC5B,SAAS,CAAC,UAAU,CAAC,OAAO,CAAC;QAC7B,SAAS,CAAC,UAAU,CAAC,mBAAmB,CAAC,CAC1C,CAAA;AACH,CAAC"}
|
11
node_modules/@actions/http-client/package.json
generated
vendored
11
node_modules/@actions/http-client/package.json
generated
vendored
@ -1,6 +1,6 @@
{
  "name": "@actions/http-client",
  "version": "2.1.0",
  "version": "2.2.1",
  "description": "Actions Http Client",
  "keywords": [
    "github",
@ -39,10 +39,13 @@
    "url": "https://github.com/actions/toolkit/issues"
  },
  "devDependencies": {
    "@types/node": "20.7.1",
    "@types/tunnel": "0.0.3",
    "proxy": "^1.0.1"
    "proxy": "^2.1.1",
    "@types/proxy": "^1.0.1"
  },
  "dependencies": {
    "tunnel": "^0.0.6"
    "tunnel": "^0.0.6",
    "undici": "^5.25.4"
  }
}
}
34
node_modules/@azure/abort-controller/CHANGELOG.md
generated
vendored
Normal file
34
node_modules/@azure/abort-controller/CHANGELOG.md
generated
vendored
Normal file
@ -0,0 +1,34 @@
# Release History

## 1.1.0 (2022-05-05)

- Changed TS compilation target to ES2017 in order to produce smaller bundles and use more native platform features
- With the dropping of support for Node.js versions that are no longer in LTS, the dependency on `@types/node` has been updated to version 12. Read our [support policy](https://github.com/Azure/azure-sdk-for-js/blob/main/SUPPORT.md) for more details.

## 1.0.4 (2021-03-04)

Fixes issue [13985](https://github.com/Azure/azure-sdk-for-js/issues/13985) where abort event listeners that removed themselves when invoked could prevent other event listeners from being invoked.

## 1.0.3 (2021-02-23)

Support Typescript version < 3.6 by down-leveling the type definition files. ([PR 12793](https://github.com/Azure/azure-sdk-for-js/pull/12793))

## 1.0.2 (2020-01-07)

Updates the `tslib` dependency to version 2.x.

## 1.0.1 (2019-12-04)

Fixes the [bug 6271](https://github.com/Azure/azure-sdk-for-js/issues/6271) that can occur with angular prod builds due to triple-slash directives.
([PR 6344](https://github.com/Azure/azure-sdk-for-js/pull/6344))

## 1.0.0 (2019-10-29)

This release marks the general availability of the `@azure/abort-controller` package.

Removed the browser bundle. A browser-compatible library can still be created through the use of a bundler such as Rollup, Webpack, or Parcel.
([#5860](https://github.com/Azure/azure-sdk-for-js/pull/5860))

## 1.0.0-preview.2 (2019-09-09)

Listeners attached to an `AbortSignal` now receive an event with the type `abort`. (PR #4756)
21
node_modules/@azure/abort-controller/LICENSE
generated
vendored
Normal file
21
node_modules/@azure/abort-controller/LICENSE
generated
vendored
Normal file
@ -0,0 +1,21 @@
The MIT License (MIT)

Copyright (c) 2020 Microsoft

Permission is hereby granted, free of charge, to any person obtaining a copy
of this software and associated documentation files (the "Software"), to deal
in the Software without restriction, including without limitation the rights
to use, copy, modify, merge, publish, distribute, sublicense, and/or sell
copies of the Software, and to permit persons to whom the Software is
furnished to do so, subject to the following conditions:

The above copyright notice and this permission notice shall be included in all
copies or substantial portions of the Software.

THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE
SOFTWARE.
110
node_modules/@azure/abort-controller/README.md
generated
vendored
Normal file
110
node_modules/@azure/abort-controller/README.md
generated
vendored
Normal file
@ -0,0 +1,110 @@
# Azure Abort Controller client library for JavaScript

The `@azure/abort-controller` package provides `AbortController` and `AbortSignal` classes. These classes are compatible
with the [AbortController](https://developer.mozilla.org/docs/Web/API/AbortController) built into modern browsers
and the `AbortSignal` used by [fetch](https://developer.mozilla.org/docs/Web/API/Fetch_API).
Use the `AbortController` class to create an instance of the `AbortSignal` class that can be used to cancel an operation
in an Azure SDK that accepts a parameter of type `AbortSignalLike`.

Key links:

- [Source code](https://github.com/Azure/azure-sdk-for-js/tree/main/sdk/core/abort-controller)
- [Package (npm)](https://www.npmjs.com/package/@azure/abort-controller)
- [API Reference Documentation](https://docs.microsoft.com/javascript/api/overview/azure/abort-controller-readme)

## Getting started

### Installation

Install this library using npm as follows:

```
npm install @azure/abort-controller
```

## Key Concepts

Use the `AbortController` to create an `AbortSignal` which can then be passed to Azure SDK operations to cancel
pending work. The `AbortSignal` can be accessed via the `signal` property on an instantiated `AbortController`.
An `AbortSignal` can also be returned directly from a static method, e.g. `AbortController.timeout(100)`,
which produces a signal that is cancelled after 100 milliseconds.

Calling `abort()` on the instantiated `AbortController` invokes the registered `abort`
event listeners on the associated `AbortSignal`.
Any subsequent calls to `abort()` on the same controller will have no effect.

The `AbortSignal.none` static property returns an `AbortSignal` that cannot be aborted.

Multiple instances of an `AbortSignal` can be linked so that calling `abort()` on the parent signal
aborts all linked signals.
This linkage is one-way, meaning that a parent signal can affect a linked signal, but not the other way around.
To link `AbortSignals` together, pass in the parent signals to the `AbortController` constructor.

## Examples

The below examples assume that `doAsyncWork` is a function that takes a bag of properties, one of which is
the abort signal.

### Example 1 - basic usage

```js
import { AbortController } from "@azure/abort-controller";

const controller = new AbortController();
doAsyncWork({ abortSignal: controller.signal });

// at some point later
controller.abort();
```

### Example 2 - Aborting with timeout

```js
import { AbortController } from "@azure/abort-controller";

const signal = AbortController.timeout(1000);
doAsyncWork({ abortSignal: signal });
```

### Example 3 - Aborting sub-tasks

```js
import { AbortController } from "@azure/abort-controller";

const allTasksController = new AbortController();

const subTask1 = new AbortController(allTasksController.signal);
const subtask2 = new AbortController(allTasksController.signal);

allTasksController.abort(); // aborts allTasksSignal, subTask1, subTask2
subTask1.abort(); // aborts only subTask1
```

### Example 4 - Aborting with parent signal or timeout

```js
import { AbortController } from "@azure/abort-controller";

const allTasksController = new AbortController();

// create a subtask controller that can be aborted manually,
// or when either the parent task aborts or the timeout is reached.
const subTask = new AbortController(allTasksController.signal, AbortController.timeout(100));

allTasksController.abort(); // aborts allTasksSignal, subTask
subTask.abort(); // aborts only subTask
```

## Next steps

You can build and run the tests locally by executing `rushx test`. Explore the `test` folder to see advanced usage and behavior of the public classes.

## Troubleshooting

If you run into issues while using this library, please feel free to [file an issue](https://github.com/Azure/azure-sdk-for-js/issues/new).

## Contributing

If you'd like to contribute to this library, please read the [contributing guide](https://github.com/Azure/azure-sdk-for-js/blob/main/CONTRIBUTING.md) to learn more about how to build and test the code.
118
node_modules/@azure/abort-controller/dist-esm/src/AbortController.js
generated
vendored
Normal file
118
node_modules/@azure/abort-controller/dist-esm/src/AbortController.js
generated
vendored
Normal file
@ -0,0 +1,118 @@
|
||||
// Copyright (c) Microsoft Corporation.
// Licensed under the MIT license.
import { AbortSignal, abortSignal } from "./AbortSignal";
/**
 * This error is thrown when an asynchronous operation has been aborted.
 * Check for this error by testing that the `name` property of the
 * error matches `"AbortError"`.
 *
 * @example
 * ```ts
 * const controller = new AbortController();
 * controller.abort();
 * try {
 *   doAsyncWork(controller.signal)
 * } catch (e) {
 *   if (e.name === 'AbortError') {
 *     // handle abort error here.
 *   }
 * }
 * ```
 */
export class AbortError extends Error {
    constructor(message) {
        super(message);
        this.name = "AbortError";
    }
}
/**
 * An AbortController provides an AbortSignal and the associated controls to signal
 * that an asynchronous operation should be aborted.
 *
 * @example
 * Abort an operation when another event fires
 * ```ts
 * const controller = new AbortController();
 * const signal = controller.signal;
 * doAsyncWork(signal);
 * button.addEventListener('click', () => controller.abort());
 * ```
 *
 * @example
 * Share an aborter across multiple operations for 30s
 * ```ts
 * // Upload the same data to 2 different data centers at the same time,
 * // aborting the other when either of them finishes
 * const controller = AbortController.withTimeout(30 * 1000);
 * doAsyncWork(controller.signal).then(controller.abort);
 * doAsyncWork(controller.signal).then(controller.abort);
 * ```
 *
 * @example
 * Cascaded aborting
 * ```ts
 * // All operations can't take more than 30 seconds
 * const aborter = Aborter.timeout(30 * 1000);
 *
 * // Following 2 operations can't take more than 25 seconds
 * await doAsyncWork(aborter.withTimeout(25 * 1000));
 * await doAsyncWork(aborter.withTimeout(25 * 1000));
 * ```
 */
export class AbortController {
    // eslint-disable-next-line @typescript-eslint/explicit-module-boundary-types
    constructor(parentSignals) {
        this._signal = new AbortSignal();
        if (!parentSignals) {
            return;
        }
        // coerce parentSignals into an array
        if (!Array.isArray(parentSignals)) {
            // eslint-disable-next-line prefer-rest-params
            parentSignals = arguments;
        }
        for (const parentSignal of parentSignals) {
            // if the parent signal has already had abort() called,
            // then call abort on this signal as well.
            if (parentSignal.aborted) {
                this.abort();
            }
            else {
                // when the parent signal aborts, this signal should as well.
                parentSignal.addEventListener("abort", () => {
                    this.abort();
                });
            }
        }
    }
    /**
     * The AbortSignal associated with this controller that will signal aborted
     * when the abort method is called on this controller.
     *
     * @readonly
     */
    get signal() {
        return this._signal;
    }
    /**
     * Signal to any operations that were passed this controller's associated abort signal
     * to cancel any remaining work and throw an `AbortError`.
     */
    abort() {
        abortSignal(this._signal);
    }
    /**
     * Creates a new AbortSignal instance that will abort after the provided ms.
     * @param ms - Elapsed time in milliseconds to trigger an abort.
     */
    static timeout(ms) {
        const signal = new AbortSignal();
        const timer = setTimeout(abortSignal, ms, signal);
        // Prevent the active Timer from keeping the Node.js event loop active.
        if (typeof timer.unref === "function") {
            timer.unref();
        }
        return signal;
    }
}
//# sourceMappingURL=AbortController.js.map
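A minimal usage sketch of the vendored file above (not part of the diff): it exercises the `AbortController.timeout()` static helper and the `AbortError` class defined in this file. The `delay` helper and the 100 ms / 1000 ms durations are illustrative assumptions only.

```ts
import { AbortController, AbortError, AbortSignal } from "@azure/abort-controller";

// Hypothetical helper: resolves after `ms`, or rejects with an AbortError
// as soon as the given signal aborts.
function delay(ms: number, signal: AbortSignal): Promise<void> {
  return new Promise((resolve, reject) => {
    if (signal.aborted) {
      return reject(new AbortError("The operation was aborted."));
    }
    const timer = setTimeout(resolve, ms);
    signal.addEventListener("abort", () => {
      clearTimeout(timer);
      reject(new AbortError("The operation was aborted."));
    });
  });
}

async function main(): Promise<void> {
  // AbortController.timeout(ms) returns an AbortSignal that aborts after 100 ms.
  const signal = AbortController.timeout(100);
  try {
    await delay(1000, signal); // outlives the timeout, so the signal aborts first
  } catch (e) {
    if (e instanceof AbortError) {
      console.log("work was aborted by the timeout signal");
    }
  }
}

main();
```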
1
node_modules/@azure/abort-controller/dist-esm/src/AbortController.js.map
generated
vendored
Normal file
File diff suppressed because one or more lines are too long
115
node_modules/@azure/abort-controller/dist-esm/src/AbortSignal.js
generated
vendored
Normal file
@ -0,0 +1,115 @@
// Copyright (c) Microsoft Corporation.
// Licensed under the MIT license.
/// <reference path="../shims-public.d.ts" />
const listenersMap = new WeakMap();
const abortedMap = new WeakMap();
/**
 * An aborter instance implements the AbortSignal interface and can abort HTTP requests.
 *
 * - Call AbortSignal.none to create a new AbortSignal instance that cannot be cancelled.
 *   Use `AbortSignal.none` when you are required to pass a cancellation token but the operation
 *   cannot or will not ever be cancelled.
 *
 * @example
 * Abort without timeout
 * ```ts
 * await doAsyncWork(AbortSignal.none);
 * ```
 */
export class AbortSignal {
    constructor() {
        /**
         * onabort event listener.
         */
        this.onabort = null;
        listenersMap.set(this, []);
        abortedMap.set(this, false);
    }
    /**
     * Status of whether aborted or not.
     *
     * @readonly
     */
    get aborted() {
        if (!abortedMap.has(this)) {
            throw new TypeError("Expected `this` to be an instance of AbortSignal.");
        }
        return abortedMap.get(this);
    }
    /**
     * Creates a new AbortSignal instance that will never be aborted.
     *
     * @readonly
     */
    static get none() {
        return new AbortSignal();
    }
    /**
     * Adds a new "abort" event listener; only the "abort" event is supported.
     *
     * @param _type - Only support "abort" event
     * @param listener - The listener to be added
     */
    addEventListener(
    // tslint:disable-next-line:variable-name
    _type, listener) {
        if (!listenersMap.has(this)) {
            throw new TypeError("Expected `this` to be an instance of AbortSignal.");
        }
        const listeners = listenersMap.get(this);
        listeners.push(listener);
    }
    /**
     * Removes an "abort" event listener; only the "abort" event is supported.
     *
     * @param _type - Only support "abort" event
     * @param listener - The listener to be removed
     */
    removeEventListener(
    // tslint:disable-next-line:variable-name
    _type, listener) {
        if (!listenersMap.has(this)) {
            throw new TypeError("Expected `this` to be an instance of AbortSignal.");
        }
        const listeners = listenersMap.get(this);
        const index = listeners.indexOf(listener);
        if (index > -1) {
            listeners.splice(index, 1);
        }
    }
    /**
     * Dispatches a synthetic event to the AbortSignal.
     */
    dispatchEvent(_event) {
        throw new Error("This is a stub dispatchEvent implementation that should not be used. It only exists for type-checking purposes.");
    }
}
/**
 * Helper to trigger an abort event immediately; the onabort callback and all "abort" event listeners will be triggered.
 * Will try to trigger the abort event for all linked AbortSignal nodes.
 *
 * - If there is a timeout, the timer will be cancelled.
 * - If aborted is true, nothing will happen.
 *
 * @internal
 */
// eslint-disable-next-line @azure/azure-sdk/ts-use-interface-parameters
export function abortSignal(signal) {
    if (signal.aborted) {
        return;
    }
    if (signal.onabort) {
        signal.onabort.call(signal);
    }
    const listeners = listenersMap.get(signal);
    if (listeners) {
        // Create a copy of listeners so mutations to the array
        // (e.g. via removeListener calls) don't affect the listeners
        // we invoke.
        listeners.slice().forEach((listener) => {
            listener.call(signal, { type: "abort" });
        });
    }
    abortedMap.set(signal, true);
}
//# sourceMappingURL=AbortSignal.js.map
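A short sketch of the listener bookkeeping this file implements (again, not part of the diff): listeners registered with `addEventListener` run when the signal is aborted, a listener removed beforehand with `removeEventListener` does not, and the `onabort` callback fires before the listeners. The listener names are illustrative assumptions.

```ts
import { AbortController } from "@azure/abort-controller";

const controller = new AbortController();
const signal = controller.signal;

// Two illustrative listeners; only "abort" is supported as the event type.
const keep = () => console.log("abort observed");
const dropped = () => console.log("this never runs");

signal.addEventListener("abort", keep);
signal.addEventListener("abort", dropped);
signal.removeEventListener("abort", dropped); // un-registers the second listener

signal.onabort = () => console.log("onabort fired"); // optional callback slot

controller.abort();          // triggers onabort, then the remaining listener
console.log(signal.aborted); // true
```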
1
node_modules/@azure/abort-controller/dist-esm/src/AbortSignal.js.map
generated
vendored
Normal file
File diff suppressed because one or more lines are too long
12
node_modules/@azure/abort-controller/dist-esm/src/index.js
generated
vendored
Normal file
@ -0,0 +1,12 @@
// Copyright (c) Microsoft Corporation.
// Licensed under the MIT license.
// Changes to Aborter
// * Rename Aborter to AbortSignal
// * Remove withValue and getValue - async context should be solved differently/wholistically, not tied to cancellation
// * Remove withTimeout, it's moved to the controller
// * AbortSignal constructor no longer takes a parent. Cancellation graphs are created from the controller.
// Potential changes to align with DOM Spec
// * dispatchEvent on Signal
export { AbortController, AbortError } from "./AbortController";
export { AbortSignal } from "./AbortSignal";
//# sourceMappingURL=index.js.map
1
node_modules/@azure/abort-controller/dist-esm/src/index.js.map
generated
vendored
Normal file
@ -0,0 +1 @@
{"version":3,"file":"index.js","sourceRoot":"","sources":["../../src/index.ts"],"names":[],"mappings":"AAAA,uCAAuC;AACvC,kCAAkC;AAElC,qBAAqB;AACrB,kCAAkC;AAClC,uHAAuH;AACvH,qDAAqD;AACrD,2GAA2G;AAE3G,2CAA2C;AAC3C,4BAA4B;AAE5B,OAAO,EAAE,eAAe,EAAE,UAAU,EAAE,MAAM,mBAAmB,CAAC;AAChE,OAAO,EAAE,WAAW,EAAmB,MAAM,eAAe,CAAC","sourcesContent":["// Copyright (c) Microsoft Corporation.\n// Licensed under the MIT license.\n\n// Changes to Aborter\n// * Rename Aborter to AbortSignal\n// * Remove withValue and getValue - async context should be solved differently/wholistically, not tied to cancellation\n// * Remove withTimeout, it's moved to the controller\n// * AbortSignal constructor no longer takes a parent. Cancellation graphs are created from the controller.\n\n// Potential changes to align with DOM Spec\n// * dispatchEvent on Signal\n\nexport { AbortController, AbortError } from \"./AbortController\";\nexport { AbortSignal, AbortSignalLike } from \"./AbortSignal\";\n"]}
239
node_modules/@azure/abort-controller/dist/index.js
generated
vendored
Normal file
@ -0,0 +1,239 @@
'use strict';

Object.defineProperty(exports, '__esModule', { value: true });

// Copyright (c) Microsoft Corporation.
// Licensed under the MIT license.
/// <reference path="../shims-public.d.ts" />
const listenersMap = new WeakMap();
const abortedMap = new WeakMap();
/**
 * An aborter instance implements the AbortSignal interface and can abort HTTP requests.
 *
 * - Call AbortSignal.none to create a new AbortSignal instance that cannot be cancelled.
 *   Use `AbortSignal.none` when you are required to pass a cancellation token but the operation
 *   cannot or will not ever be cancelled.
 *
 * @example
 * Abort without timeout
 * ```ts
 * await doAsyncWork(AbortSignal.none);
 * ```
 */
class AbortSignal {
    constructor() {
        /**
         * onabort event listener.
         */
        this.onabort = null;
        listenersMap.set(this, []);
        abortedMap.set(this, false);
    }
    /**
     * Status of whether aborted or not.
     *
     * @readonly
     */
    get aborted() {
        if (!abortedMap.has(this)) {
            throw new TypeError("Expected `this` to be an instance of AbortSignal.");
        }
        return abortedMap.get(this);
    }
    /**
     * Creates a new AbortSignal instance that will never be aborted.
     *
     * @readonly
     */
    static get none() {
        return new AbortSignal();
    }
    /**
     * Adds a new "abort" event listener; only the "abort" event is supported.
     *
     * @param _type - Only support "abort" event
     * @param listener - The listener to be added
     */
    addEventListener(
    // tslint:disable-next-line:variable-name
    _type, listener) {
        if (!listenersMap.has(this)) {
            throw new TypeError("Expected `this` to be an instance of AbortSignal.");
        }
        const listeners = listenersMap.get(this);
        listeners.push(listener);
    }
    /**
     * Removes an "abort" event listener; only the "abort" event is supported.
     *
     * @param _type - Only support "abort" event
     * @param listener - The listener to be removed
     */
    removeEventListener(
    // tslint:disable-next-line:variable-name
    _type, listener) {
        if (!listenersMap.has(this)) {
            throw new TypeError("Expected `this` to be an instance of AbortSignal.");
        }
        const listeners = listenersMap.get(this);
        const index = listeners.indexOf(listener);
        if (index > -1) {
            listeners.splice(index, 1);
        }
    }
    /**
     * Dispatches a synthetic event to the AbortSignal.
     */
    dispatchEvent(_event) {
        throw new Error("This is a stub dispatchEvent implementation that should not be used. It only exists for type-checking purposes.");
    }
}
/**
 * Helper to trigger an abort event immediately; the onabort callback and all "abort" event listeners will be triggered.
 * Will try to trigger the abort event for all linked AbortSignal nodes.
 *
 * - If there is a timeout, the timer will be cancelled.
 * - If aborted is true, nothing will happen.
 *
 * @internal
 */
// eslint-disable-next-line @azure/azure-sdk/ts-use-interface-parameters
function abortSignal(signal) {
    if (signal.aborted) {
        return;
    }
    if (signal.onabort) {
        signal.onabort.call(signal);
    }
    const listeners = listenersMap.get(signal);
    if (listeners) {
        // Create a copy of listeners so mutations to the array
        // (e.g. via removeListener calls) don't affect the listeners
        // we invoke.
        listeners.slice().forEach((listener) => {
            listener.call(signal, { type: "abort" });
        });
    }
    abortedMap.set(signal, true);
}

// Copyright (c) Microsoft Corporation.
/**
 * This error is thrown when an asynchronous operation has been aborted.
 * Check for this error by testing that the `name` property of the
 * error matches `"AbortError"`.
 *
 * @example
 * ```ts
 * const controller = new AbortController();
 * controller.abort();
 * try {
 *   doAsyncWork(controller.signal)
 * } catch (e) {
 *   if (e.name === 'AbortError') {
 *     // handle abort error here.
 *   }
 * }
 * ```
 */
class AbortError extends Error {
    constructor(message) {
        super(message);
        this.name = "AbortError";
    }
}
/**
 * An AbortController provides an AbortSignal and the associated controls to signal
 * that an asynchronous operation should be aborted.
 *
 * @example
 * Abort an operation when another event fires
 * ```ts
 * const controller = new AbortController();
 * const signal = controller.signal;
 * doAsyncWork(signal);
 * button.addEventListener('click', () => controller.abort());
 * ```
 *
 * @example
 * Share an aborter across multiple operations for 30s
 * ```ts
 * // Upload the same data to 2 different data centers at the same time,
 * // aborting the other when either of them finishes
 * const controller = AbortController.withTimeout(30 * 1000);
 * doAsyncWork(controller.signal).then(controller.abort);
 * doAsyncWork(controller.signal).then(controller.abort);
 * ```
 *
 * @example
 * Cascaded aborting
 * ```ts
 * // All operations can't take more than 30 seconds
 * const aborter = Aborter.timeout(30 * 1000);
 *
 * // Following 2 operations can't take more than 25 seconds
 * await doAsyncWork(aborter.withTimeout(25 * 1000));
 * await doAsyncWork(aborter.withTimeout(25 * 1000));
 * ```
 */
class AbortController {
    // eslint-disable-next-line @typescript-eslint/explicit-module-boundary-types
    constructor(parentSignals) {
        this._signal = new AbortSignal();
        if (!parentSignals) {
            return;
        }
        // coerce parentSignals into an array
        if (!Array.isArray(parentSignals)) {
            // eslint-disable-next-line prefer-rest-params
            parentSignals = arguments;
        }
        for (const parentSignal of parentSignals) {
            // if the parent signal has already had abort() called,
            // then call abort on this signal as well.
            if (parentSignal.aborted) {
                this.abort();
            }
            else {
                // when the parent signal aborts, this signal should as well.
                parentSignal.addEventListener("abort", () => {
                    this.abort();
                });
            }
        }
    }
    /**
     * The AbortSignal associated with this controller that will signal aborted
     * when the abort method is called on this controller.
     *
     * @readonly
     */
    get signal() {
        return this._signal;
    }
    /**
     * Signal to any operations that were passed this controller's associated abort signal
     * to cancel any remaining work and throw an `AbortError`.
     */
    abort() {
        abortSignal(this._signal);
    }
    /**
     * Creates a new AbortSignal instance that will abort after the provided ms.
     * @param ms - Elapsed time in milliseconds to trigger an abort.
     */
    static timeout(ms) {
        const signal = new AbortSignal();
        const timer = setTimeout(abortSignal, ms, signal);
        // Prevent the active Timer from keeping the Node.js event loop active.
        if (typeof timer.unref === "function") {
            timer.unref();
        }
        return signal;
    }
}

exports.AbortController = AbortController;
exports.AbortError = AbortError;
exports.AbortSignal = AbortSignal;
//# sourceMappingURL=index.js.map
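One last sketch, assuming the same package API, showing the parent-signal linking implemented by the `AbortController` constructor in this bundled build: a child controller created from a parent signal aborts as soon as the parent does. Variable names are illustrative.

```ts
import { AbortController } from "@azure/abort-controller";

const parent = new AbortController();
// Passing a parent signal links the child: when the parent aborts, so does the child.
const child = new AbortController(parent.signal);

child.signal.addEventListener("abort", () => {
  console.log("child signal aborted because the parent aborted");
});

parent.abort();
console.log(parent.signal.aborted); // true
console.log(child.signal.aborted);  // true: the abort cascaded down
```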
Some files were not shown because too many files have changed in this diff.