Mirror of https://github.com/actions/cache.git
Synced 2025-12-19 21:58:57 +08:00

Compare commits (1 commit)
Branch: robherley/...dependabot

| Author | SHA1 | Date |
|---|---|---|
|  | 4755714be1 |  |
6 .github/workflows/issue-opened-workflow.yml vendored
@@ -14,3 +14,9 @@ jobs:
      - name: add_assignees
        run: |
          curl -X POST -H "Accept: application/vnd.github+json" -H "Authorization: Bearer ${{ secrets.GITHUB_TOKEN}}" https://api.github.com/repos/${{github.repository}}/issues/${{ github.event.issue.number}}/assignees -d '{"assignees":["${{steps.oncall.outputs.CURRENT}}"]}'

      - uses: actions/add-to-project@v0.4.0
        name: Add to Project Board
        with:
          project-url: https://github.com/orgs/actions/projects/12
          github-token: ${{ secrets.CACHE_BOARD_TOKEN }}
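For readers tracing the step above: the curl command is a single REST call against the issue-assignees endpoint. A minimal TypeScript sketch of the same request, under stated assumptions (token, owner, repo, issueNumber, and assignee are hypothetical inputs standing in for the workflow expressions; this is not code from the repository):

// Sketch: assign a user to an issue, mirroring the curl call in the
// workflow hunk above. All parameters here are hypothetical inputs.
async function addAssignee(
    token: string,
    owner: string,
    repo: string,
    issueNumber: number,
    assignee: string
): Promise<void> {
    const res = await fetch(
        `https://api.github.com/repos/${owner}/${repo}/issues/${issueNumber}/assignees`,
        {
            method: "POST",
            headers: {
                Accept: "application/vnd.github+json",
                Authorization: `Bearer ${token}`
            },
            body: JSON.stringify({ assignees: [assignee] })
        }
    );
    if (!res.ok) {
        throw new Error(`Failed to add assignee: ${res.status}`);
    }
}

The PR-opened workflow below makes the same call with the pull request number, since GitHub treats pull requests as issues for assignment purposes.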
6 .github/workflows/pr-opened-workflow.yml vendored
@@ -18,3 +18,9 @@ jobs:
      - name: Add Assignee
        run: |
          curl -X POST -H "Accept: application/vnd.github+json" -H "Authorization: Bearer ${{ secrets.GITHUB_TOKEN}}" https://api.github.com/repos/${{github.repository}}/issues/${{ github.event.pull_request.number}}/assignees -d '{"assignees":["${{steps.oncall.outputs.CURRENT}}"]}'

      - uses: actions/add-to-project@v0.4.0
        name: Add to Project Board
        with:
          project-url: https://github.com/orgs/actions/projects/12
          github-token: ${{ secrets.CACHE_BOARD_TOKEN }}
BIN .licenses/npm/@actions/cache.dep.yml generated (binary file not shown)
BIN .licenses/npm/@actions/http-client.dep.yml generated (binary file not shown)
BIN .licenses/npm/@azure/core-http.dep.yml generated (binary file not shown)
BIN .licenses/npm/@azure/ms-rest-js.dep.yml generated (binary file not shown)
BIN .licenses/npm/ip-regex.dep.yml generated, new file (binary file not shown)
BIN .licenses/npm/psl.dep.yml generated, new file (binary file not shown)
BIN .licenses/npm/punycode.dep.yml generated, new file (binary file not shown)
BIN .licenses/npm/semver.dep.yml generated (binary file not shown)
BIN .licenses/npm/tough-cookie.dep.yml generated, new file (binary file not shown)
BIN .licenses/npm/xml2js.dep.yml generated (binary file not shown)
@@ -107,12 +107,3 @@
### 3.3.1

- Reduced segment size to 128MB and segment timeout to 10 minutes to fail fast in case the cache download is stuck.

### 3.3.2

- Fixes bug with Azure SDK causing blob downloads to get stuck.

### 3.3.3

- Updates @actions/cache to v3.2.3 to fix accidental mutated path arguments to `getCacheVersion` [actions/toolkit#1378](https://github.com/actions/toolkit/pull/1378)
- Additional audit fixes of npm package(s)
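The 3.3.3 entry refers to the defensive copy visible later in this diff (`const components = paths.slice();` versus `const components = paths;`): without the copy, pushing the compression method and version salt onto `components` also mutates the caller's `paths` array. A minimal standalone sketch of the difference (illustrative names, not the toolkit source):

import * as crypto from "crypto";

// Sketch of the bug the 3.3.3 entry describes: deriving a cache version
// must not mutate the caller's `paths` array.
function getCacheVersionBuggy(paths: string[]): string {
    const components = paths;          // aliases the caller's array
    components.push("version-salt");   // caller's `paths` now has an extra entry
    return crypto.createHash("sha256").update(components.join("|")).digest("hex");
}

function getCacheVersionFixed(paths: string[]): string {
    const components = paths.slice();  // defensive copy, caller unaffected
    components.push("version-salt");
    return crypto.createHash("sha256").update(components.join("|")).digest("hex");
}

const paths = ["node_modules"];
getCacheVersionFixed(paths);
console.log(paths.length); // 1, as expected; the buggy variant would leave 2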
@@ -2,7 +2,7 @@ import * as cache from "@actions/cache";
import * as core from "@actions/core";

import { Events, RefKey } from "../src/constants";
import { restoreRun } from "../src/restoreImpl";
import run from "../src/restore";
import * as actionUtils from "../src/utils/actionUtils";
import * as testUtils from "../src/utils/testUtils";

@@ -71,7 +71,7 @@ test("restore with no cache found", async () => {
return Promise.resolve(undefined);
});

await restoreRun();
await run();

expect(restoreCacheMock).toHaveBeenCalledTimes(1);
expect(restoreCacheMock).toHaveBeenCalledWith(
@@ -114,7 +114,7 @@ test("restore with restore keys and no cache found", async () => {
return Promise.resolve(undefined);
});

await restoreRun();
await run();

expect(restoreCacheMock).toHaveBeenCalledTimes(1);
expect(restoreCacheMock).toHaveBeenCalledWith(
@@ -156,7 +156,7 @@ test("restore with cache found for key", async () => {
return Promise.resolve(key);
});

await restoreRun();
await run();

expect(restoreCacheMock).toHaveBeenCalledTimes(1);
expect(restoreCacheMock).toHaveBeenCalledWith(
@@ -201,7 +201,7 @@ test("restore with cache found for restore key", async () => {
return Promise.resolve(restoreKey);
});

await restoreRun();
await run();

expect(restoreCacheMock).toHaveBeenCalledTimes(1);
expect(restoreCacheMock).toHaveBeenCalledWith(
@@ -246,7 +246,7 @@ test("Fail restore when fail on cache miss is enabled and primary + restore keys
return Promise.resolve(undefined);
});

await restoreRun();
await run();

expect(restoreCacheMock).toHaveBeenCalledTimes(1);
expect(restoreCacheMock).toHaveBeenCalledWith(
@@ -289,7 +289,7 @@ test("restore when fail on cache miss is enabled and primary key doesn't match r
return Promise.resolve(restoreKey);
});

await restoreRun();
await run();

expect(restoreCacheMock).toHaveBeenCalledTimes(1);
expect(restoreCacheMock).toHaveBeenCalledWith(
@@ -335,7 +335,7 @@ test("restore with fail on cache miss disabled and no cache found", async () =>
return Promise.resolve(undefined);
});

await restoreRun();
await run();

expect(restoreCacheMock).toHaveBeenCalledTimes(1);
expect(restoreCacheMock).toHaveBeenCalledWith(
@@ -2,7 +2,7 @@ import * as cache from "@actions/cache";
import * as core from "@actions/core";

import { Events, Inputs, RefKey } from "../src/constants";
import { restoreImpl } from "../src/restoreImpl";
import run from "../src/restoreImpl";
import { StateProvider } from "../src/stateProvider";
import * as actionUtils from "../src/utils/actionUtils";
import * as testUtils from "../src/utils/testUtils";
@@ -60,7 +60,7 @@ test("restore with invalid event outputs warning", async () => {
const invalidEvent = "commit_comment";
process.env[Events.Key] = invalidEvent;
delete process.env[RefKey];
await restoreImpl(new StateProvider());
await run(new StateProvider());
expect(logWarningMock).toHaveBeenCalledWith(
`Event Validation Error: The event type ${invalidEvent} is not supported because it's not tied to a branch or tag ref.`
);
@@ -76,7 +76,7 @@ test("restore without AC available should no-op", async () => {
const restoreCacheMock = jest.spyOn(cache, "restoreCache");
const setCacheHitOutputMock = jest.spyOn(core, "setOutput");

await restoreImpl(new StateProvider());
await run(new StateProvider());

expect(restoreCacheMock).toHaveBeenCalledTimes(0);
expect(setCacheHitOutputMock).toHaveBeenCalledTimes(1);
@@ -92,7 +92,7 @@ test("restore on GHES without AC available should no-op", async () => {
const restoreCacheMock = jest.spyOn(cache, "restoreCache");
const setCacheHitOutputMock = jest.spyOn(core, "setOutput");

await restoreImpl(new StateProvider());
await run(new StateProvider());

expect(restoreCacheMock).toHaveBeenCalledTimes(0);
expect(setCacheHitOutputMock).toHaveBeenCalledTimes(1);
@@ -119,7 +119,7 @@ test("restore on GHES with AC available ", async () => {
return Promise.resolve(key);
});

await restoreImpl(new StateProvider());
await run(new StateProvider());

expect(restoreCacheMock).toHaveBeenCalledTimes(1);
expect(restoreCacheMock).toHaveBeenCalledWith(
@@ -143,7 +143,7 @@ test("restore on GHES with AC available ", async () => {
test("restore with no path should fail", async () => {
const failedMock = jest.spyOn(core, "setFailed");
const restoreCacheMock = jest.spyOn(cache, "restoreCache");
await restoreImpl(new StateProvider());
await run(new StateProvider());
expect(restoreCacheMock).toHaveBeenCalledTimes(0);
// this input isn't necessary for restore b/c tarball contains entries relative to workspace
expect(failedMock).not.toHaveBeenCalledWith(
@@ -155,7 +155,7 @@ test("restore with no key", async () => {
testUtils.setInput(Inputs.Path, "node_modules");
const failedMock = jest.spyOn(core, "setFailed");
const restoreCacheMock = jest.spyOn(cache, "restoreCache");
await restoreImpl(new StateProvider());
await run(new StateProvider());
expect(restoreCacheMock).toHaveBeenCalledTimes(0);
expect(failedMock).toHaveBeenCalledWith(
"Input required and not supplied: key"
@@ -174,7 +174,7 @@ test("restore with too many keys should fail", async () => {
});
const failedMock = jest.spyOn(core, "setFailed");
const restoreCacheMock = jest.spyOn(cache, "restoreCache");
await restoreImpl(new StateProvider());
await run(new StateProvider());
expect(restoreCacheMock).toHaveBeenCalledTimes(1);
expect(restoreCacheMock).toHaveBeenCalledWith(
[path],
@@ -200,7 +200,7 @@ test("restore with large key should fail", async () => {
});
const failedMock = jest.spyOn(core, "setFailed");
const restoreCacheMock = jest.spyOn(cache, "restoreCache");
await restoreImpl(new StateProvider());
await run(new StateProvider());
expect(restoreCacheMock).toHaveBeenCalledTimes(1);
expect(restoreCacheMock).toHaveBeenCalledWith(
[path],
@@ -226,7 +226,7 @@ test("restore with invalid key should fail", async () => {
});
const failedMock = jest.spyOn(core, "setFailed");
const restoreCacheMock = jest.spyOn(cache, "restoreCache");
await restoreImpl(new StateProvider());
await run(new StateProvider());
expect(restoreCacheMock).toHaveBeenCalledTimes(1);
expect(restoreCacheMock).toHaveBeenCalledWith(
[path],
@@ -260,7 +260,7 @@ test("restore with no cache found", async () => {
return Promise.resolve(undefined);
});

await restoreImpl(new StateProvider());
await run(new StateProvider());

expect(restoreCacheMock).toHaveBeenCalledTimes(1);
expect(restoreCacheMock).toHaveBeenCalledWith(
@@ -301,7 +301,7 @@ test("restore with restore keys and no cache found", async () => {
return Promise.resolve(undefined);
});

await restoreImpl(new StateProvider());
await run(new StateProvider());

expect(restoreCacheMock).toHaveBeenCalledTimes(1);
expect(restoreCacheMock).toHaveBeenCalledWith(
@@ -341,7 +341,7 @@ test("restore with cache found for key", async () => {
return Promise.resolve(key);
});

await restoreImpl(new StateProvider());
await run(new StateProvider());

expect(restoreCacheMock).toHaveBeenCalledTimes(1);
expect(restoreCacheMock).toHaveBeenCalledWith(
@@ -383,7 +383,7 @@ test("restore with cache found for restore key", async () => {
return Promise.resolve(restoreKey);
});

await restoreImpl(new StateProvider());
await run(new StateProvider());

expect(restoreCacheMock).toHaveBeenCalledTimes(1);
expect(restoreCacheMock).toHaveBeenCalledWith(
@@ -424,7 +424,7 @@ test("restore with lookup-only set", async () => {
return Promise.resolve(key);
});

await restoreImpl(new StateProvider());
await run(new StateProvider());

expect(restoreCacheMock).toHaveBeenCalledTimes(1);
expect(restoreCacheMock).toHaveBeenCalledWith(
@@ -2,7 +2,7 @@ import * as cache from "@actions/cache";
import * as core from "@actions/core";

import { Events, RefKey } from "../src/constants";
import { restoreOnlyRun } from "../src/restoreImpl";
import run from "../src/restoreOnly";
import * as actionUtils from "../src/utils/actionUtils";
import * as testUtils from "../src/utils/testUtils";

@@ -72,7 +72,7 @@ test("restore with no cache found", async () => {
return Promise.resolve(undefined);
});

await restoreOnlyRun();
await run();

expect(restoreCacheMock).toHaveBeenCalledTimes(1);
expect(restoreCacheMock).toHaveBeenCalledWith(
@@ -114,7 +114,7 @@ test("restore with restore keys and no cache found", async () => {
return Promise.resolve(undefined);
});

await restoreOnlyRun();
await run();

expect(restoreCacheMock).toHaveBeenCalledTimes(1);
expect(restoreCacheMock).toHaveBeenCalledWith(
@@ -153,7 +153,7 @@ test("restore with cache found for key", async () => {
return Promise.resolve(key);
});

await restoreOnlyRun();
await run();

expect(restoreCacheMock).toHaveBeenCalledTimes(1);
expect(restoreCacheMock).toHaveBeenCalledWith(
@@ -196,7 +196,7 @@ test("restore with cache found for restore key", async () => {
return Promise.resolve(restoreKey);
});

await restoreOnlyRun();
await run();

expect(restoreCacheMock).toHaveBeenCalledTimes(1);
expect(restoreCacheMock).toHaveBeenCalledWith(
403 dist/restore-only/index.js vendored
@@ -1127,19 +1127,17 @@ function getArchiveFileSizeInBytes(filePath) {
}
exports.getArchiveFileSizeInBytes = getArchiveFileSizeInBytes;
function resolvePaths(patterns) {
var _a, e_1, _b, _c;
var _d;
var e_1, _a;
var _b;
return __awaiter(this, void 0, void 0, function* () {
const paths = [];
const workspace = (_d = process.env['GITHUB_WORKSPACE']) !== null && _d !== void 0 ? _d : process.cwd();
const workspace = (_b = process.env['GITHUB_WORKSPACE']) !== null && _b !== void 0 ? _b : process.cwd();
const globber = yield glob.create(patterns.join('\n'), {
implicitDescendants: false
});
try {
for (var _e = true, _f = __asyncValues(globber.globGenerator()), _g; _g = yield _f.next(), _a = _g.done, !_a; _e = true) {
_c = _g.value;
_e = false;
const file = _c;
for (var _c = __asyncValues(globber.globGenerator()), _d; _d = yield _c.next(), !_d.done;) {
const file = _d.value;
const relativeFile = path
.relative(workspace, file)
.replace(new RegExp(`\\${path.sep}`, 'g'), '/');
@@ -1157,7 +1155,7 @@ function resolvePaths(patterns) {
catch (e_1_1) { e_1 = { error: e_1_1 }; }
finally {
try {
if (!_e && !_a && (_b = _f.return)) yield _b.call(_f);
if (_d && !_d.done && (_a = _c.return)) yield _a.call(_c);
}
finally { if (e_1) throw e_1.error; }
}
@@ -3384,8 +3382,7 @@ function createHttpClient() {
return new http_client_1.HttpClient('actions/cache', [bearerCredentialHandler], getRequestOptions());
}
function getCacheVersion(paths, compressionMethod, enableCrossOsArchive = false) {
// don't pass changes upstream
const components = paths.slice();
const components = paths;
// Add compression method to cache version to restore
// compressed cache as per compression method
if (compressionMethod) {
@@ -3397,7 +3394,10 @@ function getCacheVersion(paths, compressionMethod, enableCrossOsArchive = false)
}
// Add salt to cache version to support breaking changes in cache entry
components.push(versionSalt);
return crypto.createHash('sha256').update(components.join('|')).digest('hex');
return crypto
.createHash('sha256')
.update(components.join('|'))
.digest('hex');
}
exports.getCacheVersion = getCacheVersion;
function getCacheEntry(keys, paths, options) {
@@ -3450,21 +3450,13 @@ function downloadCache(archiveLocation, archivePath, options) {
return __awaiter(this, void 0, void 0, function* () {
const archiveUrl = new url_1.URL(archiveLocation);
const downloadOptions = (0, options_1.getDownloadOptions)(options);
if (archiveUrl.hostname.endsWith('.blob.core.windows.net')) {
if (downloadOptions.useAzureSdk) {
// Use Azure storage SDK to download caches hosted on Azure to improve speed and reliability.
yield (0, downloadUtils_1.downloadCacheStorageSDK)(archiveLocation, archivePath, downloadOptions);
}
else if (downloadOptions.concurrentBlobDownloads) {
// Use concurrent implementation with HttpClient to work around blob SDK issue
yield (0, downloadUtils_1.downloadCacheHttpClientConcurrent)(archiveLocation, archivePath, downloadOptions);
}
else {
// Otherwise, download using the Actions http-client.
yield (0, downloadUtils_1.downloadCacheHttpClient)(archiveLocation, archivePath);
}
if (downloadOptions.useAzureSdk &&
archiveUrl.hostname.endsWith('.blob.core.windows.net')) {
// Use Azure storage SDK to download caches hosted on Azure to improve speed and reliability.
yield (0, downloadUtils_1.downloadCacheStorageSDK)(archiveLocation, archivePath, downloadOptions);
}
else {
// Otherwise, download using the Actions http-client.
yield (0, downloadUtils_1.downloadCacheHttpClient)(archiveLocation, archivePath);
}
});
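This hunk swaps a three-way download strategy for a two-way one: the removed code chooses among the Azure SDK, a concurrent HttpClient implementation, and the plain http-client for hosts ending in .blob.core.windows.net. A reduced sketch of that removed selection logic, with the three download routines abstracted as parameters (a sketch under those assumptions, not the toolkit source):

// Sketch of the pre-revert selection in downloadCache. The three functions
// stand in for downloadCacheStorageSDK, downloadCacheHttpClientConcurrent,
// and downloadCacheHttpClient.
interface DownloadOptions {
    useAzureSdk: boolean;
    concurrentBlobDownloads: boolean;
}

async function downloadCacheSketch(
    archiveLocation: string,
    options: DownloadOptions,
    sdk: () => Promise<void>,
    concurrent: () => Promise<void>,
    plain: () => Promise<void>
): Promise<void> {
    const archiveUrl = new URL(archiveLocation);
    if (archiveUrl.hostname.endsWith(".blob.core.windows.net")) {
        if (options.useAzureSdk) {
            await sdk();        // Azure Storage SDK path
        } else if (options.concurrentBlobDownloads) {
            await concurrent(); // concurrent HttpClient workaround for the SDK issue
        } else {
            await plain();      // plain Actions http-client
        }
    } else {
        await plain();          // non-Azure URLs always use the http-client
    }
}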
@@ -3497,7 +3489,9 @@ function getContentRange(start, end) {
}
function uploadChunk(httpClient, resourceUrl, openStream, start, end) {
return __awaiter(this, void 0, void 0, function* () {
core.debug(`Uploading chunk of size ${end - start + 1} bytes at offset ${start} with content range: ${getContentRange(start, end)}`);
core.debug(`Uploading chunk of size ${end -
start +
1} bytes at offset ${start} with content range: ${getContentRange(start, end)}`);
const additionalHeaders = {
'Content-Type': 'application/octet-stream',
'Content-Range': getContentRange(start, end)
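Only the call sites of `getContentRange` are visible in this hunk; in the toolkit it is believed to produce the `bytes <start>-<end>/*` form, with the total size left as `*` (an assumption, stated as such below). A tiny sketch:

// Sketch (assumption): the Content-Range header built for each uploaded
// chunk; the total size is not sent per chunk, hence the "*".
function getContentRangeSketch(start: number, end: number): string {
    return `bytes ${start}-${end}/*`;
}

// Example: a 4 MiB chunk at offset 0 produces "bytes 0-4194303/*"
console.log(getContentRangeSketch(0, 4 * 1024 * 1024 - 1));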
@@ -4872,13 +4866,7 @@ function getProxyUrl(reqUrl) {
}
})();
if (proxyVar) {
try {
return new URL(proxyVar);
}
catch (_a) {
if (!proxyVar.startsWith('http://') && !proxyVar.startsWith('https://'))
return new URL(`http://${proxyVar}`);
}
return new URL(proxyVar);
}
else {
return undefined;
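The reverted lines change how a schemeless proxy variable is handled: the newer code retries with an http:// prefix when `new URL(proxyVar)` throws. A minimal standalone sketch of that behavior (illustrative, not the http-client source):

// Sketch of the newer getProxyUrl fallback: if the proxy env value has no
// scheme, URL parsing fails and the code retries with "http://" prepended.
function parseProxyVar(proxyVar: string): URL | undefined {
    if (!proxyVar) {
        return undefined;
    }
    try {
        return new URL(proxyVar);
    } catch {
        if (!proxyVar.startsWith("http://") && !proxyVar.startsWith("https://")) {
            return new URL(`http://${proxyVar}`);
        }
    }
    return undefined;
}

console.log(parseProxyVar("10.0.0.1:8080")?.href); // http://10.0.0.1:8080/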
@@ -4889,10 +4877,6 @@ function checkBypass(reqUrl) {
if (!reqUrl.hostname) {
return false;
}
const reqHost = reqUrl.hostname;
if (isLoopbackAddress(reqHost)) {
return true;
}
const noProxy = process.env['no_proxy'] || process.env['NO_PROXY'] || '';
if (!noProxy) {
return false;
@@ -4918,24 +4902,13 @@ function checkBypass(reqUrl) {
.split(',')
.map(x => x.trim().toUpperCase())
.filter(x => x)) {
if (upperNoProxyItem === '*' ||
upperReqHosts.some(x => x === upperNoProxyItem ||
x.endsWith(`.${upperNoProxyItem}`) ||
(upperNoProxyItem.startsWith('.') &&
x.endsWith(`${upperNoProxyItem}`)))) {
if (upperReqHosts.some(x => x === upperNoProxyItem)) {
return true;
}
}
return false;
}
exports.checkBypass = checkBypass;
function isLoopbackAddress(host) {
const hostLower = host.toLowerCase();
return (hostLower === 'localhost' ||
hostLower.startsWith('127.') ||
hostLower.startsWith('[::1]') ||
hostLower.startsWith('[0:0:0:0:0:0:0:1]'));
}
//# sourceMappingURL=proxy.js.map
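The two checkBypass variants above differ in NO_PROXY matching: the newer code (removed by this commit) also bypasses the proxy for subdomains and leading-dot entries, while the older code only matches exact hosts. A sketch of the newer matching rule, assuming inputs are already upper-cased and trimmed as in the surrounding loop:

// Sketch of the newer NO_PROXY match: an entry matches exactly, as a
// parent domain, or as a leading-dot suffix; "*" bypasses everything.
function bypassed(upperReqHosts: string[], upperNoProxyItem: string): boolean {
    return (
        upperNoProxyItem === "*" ||
        upperReqHosts.some(
            x =>
                x === upperNoProxyItem ||
                x.endsWith(`.${upperNoProxyItem}`) ||
                (upperNoProxyItem.startsWith(".") && x.endsWith(upperNoProxyItem))
        )
    );
}

console.log(bypassed(["API.EXAMPLE.COM"], "EXAMPLE.COM"));  // true (subdomain)
console.log(bypassed(["API.EXAMPLE.COM"], ".EXAMPLE.COM")); // true (leading dot)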
/***/ }),

@@ -5584,7 +5557,7 @@ var __awaiter = (this && this.__awaiter) || function (thisArg, _arguments, P, ge
});
};
Object.defineProperty(exports, "__esModule", { value: true });
exports.downloadCacheStorageSDK = exports.downloadCacheHttpClientConcurrent = exports.downloadCacheHttpClient = exports.DownloadProgress = void 0;
exports.downloadCacheStorageSDK = exports.downloadCacheHttpClient = exports.DownloadProgress = void 0;
const core = __importStar(__webpack_require__(470));
const http_client_1 = __webpack_require__(425);
const storage_blob_1 = __webpack_require__(373);
@@ -5741,115 +5714,6 @@ function downloadCacheHttpClient(archiveLocation, archivePath) {
});
}
exports.downloadCacheHttpClient = downloadCacheHttpClient;
/**
* Download the cache using the Actions toolkit http-client concurrently
*
* @param archiveLocation the URL for the cache
* @param archivePath the local path where the cache is saved
*/
function downloadCacheHttpClientConcurrent(archiveLocation, archivePath, options) {
var _a;
return __awaiter(this, void 0, void 0, function* () {
const archiveDescriptor = yield fs.promises.open(archivePath, 'w');
const httpClient = new http_client_1.HttpClient('actions/cache', undefined, {
socketTimeout: options.timeoutInMs,
keepAlive: true
});
try {
const res = yield (0, requestUtils_1.retryHttpClientResponse)('downloadCacheMetadata', () => __awaiter(this, void 0, void 0, function* () { return yield httpClient.request('HEAD', archiveLocation, null, {}); }));
const lengthHeader = res.message.headers['content-length'];
if (lengthHeader === undefined || lengthHeader === null) {
throw new Error('Content-Length not found on blob response');
}
const length = parseInt(lengthHeader);
if (Number.isNaN(length)) {
throw new Error(`Could not interpret Content-Length: ${length}`);
}
const downloads = [];
const blockSize = 4 * 1024 * 1024;
for (let offset = 0; offset < length; offset += blockSize) {
const count = Math.min(blockSize, length - offset);
downloads.push({
offset,
promiseGetter: () => __awaiter(this, void 0, void 0, function* () {
return yield downloadSegmentRetry(httpClient, archiveLocation, offset, count);
})
});
}
// reverse to use .pop instead of .shift
downloads.reverse();
let actives = 0;
let bytesDownloaded = 0;
const progress = new DownloadProgress(length);
progress.startDisplayTimer();
const progressFn = progress.onProgress();
const activeDownloads = [];
let nextDownload;
const waitAndWrite = () => __awaiter(this, void 0, void 0, function* () {
const segment = yield Promise.race(Object.values(activeDownloads));
yield archiveDescriptor.write(segment.buffer, 0, segment.count, segment.offset);
actives--;
delete activeDownloads[segment.offset];
bytesDownloaded += segment.count;
progressFn({ loadedBytes: bytesDownloaded });
});
while ((nextDownload = downloads.pop())) {
activeDownloads[nextDownload.offset] = nextDownload.promiseGetter();
actives++;
if (actives >= ((_a = options.downloadConcurrency) !== null && _a !== void 0 ? _a : 10)) {
yield waitAndWrite();
}
}
while (actives > 0) {
yield waitAndWrite();
}
}
finally {
httpClient.dispose();
yield archiveDescriptor.close();
}
});
}
exports.downloadCacheHttpClientConcurrent = downloadCacheHttpClientConcurrent;
function downloadSegmentRetry(httpClient, archiveLocation, offset, count) {
return __awaiter(this, void 0, void 0, function* () {
const retries = 5;
let failures = 0;
while (true) {
try {
const timeout = 30000;
const result = yield promiseWithTimeout(timeout, downloadSegment(httpClient, archiveLocation, offset, count));
if (typeof result === 'string') {
throw new Error('downloadSegmentRetry failed due to timeout');
}
return result;
}
catch (err) {
if (failures >= retries) {
throw err;
}
failures++;
}
}
});
}
function downloadSegment(httpClient, archiveLocation, offset, count) {
return __awaiter(this, void 0, void 0, function* () {
const partRes = yield (0, requestUtils_1.retryHttpClientResponse)('downloadCachePart', () => __awaiter(this, void 0, void 0, function* () {
return yield httpClient.get(archiveLocation, {
Range: `bytes=${offset}-${offset + count - 1}`
});
}));
if (!partRes.readBodyBuffer) {
throw new Error('Expected HttpClientResponse to implement readBodyBuffer');
}
return {
offset,
count,
buffer: yield partRes.readBodyBuffer()
};
});
}
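The removed downloadCacheHttpClientConcurrent above implements a bounded-concurrency queue: segment downloads start until the concurrency cap is reached, then Promise.race drains whichever finishes first. A reduced sketch of that pattern with generic tasks (illustrative only; in the original each task downloads one byte-range segment):

// Reduced sketch of the bounded-concurrency pattern: run at most `limit`
// tasks at once, draining the earliest finisher via Promise.race.
async function runBounded<T>(
    tasks: Array<() => Promise<T>>,
    limit: number,
    onDone: (result: T) => Promise<void>
): Promise<void> {
    const active = new Map<number, Promise<{ id: number; result: T }>>();
    let nextId = 0;
    const drain = async () => {
        // Wait for whichever active task settles first, then retire it.
        const { id, result } = await Promise.race(active.values());
        active.delete(id);
        await onDone(result);
    };
    for (const task of tasks) {
        const id = nextId++;
        active.set(id, task().then(result => ({ id, result })));
        if (active.size >= limit) {
            await drain();
        }
    }
    while (active.size > 0) {
        await drain();
    }
}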
/**
* Download the cache using the Azure Storage SDK. Only call this method if the
* URL points to an Azure Storage endpoint.
@@ -7272,11 +7136,8 @@ var MAX_SAFE_INTEGER = Number.MAX_SAFE_INTEGER ||
// Max safe segment length for coercion.
var MAX_SAFE_COMPONENT_LENGTH = 16

var MAX_SAFE_BUILD_LENGTH = MAX_LENGTH - 6

// The actual regexps go on exports.re
var re = exports.re = []
var safeRe = exports.safeRe = []
var src = exports.src = []
var t = exports.tokens = {}
var R = 0
@@ -7285,31 +7146,6 @@ function tok (n) {
t[n] = R++
}

var LETTERDASHNUMBER = '[a-zA-Z0-9-]'

// Replace some greedy regex tokens to prevent regex dos issues. These regex are
// used internally via the safeRe object since all inputs in this library get
// normalized first to trim and collapse all extra whitespace. The original
// regexes are exported for userland consumption and lower level usage. A
// future breaking change could export the safer regex only with a note that
// all input should have extra whitespace removed.
var safeRegexReplacements = [
['\\s', 1],
['\\d', MAX_LENGTH],
[LETTERDASHNUMBER, MAX_SAFE_BUILD_LENGTH],
]

function makeSafeRe (value) {
for (var i = 0; i < safeRegexReplacements.length; i++) {
var token = safeRegexReplacements[i][0]
var max = safeRegexReplacements[i][1]
value = value
.split(token + '*').join(token + '{0,' + max + '}')
.split(token + '+').join(token + '{1,' + max + '}')
}
return value
}
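makeSafeRe above rewrites unbounded quantifiers on \s, \d, and the letter-dash-number class into bounded {0,max} / {1,max} repetitions, which is the ReDoS hardening this commit reverts. Applied by hand to a small pattern (MAX_LENGTH = 256 is an assumption; its definition is outside this hunk):

// What makeSafeRe does to a source pattern, following the
// safeRegexReplacements table above.
const MAX_LENGTH = 256; // assumed value of semver's length cap
const before = "(\\s*)(\\d+)";
const after = before
    .split("\\s*").join("\\s{0,1}")
    .split("\\d+").join("\\d{1," + MAX_LENGTH + "}");
console.log(after); // (\s{0,1})(\d{1,256})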
// The following Regular Expressions can be used for tokenizing,
// validating, and parsing SemVer version strings.

@@ -7319,14 +7155,14 @@ function makeSafeRe (value) {
tok('NUMERICIDENTIFIER')
src[t.NUMERICIDENTIFIER] = '0|[1-9]\\d*'
tok('NUMERICIDENTIFIERLOOSE')
src[t.NUMERICIDENTIFIERLOOSE] = '\\d+'
src[t.NUMERICIDENTIFIERLOOSE] = '[0-9]+'

// ## Non-numeric Identifier
// Zero or more digits, followed by a letter or hyphen, and then zero or
// more letters, digits, or hyphens.

tok('NONNUMERICIDENTIFIER')
src[t.NONNUMERICIDENTIFIER] = '\\d*[a-zA-Z-]' + LETTERDASHNUMBER + '*'
src[t.NONNUMERICIDENTIFIER] = '\\d*[a-zA-Z-][a-zA-Z0-9-]*'

// ## Main Version
// Three dot-separated numeric identifiers.
@@ -7368,7 +7204,7 @@ src[t.PRERELEASELOOSE] = '(?:-?(' + src[t.PRERELEASEIDENTIFIERLOOSE] +
// Any combination of digits, letters, or hyphens.

tok('BUILDIDENTIFIER')
src[t.BUILDIDENTIFIER] = LETTERDASHNUMBER + '+'
src[t.BUILDIDENTIFIER] = '[0-9A-Za-z-]+'

// ## Build Metadata
// Plus sign, followed by one or more period-separated build metadata
@@ -7448,7 +7284,6 @@ src[t.COERCE] = '(^|[^\\d])' +
'(?:$|[^\\d])'
tok('COERCERTL')
re[t.COERCERTL] = new RegExp(src[t.COERCE], 'g')
safeRe[t.COERCERTL] = new RegExp(makeSafeRe(src[t.COERCE]), 'g')

// Tilde ranges.
// Meaning is "reasonably at or greater than"
@@ -7458,7 +7293,6 @@ src[t.LONETILDE] = '(?:~>?)'
tok('TILDETRIM')
src[t.TILDETRIM] = '(\\s*)' + src[t.LONETILDE] + '\\s+'
re[t.TILDETRIM] = new RegExp(src[t.TILDETRIM], 'g')
safeRe[t.TILDETRIM] = new RegExp(makeSafeRe(src[t.TILDETRIM]), 'g')
var tildeTrimReplace = '$1~'

tok('TILDE')
@@ -7474,7 +7308,6 @@ src[t.LONECARET] = '(?:\\^)'
tok('CARETTRIM')
src[t.CARETTRIM] = '(\\s*)' + src[t.LONECARET] + '\\s+'
re[t.CARETTRIM] = new RegExp(src[t.CARETTRIM], 'g')
safeRe[t.CARETTRIM] = new RegExp(makeSafeRe(src[t.CARETTRIM]), 'g')
var caretTrimReplace = '$1^'

tok('CARET')
@@ -7496,7 +7329,6 @@ src[t.COMPARATORTRIM] = '(\\s*)' + src[t.GTLT] +

// this one has to use the /g flag
re[t.COMPARATORTRIM] = new RegExp(src[t.COMPARATORTRIM], 'g')
safeRe[t.COMPARATORTRIM] = new RegExp(makeSafeRe(src[t.COMPARATORTRIM]), 'g')
var comparatorTrimReplace = '$1$2$3'

// Something like `1.2.3 - 1.2.4`
@@ -7525,14 +7357,6 @@ for (var i = 0; i < R; i++) {
debug(i, src[i])
if (!re[i]) {
re[i] = new RegExp(src[i])

// Replace all greedy whitespace to prevent regex dos issues. These regex are
// used internally via the safeRe object since all inputs in this library get
// normalized first to trim and collapse all extra whitespace. The original
// regexes are exported for userland consumption and lower level usage. A
// future breaking change could export the safer regex only with a note that
// all input should have extra whitespace removed.
safeRe[i] = new RegExp(makeSafeRe(src[i]))
}
}

@@ -7557,7 +7381,7 @@ function parse (version, options) {
return null
}

var r = options.loose ? safeRe[t.LOOSE] : safeRe[t.FULL]
var r = options.loose ? re[t.LOOSE] : re[t.FULL]
if (!r.test(version)) {
return null
}
@@ -7612,7 +7436,7 @@ function SemVer (version, options) {
this.options = options
this.loose = !!options.loose

var m = version.trim().match(options.loose ? safeRe[t.LOOSE] : safeRe[t.FULL])
var m = version.trim().match(options.loose ? re[t.LOOSE] : re[t.FULL])

if (!m) {
throw new TypeError('Invalid Version: ' + version)
@@ -8057,7 +7881,6 @@ function Comparator (comp, options) {
return new Comparator(comp, options)
}

comp = comp.trim().split(/\s+/).join(' ')
debug('comparator', comp, options)
this.options = options
this.loose = !!options.loose
@@ -8074,7 +7897,7 @@ function Comparator (comp, options) {

var ANY = {}
Comparator.prototype.parse = function (comp) {
var r = this.options.loose ? safeRe[t.COMPARATORLOOSE] : safeRe[t.COMPARATOR]
var r = this.options.loose ? re[t.COMPARATORLOOSE] : re[t.COMPARATOR]
var m = comp.match(r)

if (!m) {
@@ -8198,16 +8021,9 @@ function Range (range, options) {
this.loose = !!options.loose
this.includePrerelease = !!options.includePrerelease

// First reduce all whitespace as much as possible so we do not have to rely
// on potentially slow regexes like \s*. This is then stored and used for
// future error messages as well.
this.raw = range
.trim()
.split(/\s+/)
.join(' ')

// First, split based on boolean or ||
this.set = this.raw.split('||').map(function (range) {
this.raw = range
this.set = range.split(/\s*\|\|\s*/).map(function (range) {
return this.parseRange(range.trim())
}, this).filter(function (c) {
// throw out any that are not relevant for whatever reason
@@ -8215,7 +8031,7 @@ function Range (range, options) {
})

if (!this.set.length) {
throw new TypeError('Invalid SemVer Range: ' + this.raw)
throw new TypeError('Invalid SemVer Range: ' + range)
}

this.format()
@@ -8234,19 +8050,20 @@ Range.prototype.toString = function () {

Range.prototype.parseRange = function (range) {
var loose = this.options.loose
range = range.trim()
// `1.2.3 - 1.2.4` => `>=1.2.3 <=1.2.4`
var hr = loose ? safeRe[t.HYPHENRANGELOOSE] : safeRe[t.HYPHENRANGE]
var hr = loose ? re[t.HYPHENRANGELOOSE] : re[t.HYPHENRANGE]
range = range.replace(hr, hyphenReplace)
debug('hyphen replace', range)
// `> 1.2.3 < 1.2.5` => `>1.2.3 <1.2.5`
range = range.replace(safeRe[t.COMPARATORTRIM], comparatorTrimReplace)
debug('comparator trim', range, safeRe[t.COMPARATORTRIM])
range = range.replace(re[t.COMPARATORTRIM], comparatorTrimReplace)
debug('comparator trim', range, re[t.COMPARATORTRIM])

// `~ 1.2.3` => `~1.2.3`
range = range.replace(safeRe[t.TILDETRIM], tildeTrimReplace)
range = range.replace(re[t.TILDETRIM], tildeTrimReplace)

// `^ 1.2.3` => `^1.2.3`
range = range.replace(safeRe[t.CARETTRIM], caretTrimReplace)
range = range.replace(re[t.CARETTRIM], caretTrimReplace)

// normalize spaces
range = range.split(/\s+/).join(' ')
@@ -8254,7 +8071,7 @@ Range.prototype.parseRange = function (range) {
// At this point, the range is completely trimmed and
// ready to be split into comparators.

var compRe = loose ? safeRe[t.COMPARATORLOOSE] : safeRe[t.COMPARATOR]
var compRe = loose ? re[t.COMPARATORLOOSE] : re[t.COMPARATOR]
var set = range.split(' ').map(function (comp) {
return parseComparator(comp, this.options)
}, this).join(' ').split(/\s+/)
@@ -8354,7 +8171,7 @@ function replaceTildes (comp, options) {
}

function replaceTilde (comp, options) {
var r = options.loose ? safeRe[t.TILDELOOSE] : safeRe[t.TILDE]
var r = options.loose ? re[t.TILDELOOSE] : re[t.TILDE]
return comp.replace(r, function (_, M, m, p, pr) {
debug('tilde', comp, _, M, m, p, pr)
var ret
@@ -8395,7 +8212,7 @@ function replaceCarets (comp, options) {

function replaceCaret (comp, options) {
debug('caret', comp, options)
var r = options.loose ? safeRe[t.CARETLOOSE] : safeRe[t.CARET]
var r = options.loose ? re[t.CARETLOOSE] : re[t.CARET]
return comp.replace(r, function (_, M, m, p, pr) {
debug('caret', comp, _, M, m, p, pr)
var ret
@@ -8454,7 +8271,7 @@ function replaceXRanges (comp, options) {

function replaceXRange (comp, options) {
comp = comp.trim()
var r = options.loose ? safeRe[t.XRANGELOOSE] : safeRe[t.XRANGE]
var r = options.loose ? re[t.XRANGELOOSE] : re[t.XRANGE]
return comp.replace(r, function (ret, gtlt, M, m, p, pr) {
debug('xRange', comp, ret, gtlt, M, m, p, pr)
var xM = isX(M)
@@ -8529,7 +8346,7 @@ function replaceXRange (comp, options) {
function replaceStars (comp, options) {
debug('replaceStars', comp, options)
// Looseness is ignored here. star is always as loose as it gets!
return comp.trim().replace(safeRe[t.STAR], '')
return comp.trim().replace(re[t.STAR], '')
}

// This function is passed to string.replace(re[t.HYPHENRANGE])
@@ -8855,7 +8672,7 @@ function coerce (version, options) {

var match = null
if (!options.rtl) {
match = version.match(safeRe[t.COERCE])
match = version.match(re[t.COERCE])
} else {
// Find the right-most coercible string that does not share
// a terminus with a more left-ward coercible string.
@@ -8866,17 +8683,17 @@ function coerce (version, options) {
// Stop when we get a match that ends at the string end, since no
// coercible string can be more right-ward without the same terminus.
var next
while ((next = safeRe[t.COERCERTL].exec(version)) &&
while ((next = re[t.COERCERTL].exec(version)) &&
(!match || match.index + match[0].length !== version.length)
) {
if (!match ||
next.index + next[0].length !== match.index + match[0].length) {
match = next
}
safeRe[t.COERCERTL].lastIndex = next.index + next[1].length + next[2].length
re[t.COERCERTL].lastIndex = next.index + next[1].length + next[2].length
}
// leave it in a clean state
safeRe[t.COERCERTL].lastIndex = -1
re[t.COERCERTL].lastIndex = -1
}

if (match === null) {
@@ -36020,19 +35837,6 @@ class HttpClientResponse {
}));
});
}
readBodyBuffer() {
return __awaiter(this, void 0, void 0, function* () {
return new Promise((resolve) => __awaiter(this, void 0, void 0, function* () {
const chunks = [];
this.message.on('data', (chunk) => {
chunks.push(chunk);
});
this.message.on('end', () => {
resolve(Buffer.concat(chunks));
});
}));
});
}
}
exports.HttpClientResponse = HttpClientResponse;
function isHttps(requestUrl) {
@@ -37097,9 +36901,28 @@ exports.default = {

"use strict";

var __awaiter = (this && this.__awaiter) || function (thisArg, _arguments, P, generator) {
function adopt(value) { return value instanceof P ? value : new P(function (resolve) { resolve(value); }); }
return new (P || (P = Promise))(function (resolve, reject) {
function fulfilled(value) { try { step(generator.next(value)); } catch (e) { reject(e); } }
function rejected(value) { try { step(generator["throw"](value)); } catch (e) { reject(e); } }
function step(result) { result.done ? resolve(result.value) : adopt(result.value).then(fulfilled, rejected); }
step((generator = generator.apply(thisArg, _arguments || [])).next());
});
};
var __importDefault = (this && this.__importDefault) || function (mod) {
return (mod && mod.__esModule) ? mod : { "default": mod };
};
Object.defineProperty(exports, "__esModule", { value: true });
const restoreImpl_1 = __webpack_require__(835);
(0, restoreImpl_1.restoreOnlyRun)(true);
const restoreImpl_1 = __importDefault(__webpack_require__(835));
const stateProvider_1 = __webpack_require__(309);
function run() {
return __awaiter(this, void 0, void 0, function* () {
yield (0, restoreImpl_1.default)(new stateProvider_1.NullStateProvider());
});
}
run();
exports.default = run;

/***/ }),
@@ -40487,8 +40310,7 @@ exports.getUploadOptions = getUploadOptions;
*/
function getDownloadOptions(copy) {
const result = {
useAzureSdk: false,
concurrentBlobDownloads: true,
useAzureSdk: true,
downloadConcurrency: 8,
timeoutInMs: 30000,
segmentTimeoutInMs: 600000,
@@ -40498,9 +40320,6 @@ function getDownloadOptions(copy) {
if (typeof copy.useAzureSdk === 'boolean') {
result.useAzureSdk = copy.useAzureSdk;
}
if (typeof copy.concurrentBlobDownloads === 'boolean') {
result.concurrentBlobDownloads = copy.concurrentBlobDownloads;
}
if (typeof copy.downloadConcurrency === 'number') {
result.downloadConcurrency = copy.downloadConcurrency;
}
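The hunk above flips the download defaults this commit reverts (useAzureSdk: false plus concurrentBlobDownloads: true, back to useAzureSdk: true). The merge itself is a plain defaults-plus-overrides copy; a typed sketch of the pre-revert shape (field values mirror the hunk; the interface is an assumption for illustration):

// Sketch of the defaults-plus-overrides merge in getDownloadOptions.
interface DownloadOptionsFull {
    useAzureSdk: boolean;
    concurrentBlobDownloads: boolean;
    downloadConcurrency: number;
    timeoutInMs: number;
    segmentTimeoutInMs: number;
}

function getDownloadOptionsSketch(copy?: Partial<DownloadOptionsFull>): DownloadOptionsFull {
    const result: DownloadOptionsFull = {
        useAzureSdk: false,
        concurrentBlobDownloads: true,
        downloadConcurrency: 8,
        timeoutInMs: 30000,
        segmentTimeoutInMs: 600000
    };
    if (typeof copy?.useAzureSdk === "boolean") {
        result.useAzureSdk = copy.useAzureSdk;
    }
    if (typeof copy?.concurrentBlobDownloads === "boolean") {
        result.concurrentBlobDownloads = copy.concurrentBlobDownloads;
    }
    if (typeof copy?.downloadConcurrency === "number") {
        result.downloadConcurrency = copy.downloadConcurrency;
    }
    return result;
}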
@@ -49277,11 +49096,9 @@ var __awaiter = (this && this.__awaiter) || function (thisArg, _arguments, P, ge
});
};
Object.defineProperty(exports, "__esModule", { value: true });
exports.restoreRun = exports.restoreOnlyRun = exports.restoreImpl = void 0;
const cache = __importStar(__webpack_require__(692));
const core = __importStar(__webpack_require__(470));
const constants_1 = __webpack_require__(694);
const stateProvider_1 = __webpack_require__(309);
const utils = __importStar(__webpack_require__(360));
function restoreImpl(stateProvider) {
return __awaiter(this, void 0, void 0, function* () {
@@ -49332,40 +49149,7 @@ function restoreImpl(stateProvider) {
}
});
}
exports.restoreImpl = restoreImpl;
function run(stateProvider, earlyExit) {
return __awaiter(this, void 0, void 0, function* () {
try {
yield restoreImpl(stateProvider);
}
catch (err) {
console.error(err);
if (earlyExit) {
process.exit(1);
}
}
// node will stay alive if any promises are not resolved,
// which is a possibility if HTTP requests are dangling
// due to retries or timeouts. We know that if we got here
// that all promises that we care about have successfully
// resolved, so simply exit with success.
if (earlyExit) {
process.exit(0);
}
});
}
function restoreOnlyRun(earlyExit) {
return __awaiter(this, void 0, void 0, function* () {
yield run(new stateProvider_1.NullStateProvider(), earlyExit);
});
}
exports.restoreOnlyRun = restoreOnlyRun;
function restoreRun(earlyExit) {
return __awaiter(this, void 0, void 0, function* () {
yield run(new stateProvider_1.StateProvider(), earlyExit);
});
}
exports.restoreRun = restoreRun;
exports.default = restoreImpl;
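The removed wrappers above distinguish the two entry points: restoreRun (the restore action, persisting state via StateProvider) and restoreOnlyRun (the restore-only action, NullStateProvider), both funneling through run with an earlyExit flag that force-exits the process so dangling HTTP retries cannot keep Node alive. A condensed sketch of that control flow (restore stands in for restoreImpl):

// Condensed sketch of the run/earlyExit control flow shown above.
async function runSketch(restore: () => Promise<void>, earlyExit?: boolean): Promise<void> {
    try {
        await restore();
    } catch (err) {
        console.error(err);
        if (earlyExit) {
            process.exit(1); // fail the action immediately
        }
    }
    // Dangling HTTP retries or timeouts can keep the event loop alive;
    // everything that matters has resolved by now, so exit explicitly.
    if (earlyExit) {
        process.exit(0);
    }
}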
/***/ }),

@@ -50305,14 +50089,14 @@ exports.propagation = propagation_1.PropagationAPI.getInstance();
this.saxParser.onopentag = (function(_this) {
return function(node) {
var key, newValue, obj, processedKey, ref;
obj = Object.create(null);
obj = {};
obj[charkey] = "";
if (!_this.options.ignoreAttrs) {
ref = node.attributes;
for (key in ref) {
if (!hasProp.call(ref, key)) continue;
if (!(attrkey in obj) && !_this.options.mergeAttrs) {
obj[attrkey] = Object.create(null);
obj[attrkey] = {};
}
newValue = _this.options.attrValueProcessors ? processItem(_this.options.attrValueProcessors, node.attributes[key], key) : node.attributes[key];
processedKey = _this.options.attrNameProcessors ? processItem(_this.options.attrNameProcessors, key) : key;
@@ -50362,11 +50146,7 @@ exports.propagation = propagation_1.PropagationAPI.getInstance();
}
}
if (isEmpty(obj)) {
if (typeof _this.options.emptyTag === 'function') {
obj = _this.options.emptyTag();
} else {
obj = _this.options.emptyTag !== '' ? _this.options.emptyTag : emptyStr;
}
obj = _this.options.emptyTag !== '' ? _this.options.emptyTag : emptyStr;
}
if (_this.options.validator != null) {
xpath = "/" + ((function() {
@@ -50390,7 +50170,7 @@ exports.propagation = propagation_1.PropagationAPI.getInstance();
}
if (_this.options.explicitChildren && !_this.options.mergeAttrs && typeof obj === 'object') {
if (!_this.options.preserveChildrenOrder) {
node = Object.create(null);
node = {};
if (_this.options.attrkey in obj) {
node[_this.options.attrkey] = obj[_this.options.attrkey];
delete obj[_this.options.attrkey];
@@ -50405,7 +50185,7 @@ exports.propagation = propagation_1.PropagationAPI.getInstance();
obj = node;
} else if (s) {
s[_this.options.childkey] = s[_this.options.childkey] || [];
objClone = Object.create(null);
objClone = {};
for (key in obj) {
if (!hasProp.call(obj, key)) continue;
objClone[key] = obj[key];
@@ -50422,7 +50202,7 @@ exports.propagation = propagation_1.PropagationAPI.getInstance();
} else {
if (_this.options.explicitRoot) {
old = obj;
obj = Object.create(null);
obj = {};
obj[nodeName] = old;
}
_this.resultObject = obj;
@@ -52624,7 +52404,7 @@ var CacheFilename;
(function (CacheFilename) {
CacheFilename["Gzip"] = "cache.tgz";
CacheFilename["Zstd"] = "cache.tzst";
})(CacheFilename || (exports.CacheFilename = CacheFilename = {}));
})(CacheFilename = exports.CacheFilename || (exports.CacheFilename = {}));
var CompressionMethod;
(function (CompressionMethod) {
CompressionMethod["Gzip"] = "gzip";
@@ -52632,12 +52412,12 @@ var CompressionMethod;
// This enum is for earlier version of zstd that does not have --long support
CompressionMethod["ZstdWithoutLong"] = "zstd-without-long";
CompressionMethod["Zstd"] = "zstd";
})(CompressionMethod || (exports.CompressionMethod = CompressionMethod = {}));
})(CompressionMethod = exports.CompressionMethod || (exports.CompressionMethod = {}));
var ArchiveToolType;
(function (ArchiveToolType) {
ArchiveToolType["GNU"] = "gnu";
ArchiveToolType["BSD"] = "bsd";
})(ArchiveToolType || (exports.ArchiveToolType = ArchiveToolType = {}));
})(ArchiveToolType = exports.ArchiveToolType || (exports.ArchiveToolType = {}));
// The default number of retry attempts.
exports.DefaultRetryAttempts = 2;
// The default delay in milliseconds between retry attempts.
@@ -55035,7 +54815,7 @@ class HttpHeaders {
set(headerName, headerValue) {
this._headersMap[getHeaderKey(headerName)] = {
name: headerName,
value: headerValue.toString().trim(),
value: headerValue.toString(),
};
}
/**
@@ -55175,7 +54955,7 @@ const Constants = {
/**
* The core-http version
*/
coreHttpVersion: "3.0.4",
coreHttpVersion: "3.0.0",
/**
* Specifies HTTP.
*/
@@ -55253,6 +55033,13 @@ const XML_CHARKEY = "_";

// Copyright (c) Microsoft Corporation.
const validUuidRegex = /^[0-9a-fA-F]{8}-[0-9a-fA-F]{4}-[0-9a-fA-F]{4}-[0-9a-fA-F]{4}-[0-9a-fA-F]{12}$/i;
/**
* A constant that indicates whether the environment is node.js or browser based.
*/
const isNode = typeof process !== "undefined" &&
!!process.version &&
!!process.versions &&
!!process.versions.node;
/**
* Encodes an URI.
*
@@ -59933,7 +59720,7 @@ function createDefaultRequestPolicyFactories(authPolicyFactory, options) {
factories.push(throttlingRetryPolicy());
}
factories.push(deserializationPolicy(options.deserializationContentTypes));
if (coreUtil.isNode) {
if (isNode) {
factories.push(proxyPolicy(options.proxySettings));
}
factories.push(logPolicy({ logger: logger.info }));
@@ -59965,7 +59752,7 @@ function createPipelineFromOptions(pipelineOptions, authPolicyFactory) {
const keepAliveOptions = Object.assign(Object.assign({}, DefaultKeepAliveOptions), pipelineOptions.keepAliveOptions);
const retryOptions = Object.assign(Object.assign({}, DefaultRetryOptions), pipelineOptions.retryOptions);
const redirectOptions = Object.assign(Object.assign({}, DefaultRedirectOptions), pipelineOptions.redirectOptions);
if (coreUtil.isNode) {
if (isNode) {
requestPolicyFactories.push(proxyPolicy(pipelineOptions.proxyOptions));
}
const deserializationOptions = Object.assign(Object.assign({}, DefaultDeserializationOptions), pipelineOptions.deserializationOptions);
@@ -59978,7 +59765,7 @@ function createPipelineFromOptions(pipelineOptions, authPolicyFactory) {
requestPolicyFactories.push(authPolicyFactory);
}
requestPolicyFactories.push(logPolicy(loggingOptions));
if (coreUtil.isNode && pipelineOptions.decompressResponse === false) {
if (isNode && pipelineOptions.decompressResponse === false) {
requestPolicyFactories.push(disableResponseDecompressionPolicy());
}
return {
@@ -60109,7 +59896,10 @@ function flattenResponse(_response, responseSpec) {
}
function getCredentialScopes(options, baseUri) {
if (options === null || options === void 0 ? void 0 : options.credentialScopes) {
return options.credentialScopes;
const scopes = options.credentialScopes;
return Array.isArray(scopes)
? scopes.map((scope) => new URL(scope).toString())
: new URL(scopes).toString();
}
if (baseUri) {
return `${baseUri}/.default`;
@@ -60342,10 +60132,6 @@ Object.defineProperty(exports, 'delay', {
enumerable: true,
get: function () { return coreUtil.delay; }
});
Object.defineProperty(exports, 'isNode', {
enumerable: true,
get: function () { return coreUtil.isNode; }
});
Object.defineProperty(exports, 'isTokenCredential', {
enumerable: true,
get: function () { return coreAuth.isTokenCredential; }
@@ -60385,6 +60171,7 @@ exports.generateUuid = generateUuid;
exports.getDefaultProxySettings = getDefaultProxySettings;
exports.getDefaultUserAgentValue = getDefaultUserAgentValue;
exports.isDuration = isDuration;
exports.isNode = isNode;
exports.isValidUuid = isValidUuid;
exports.keepAlivePolicy = keepAlivePolicy;
exports.logPolicy = logPolicy;
403 dist/restore/index.js vendored
@@ -1127,19 +1127,17 @@ function getArchiveFileSizeInBytes(filePath) {
}
exports.getArchiveFileSizeInBytes = getArchiveFileSizeInBytes;
function resolvePaths(patterns) {
var _a, e_1, _b, _c;
var _d;
var e_1, _a;
var _b;
return __awaiter(this, void 0, void 0, function* () {
const paths = [];
const workspace = (_d = process.env['GITHUB_WORKSPACE']) !== null && _d !== void 0 ? _d : process.cwd();
const workspace = (_b = process.env['GITHUB_WORKSPACE']) !== null && _b !== void 0 ? _b : process.cwd();
const globber = yield glob.create(patterns.join('\n'), {
implicitDescendants: false
});
try {
for (var _e = true, _f = __asyncValues(globber.globGenerator()), _g; _g = yield _f.next(), _a = _g.done, !_a; _e = true) {
_c = _g.value;
_e = false;
const file = _c;
for (var _c = __asyncValues(globber.globGenerator()), _d; _d = yield _c.next(), !_d.done;) {
const file = _d.value;
const relativeFile = path
.relative(workspace, file)
.replace(new RegExp(`\\${path.sep}`, 'g'), '/');
@@ -1157,7 +1155,7 @@ function resolvePaths(patterns) {
catch (e_1_1) { e_1 = { error: e_1_1 }; }
finally {
try {
if (!_e && !_a && (_b = _f.return)) yield _b.call(_f);
if (_d && !_d.done && (_a = _c.return)) yield _a.call(_c);
}
finally { if (e_1) throw e_1.error; }
}
@@ -3384,8 +3382,7 @@ function createHttpClient() {
return new http_client_1.HttpClient('actions/cache', [bearerCredentialHandler], getRequestOptions());
}
function getCacheVersion(paths, compressionMethod, enableCrossOsArchive = false) {
// don't pass changes upstream
const components = paths.slice();
const components = paths;
// Add compression method to cache version to restore
// compressed cache as per compression method
if (compressionMethod) {
@@ -3397,7 +3394,10 @@ function getCacheVersion(paths, compressionMethod, enableCrossOsArchive = false)
}
// Add salt to cache version to support breaking changes in cache entry
components.push(versionSalt);
return crypto.createHash('sha256').update(components.join('|')).digest('hex');
return crypto
.createHash('sha256')
.update(components.join('|'))
.digest('hex');
}
exports.getCacheVersion = getCacheVersion;
function getCacheEntry(keys, paths, options) {
@@ -3450,21 +3450,13 @@ function downloadCache(archiveLocation, archivePath, options) {
return __awaiter(this, void 0, void 0, function* () {
const archiveUrl = new url_1.URL(archiveLocation);
const downloadOptions = (0, options_1.getDownloadOptions)(options);
if (archiveUrl.hostname.endsWith('.blob.core.windows.net')) {
if (downloadOptions.useAzureSdk) {
// Use Azure storage SDK to download caches hosted on Azure to improve speed and reliability.
yield (0, downloadUtils_1.downloadCacheStorageSDK)(archiveLocation, archivePath, downloadOptions);
}
else if (downloadOptions.concurrentBlobDownloads) {
// Use concurrent implementation with HttpClient to work around blob SDK issue
yield (0, downloadUtils_1.downloadCacheHttpClientConcurrent)(archiveLocation, archivePath, downloadOptions);
}
else {
// Otherwise, download using the Actions http-client.
yield (0, downloadUtils_1.downloadCacheHttpClient)(archiveLocation, archivePath);
}
if (downloadOptions.useAzureSdk &&
archiveUrl.hostname.endsWith('.blob.core.windows.net')) {
// Use Azure storage SDK to download caches hosted on Azure to improve speed and reliability.
yield (0, downloadUtils_1.downloadCacheStorageSDK)(archiveLocation, archivePath, downloadOptions);
}
else {
// Otherwise, download using the Actions http-client.
yield (0, downloadUtils_1.downloadCacheHttpClient)(archiveLocation, archivePath);
}
});
@@ -3497,7 +3489,9 @@ function getContentRange(start, end) {
}
function uploadChunk(httpClient, resourceUrl, openStream, start, end) {
return __awaiter(this, void 0, void 0, function* () {
core.debug(`Uploading chunk of size ${end - start + 1} bytes at offset ${start} with content range: ${getContentRange(start, end)}`);
core.debug(`Uploading chunk of size ${end -
start +
1} bytes at offset ${start} with content range: ${getContentRange(start, end)}`);
const additionalHeaders = {
'Content-Type': 'application/octet-stream',
'Content-Range': getContentRange(start, end)
@@ -4872,13 +4866,7 @@ function getProxyUrl(reqUrl) {
}
})();
if (proxyVar) {
try {
return new URL(proxyVar);
}
catch (_a) {
if (!proxyVar.startsWith('http://') && !proxyVar.startsWith('https://'))
return new URL(`http://${proxyVar}`);
}
return new URL(proxyVar);
}
else {
return undefined;
@@ -4889,10 +4877,6 @@ function checkBypass(reqUrl) {
if (!reqUrl.hostname) {
return false;
}
const reqHost = reqUrl.hostname;
if (isLoopbackAddress(reqHost)) {
return true;
}
const noProxy = process.env['no_proxy'] || process.env['NO_PROXY'] || '';
if (!noProxy) {
return false;
@@ -4918,24 +4902,13 @@ function checkBypass(reqUrl) {
.split(',')
.map(x => x.trim().toUpperCase())
.filter(x => x)) {
if (upperNoProxyItem === '*' ||
upperReqHosts.some(x => x === upperNoProxyItem ||
x.endsWith(`.${upperNoProxyItem}`) ||
(upperNoProxyItem.startsWith('.') &&
x.endsWith(`${upperNoProxyItem}`)))) {
if (upperReqHosts.some(x => x === upperNoProxyItem)) {
return true;
}
}
return false;
}
exports.checkBypass = checkBypass;
function isLoopbackAddress(host) {
const hostLower = host.toLowerCase();
return (hostLower === 'localhost' ||
hostLower.startsWith('127.') ||
hostLower.startsWith('[::1]') ||
hostLower.startsWith('[0:0:0:0:0:0:0:1]'));
}
//# sourceMappingURL=proxy.js.map
/***/ }),
|
||||
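The checkBypass hunks above show the wider NO_PROXY matching: an entry matches the exact host, any subdomain of the entry, or (for leading-dot entries) any host with that suffix, and loopback addresses short-circuit to bypass. The matching rule in isolation, as a sketch (the real function builds upperReqHosts with host:port variants, which this omits):

// NO_PROXY matching as in the diff above: '*', exact host, subdomain, or
// leading-dot suffix. Everything is upper-cased before comparison.
function bypasses(reqHost: string, noProxy: string): boolean {
  const upperReqHosts = [reqHost.toUpperCase()];
  for (const item of noProxy.split(',').map(x => x.trim().toUpperCase()).filter(x => x)) {
    if (item === '*' ||
        upperReqHosts.some(x => x === item ||
          x.endsWith(`.${item}`) ||
          (item.startsWith('.') && x.endsWith(item)))) {
      return true;
    }
  }
  return false;
}

// bypasses('cache.example.com', 'example.com') === true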
@@ -5584,7 +5557,7 @@ var __awaiter = (this && this.__awaiter) || function (thisArg, _arguments, P, ge
});
};
Object.defineProperty(exports, "__esModule", { value: true });
exports.downloadCacheStorageSDK = exports.downloadCacheHttpClientConcurrent = exports.downloadCacheHttpClient = exports.DownloadProgress = void 0;
exports.downloadCacheStorageSDK = exports.downloadCacheHttpClient = exports.DownloadProgress = void 0;
const core = __importStar(__webpack_require__(470));
const http_client_1 = __webpack_require__(425);
const storage_blob_1 = __webpack_require__(373);
@@ -5741,115 +5714,6 @@ function downloadCacheHttpClient(archiveLocation, archivePath) {
});
}
exports.downloadCacheHttpClient = downloadCacheHttpClient;
/**
* Download the cache using the Actions toolkit http-client concurrently
*
* @param archiveLocation the URL for the cache
* @param archivePath the local path where the cache is saved
*/
function downloadCacheHttpClientConcurrent(archiveLocation, archivePath, options) {
var _a;
return __awaiter(this, void 0, void 0, function* () {
const archiveDescriptor = yield fs.promises.open(archivePath, 'w');
const httpClient = new http_client_1.HttpClient('actions/cache', undefined, {
socketTimeout: options.timeoutInMs,
keepAlive: true
});
try {
const res = yield (0, requestUtils_1.retryHttpClientResponse)('downloadCacheMetadata', () => __awaiter(this, void 0, void 0, function* () { return yield httpClient.request('HEAD', archiveLocation, null, {}); }));
const lengthHeader = res.message.headers['content-length'];
if (lengthHeader === undefined || lengthHeader === null) {
throw new Error('Content-Length not found on blob response');
}
const length = parseInt(lengthHeader);
if (Number.isNaN(length)) {
throw new Error(`Could not interpret Content-Length: ${length}`);
}
const downloads = [];
const blockSize = 4 * 1024 * 1024;
for (let offset = 0; offset < length; offset += blockSize) {
const count = Math.min(blockSize, length - offset);
downloads.push({
offset,
promiseGetter: () => __awaiter(this, void 0, void 0, function* () {
return yield downloadSegmentRetry(httpClient, archiveLocation, offset, count);
})
});
}
// reverse to use .pop instead of .shift
downloads.reverse();
let actives = 0;
let bytesDownloaded = 0;
const progress = new DownloadProgress(length);
progress.startDisplayTimer();
const progressFn = progress.onProgress();
const activeDownloads = [];
let nextDownload;
const waitAndWrite = () => __awaiter(this, void 0, void 0, function* () {
const segment = yield Promise.race(Object.values(activeDownloads));
yield archiveDescriptor.write(segment.buffer, 0, segment.count, segment.offset);
actives--;
delete activeDownloads[segment.offset];
bytesDownloaded += segment.count;
progressFn({ loadedBytes: bytesDownloaded });
});
while ((nextDownload = downloads.pop())) {
activeDownloads[nextDownload.offset] = nextDownload.promiseGetter();
actives++;
if (actives >= ((_a = options.downloadConcurrency) !== null && _a !== void 0 ? _a : 10)) {
yield waitAndWrite();
}
}
while (actives > 0) {
yield waitAndWrite();
}
}
finally {
httpClient.dispose();
yield archiveDescriptor.close();
}
});
}
exports.downloadCacheHttpClientConcurrent = downloadCacheHttpClientConcurrent;
function downloadSegmentRetry(httpClient, archiveLocation, offset, count) {
return __awaiter(this, void 0, void 0, function* () {
const retries = 5;
let failures = 0;
while (true) {
try {
const timeout = 30000;
const result = yield promiseWithTimeout(timeout, downloadSegment(httpClient, archiveLocation, offset, count));
if (typeof result === 'string') {
throw new Error('downloadSegmentRetry failed due to timeout');
}
return result;
}
catch (err) {
if (failures >= retries) {
throw err;
}
failures++;
}
}
});
}
function downloadSegment(httpClient, archiveLocation, offset, count) {
return __awaiter(this, void 0, void 0, function* () {
const partRes = yield (0, requestUtils_1.retryHttpClientResponse)('downloadCachePart', () => __awaiter(this, void 0, void 0, function* () {
return yield httpClient.get(archiveLocation, {
Range: `bytes=${offset}-${offset + count - 1}`
});
}));
if (!partRes.readBodyBuffer) {
throw new Error('Expected HttpClientResponse to implement readBodyBuffer');
}
return {
offset,
count,
buffer: yield partRes.readBodyBuffer()
};
});
}
/**
* Download the cache using the Azure Storage SDK. Only call this method if the
* URL points to an Azure Storage endpoint.
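downloadCacheHttpClientConcurrent above splits the archive into 4 MB blocks, issues a Range request per block, keeps at most downloadConcurrency segments in flight via Promise.race, and writes each finished segment at its byte offset. The same scheduling idea in a compact worker-pool form -- a sketch using Node's built-in fetch and fs/promises instead of the toolkit client, with retries and progress reporting omitted:

import { open } from 'node:fs/promises';

// Bounded-concurrency ranged download: queue every segment, run a fixed
// number of workers, write each block at its own offset (no seek races).
async function downloadRanged(url: string, dest: string, length: number, concurrency = 8): Promise<void> {
  const blockSize = 4 * 1024 * 1024;
  const offsets: number[] = [];
  for (let offset = 0; offset < length; offset += blockSize) offsets.push(offset);
  const fd = await open(dest, 'w');
  try {
    const worker = async (): Promise<void> => {
      for (let offset = offsets.pop(); offset !== undefined; offset = offsets.pop()) {
        const count = Math.min(blockSize, length - offset);
        const res = await fetch(url, { headers: { Range: `bytes=${offset}-${offset + count - 1}` } });
        if (!res.ok) throw new Error(`segment at ${offset} failed: ${res.status}`);
        const buf = Buffer.from(await res.arrayBuffer());
        await fd.write(buf, 0, buf.length, offset); // positioned write at the segment offset
      }
    };
    await Promise.all(Array.from({ length: concurrency }, () => worker()));
  } finally {
    await fd.close();
  }
}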
@@ -7272,11 +7136,8 @@ var MAX_SAFE_INTEGER = Number.MAX_SAFE_INTEGER ||
// Max safe segment length for coercion.
var MAX_SAFE_COMPONENT_LENGTH = 16

var MAX_SAFE_BUILD_LENGTH = MAX_LENGTH - 6

// The actual regexps go on exports.re
var re = exports.re = []
var safeRe = exports.safeRe = []
var src = exports.src = []
var t = exports.tokens = {}
var R = 0
@@ -7285,31 +7146,6 @@ function tok (n) {
t[n] = R++
}

var LETTERDASHNUMBER = '[a-zA-Z0-9-]'

// Replace some greedy regex tokens to prevent regex dos issues. These regex are
// used internally via the safeRe object since all inputs in this library get
// normalized first to trim and collapse all extra whitespace. The original
// regexes are exported for userland consumption and lower level usage. A
// future breaking change could export the safer regex only with a note that
// all input should have extra whitespace removed.
var safeRegexReplacements = [
['\\s', 1],
['\\d', MAX_LENGTH],
[LETTERDASHNUMBER, MAX_SAFE_BUILD_LENGTH],
]

function makeSafeRe (value) {
for (var i = 0; i < safeRegexReplacements.length; i++) {
var token = safeRegexReplacements[i][0]
var max = safeRegexReplacements[i][1]
value = value
.split(token + '*').join(token + '{0,' + max + '}')
.split(token + '+').join(token + '{1,' + max + '}')
}
return value
}

// The following Regular Expressions can be used for tokenizing,
// validating, and parsing SemVer version strings.
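makeSafeRe above is the heart of the semver hardening visible in these hunks: it rewrites unbounded quantifiers on whitespace, digit, and letter-dash tokens into bounded repetitions, so every re[i] pattern gains a ReDoS-resistant safeRe[i] twin. Its effect on a pattern, stood alone (MAX_LENGTH is the module's input-length cap, defined outside the hunks shown here; 256 is assumed below):

// Bound greedy quantifiers: token* -> token{0,max}, token+ -> token{1,max}.
const safeRegexReplacements: Array<[string, number]> = [
  ['\\s', 1],
  ['\\d', 256],            // MAX_LENGTH in the module (assumed)
  ['[a-zA-Z0-9-]', 250],   // MAX_SAFE_BUILD_LENGTH = MAX_LENGTH - 6
];

function makeSafeRe(value: string): string {
  for (const [token, max] of safeRegexReplacements) {
    value = value
      .split(token + '*').join(`${token}{0,${max}}`)
      .split(token + '+').join(`${token}{1,${max}}`);
  }
  return value;
}

console.log(makeSafeRe('(\\s*)~>?\\s+')); // (\s{0,1})~>?\s{1,1}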
@@ -7319,14 +7155,14 @@ function makeSafeRe (value) {
tok('NUMERICIDENTIFIER')
src[t.NUMERICIDENTIFIER] = '0|[1-9]\\d*'
tok('NUMERICIDENTIFIERLOOSE')
src[t.NUMERICIDENTIFIERLOOSE] = '\\d+'
src[t.NUMERICIDENTIFIERLOOSE] = '[0-9]+'

// ## Non-numeric Identifier
// Zero or more digits, followed by a letter or hyphen, and then zero or
// more letters, digits, or hyphens.

tok('NONNUMERICIDENTIFIER')
src[t.NONNUMERICIDENTIFIER] = '\\d*[a-zA-Z-]' + LETTERDASHNUMBER + '*'
src[t.NONNUMERICIDENTIFIER] = '\\d*[a-zA-Z-][a-zA-Z0-9-]*'

// ## Main Version
// Three dot-separated numeric identifiers.
@@ -7368,7 +7204,7 @@ src[t.PRERELEASELOOSE] = '(?:-?(' + src[t.PRERELEASEIDENTIFIERLOOSE] +
// Any combination of digits, letters, or hyphens.

tok('BUILDIDENTIFIER')
src[t.BUILDIDENTIFIER] = LETTERDASHNUMBER + '+'
src[t.BUILDIDENTIFIER] = '[0-9A-Za-z-]+'

// ## Build Metadata
// Plus sign, followed by one or more period-separated build metadata
@@ -7448,7 +7284,6 @@ src[t.COERCE] = '(^|[^\\d])' +
'(?:$|[^\\d])'
tok('COERCERTL')
re[t.COERCERTL] = new RegExp(src[t.COERCE], 'g')
safeRe[t.COERCERTL] = new RegExp(makeSafeRe(src[t.COERCE]), 'g')

// Tilde ranges.
// Meaning is "reasonably at or greater than"
@@ -7458,7 +7293,6 @@ src[t.LONETILDE] = '(?:~>?)'
tok('TILDETRIM')
src[t.TILDETRIM] = '(\\s*)' + src[t.LONETILDE] + '\\s+'
re[t.TILDETRIM] = new RegExp(src[t.TILDETRIM], 'g')
safeRe[t.TILDETRIM] = new RegExp(makeSafeRe(src[t.TILDETRIM]), 'g')
var tildeTrimReplace = '$1~'

tok('TILDE')
@@ -7474,7 +7308,6 @@ src[t.LONECARET] = '(?:\\^)'
tok('CARETTRIM')
src[t.CARETTRIM] = '(\\s*)' + src[t.LONECARET] + '\\s+'
re[t.CARETTRIM] = new RegExp(src[t.CARETTRIM], 'g')
safeRe[t.CARETTRIM] = new RegExp(makeSafeRe(src[t.CARETTRIM]), 'g')
var caretTrimReplace = '$1^'

tok('CARET')
@@ -7496,7 +7329,6 @@ src[t.COMPARATORTRIM] = '(\\s*)' + src[t.GTLT] +

// this one has to use the /g flag
re[t.COMPARATORTRIM] = new RegExp(src[t.COMPARATORTRIM], 'g')
safeRe[t.COMPARATORTRIM] = new RegExp(makeSafeRe(src[t.COMPARATORTRIM]), 'g')
var comparatorTrimReplace = '$1$2$3'

// Something like `1.2.3 - 1.2.4`
@@ -7525,14 +7357,6 @@ for (var i = 0; i < R; i++) {
debug(i, src[i])
if (!re[i]) {
re[i] = new RegExp(src[i])

// Replace all greedy whitespace to prevent regex dos issues. These regex are
// used internally via the safeRe object since all inputs in this library get
// normalized first to trim and collapse all extra whitespace. The original
// regexes are exported for userland consumption and lower level usage. A
// future breaking change could export the safer regex only with a note that
// all input should have extra whitespace removed.
safeRe[i] = new RegExp(makeSafeRe(src[i]))
}
}

@@ -7557,7 +7381,7 @@ function parse (version, options) {
return null
}

var r = options.loose ? safeRe[t.LOOSE] : safeRe[t.FULL]
var r = options.loose ? re[t.LOOSE] : re[t.FULL]
if (!r.test(version)) {
return null
}
@@ -7612,7 +7436,7 @@ function SemVer (version, options) {
this.options = options
this.loose = !!options.loose

var m = version.trim().match(options.loose ? safeRe[t.LOOSE] : safeRe[t.FULL])
var m = version.trim().match(options.loose ? re[t.LOOSE] : re[t.FULL])

if (!m) {
throw new TypeError('Invalid Version: ' + version)
@@ -8057,7 +7881,6 @@ function Comparator (comp, options) {
return new Comparator(comp, options)
}

comp = comp.trim().split(/\s+/).join(' ')
debug('comparator', comp, options)
this.options = options
this.loose = !!options.loose
@@ -8074,7 +7897,7 @@ function Comparator (comp, options) {

var ANY = {}
Comparator.prototype.parse = function (comp) {
var r = this.options.loose ? safeRe[t.COMPARATORLOOSE] : safeRe[t.COMPARATOR]
var r = this.options.loose ? re[t.COMPARATORLOOSE] : re[t.COMPARATOR]
var m = comp.match(r)

if (!m) {
@@ -8198,16 +8021,9 @@ function Range (range, options) {
this.loose = !!options.loose
this.includePrerelease = !!options.includePrerelease

// First reduce all whitespace as much as possible so we do not have to rely
// on potentially slow regexes like \s*. This is then stored and used for
// future error messages as well.
this.raw = range
.trim()
.split(/\s+/)
.join(' ')

// First, split based on boolean or ||
this.set = this.raw.split('||').map(function (range) {
this.raw = range
this.set = range.split(/\s*\|\|\s*/).map(function (range) {
return this.parseRange(range.trim())
}, this).filter(function (c) {
// throw out any that are not relevant for whatever reason
@@ -8215,7 +8031,7 @@ function Range (range, options) {
})

if (!this.set.length) {
throw new TypeError('Invalid SemVer Range: ' + this.raw)
throw new TypeError('Invalid SemVer Range: ' + range)
}

this.format()
@@ -8234,19 +8050,20 @@ Range.prototype.toString = function () {

Range.prototype.parseRange = function (range) {
var loose = this.options.loose
range = range.trim()
// `1.2.3 - 1.2.4` => `>=1.2.3 <=1.2.4`
var hr = loose ? safeRe[t.HYPHENRANGELOOSE] : safeRe[t.HYPHENRANGE]
var hr = loose ? re[t.HYPHENRANGELOOSE] : re[t.HYPHENRANGE]
range = range.replace(hr, hyphenReplace)
debug('hyphen replace', range)
// `> 1.2.3 < 1.2.5` => `>1.2.3 <1.2.5`
range = range.replace(safeRe[t.COMPARATORTRIM], comparatorTrimReplace)
debug('comparator trim', range, safeRe[t.COMPARATORTRIM])
range = range.replace(re[t.COMPARATORTRIM], comparatorTrimReplace)
debug('comparator trim', range, re[t.COMPARATORTRIM])

// `~ 1.2.3` => `~1.2.3`
range = range.replace(safeRe[t.TILDETRIM], tildeTrimReplace)
range = range.replace(re[t.TILDETRIM], tildeTrimReplace)

// `^ 1.2.3` => `^1.2.3`
range = range.replace(safeRe[t.CARETTRIM], caretTrimReplace)
range = range.replace(re[t.CARETTRIM], caretTrimReplace)

// normalize spaces
range = range.split(/\s+/).join(' ')
@@ -8254,7 +8071,7 @@ Range.prototype.parseRange = function (range) {
// At this point, the range is completely trimmed and
// ready to be split into comparators.

var compRe = loose ? safeRe[t.COMPARATORLOOSE] : safeRe[t.COMPARATOR]
var compRe = loose ? re[t.COMPARATORLOOSE] : re[t.COMPARATOR]
var set = range.split(' ').map(function (comp) {
return parseComparator(comp, this.options)
}, this).join(' ').split(/\s+/)
@@ -8354,7 +8171,7 @@ function replaceTildes (comp, options) {
}

function replaceTilde (comp, options) {
var r = options.loose ? safeRe[t.TILDELOOSE] : safeRe[t.TILDE]
var r = options.loose ? re[t.TILDELOOSE] : re[t.TILDE]
return comp.replace(r, function (_, M, m, p, pr) {
debug('tilde', comp, _, M, m, p, pr)
var ret
@@ -8395,7 +8212,7 @@ function replaceCarets (comp, options) {

function replaceCaret (comp, options) {
debug('caret', comp, options)
var r = options.loose ? safeRe[t.CARETLOOSE] : safeRe[t.CARET]
var r = options.loose ? re[t.CARETLOOSE] : re[t.CARET]
return comp.replace(r, function (_, M, m, p, pr) {
debug('caret', comp, _, M, m, p, pr)
var ret
@@ -8454,7 +8271,7 @@ function replaceXRanges (comp, options) {

function replaceXRange (comp, options) {
comp = comp.trim()
var r = options.loose ? safeRe[t.XRANGELOOSE] : safeRe[t.XRANGE]
var r = options.loose ? re[t.XRANGELOOSE] : re[t.XRANGE]
return comp.replace(r, function (ret, gtlt, M, m, p, pr) {
debug('xRange', comp, ret, gtlt, M, m, p, pr)
var xM = isX(M)
@@ -8529,7 +8346,7 @@ function replaceXRange (comp, options) {
function replaceStars (comp, options) {
debug('replaceStars', comp, options)
// Looseness is ignored here. star is always as loose as it gets!
return comp.trim().replace(safeRe[t.STAR], '')
return comp.trim().replace(re[t.STAR], '')
}

// This function is passed to string.replace(re[t.HYPHENRANGE])
@@ -8855,7 +8672,7 @@ function coerce (version, options) {

var match = null
if (!options.rtl) {
match = version.match(safeRe[t.COERCE])
match = version.match(re[t.COERCE])
} else {
// Find the right-most coercible string that does not share
// a terminus with a more left-ward coercible string.
@@ -8866,17 +8683,17 @@ function coerce (version, options) {
// Stop when we get a match that ends at the string end, since no
// coercible string can be more right-ward without the same terminus.
var next
while ((next = safeRe[t.COERCERTL].exec(version)) &&
while ((next = re[t.COERCERTL].exec(version)) &&
(!match || match.index + match[0].length !== version.length)
) {
if (!match ||
next.index + next[0].length !== match.index + match[0].length) {
match = next
}
safeRe[t.COERCERTL].lastIndex = next.index + next[1].length + next[2].length
re[t.COERCERTL].lastIndex = next.index + next[1].length + next[2].length
}
// leave it in a clean state
safeRe[t.COERCERTL].lastIndex = -1
re[t.COERCERTL].lastIndex = -1
}

if (match === null) {
@@ -35928,19 +35745,6 @@ class HttpClientResponse {
}));
});
}
readBodyBuffer() {
return __awaiter(this, void 0, void 0, function* () {
return new Promise((resolve) => __awaiter(this, void 0, void 0, function* () {
const chunks = [];
this.message.on('data', (chunk) => {
chunks.push(chunk);
});
this.message.on('end', () => {
resolve(Buffer.concat(chunks));
});
}));
});
}
}
exports.HttpClientResponse = HttpClientResponse;
function isHttps(requestUrl) {
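readBodyBuffer above is new API on HttpClientResponse: it drains the response stream into a single Buffer, which is what downloadSegment relies on to hand back raw segment bytes. The same stream-to-buffer pattern standalone (with an error handler added here that the bundled version does not register):

import { Readable } from 'node:stream';

// Collect a readable stream into one Buffer, as readBodyBuffer does above.
function readToBuffer(stream: Readable): Promise<Buffer> {
  return new Promise((resolve, reject) => {
    const chunks: Buffer[] = [];
    stream.on('data', (chunk: Buffer) => chunks.push(chunk));
    stream.on('end', () => resolve(Buffer.concat(chunks)));
    stream.on('error', reject);
  });
}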
@@ -40477,8 +40281,7 @@ exports.getUploadOptions = getUploadOptions;
*/
function getDownloadOptions(copy) {
const result = {
useAzureSdk: false,
concurrentBlobDownloads: true,
useAzureSdk: true,
downloadConcurrency: 8,
timeoutInMs: 30000,
segmentTimeoutInMs: 600000,
@@ -40488,9 +40291,6 @@ function getDownloadOptions(copy) {
if (typeof copy.useAzureSdk === 'boolean') {
result.useAzureSdk = copy.useAzureSdk;
}
if (typeof copy.concurrentBlobDownloads === 'boolean') {
result.concurrentBlobDownloads = copy.concurrentBlobDownloads;
}
if (typeof copy.downloadConcurrency === 'number') {
result.downloadConcurrency = copy.downloadConcurrency;
}
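The getDownloadOptions hunks above show the two sets of defaults -- one side starts with useAzureSdk: false and concurrentBlobDownloads: true, the other with useAzureSdk: true -- and either way only caller overrides that pass a typeof check are copied in. The defaults-plus-typed-overrides shape, abbreviated to the fields visible in these hunks (the real options object carries more):

interface DownloadOptions {
  useAzureSdk: boolean;
  concurrentBlobDownloads: boolean;
  downloadConcurrency: number;
  timeoutInMs: number;
  segmentTimeoutInMs: number;
}

function getDownloadOptions(copy: Partial<DownloadOptions> = {}): DownloadOptions {
  const result: DownloadOptions = {
    useAzureSdk: false,            // SDK path off by default on this side of the diff
    concurrentBlobDownloads: true, // concurrent HttpClient path preferred for blob URLs
    downloadConcurrency: 8,
    timeoutInMs: 30000,
    segmentTimeoutInMs: 600000
  };
  if (typeof copy.useAzureSdk === 'boolean') result.useAzureSdk = copy.useAzureSdk;
  if (typeof copy.concurrentBlobDownloads === 'boolean') result.concurrentBlobDownloads = copy.concurrentBlobDownloads;
  if (typeof copy.downloadConcurrency === 'number') result.downloadConcurrency = copy.downloadConcurrency;
  return result;
}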
@@ -47653,9 +47453,28 @@ module.exports = function(dst, src) {

"use strict";

var __awaiter = (this && this.__awaiter) || function (thisArg, _arguments, P, generator) {
function adopt(value) { return value instanceof P ? value : new P(function (resolve) { resolve(value); }); }
return new (P || (P = Promise))(function (resolve, reject) {
function fulfilled(value) { try { step(generator.next(value)); } catch (e) { reject(e); } }
function rejected(value) { try { step(generator["throw"](value)); } catch (e) { reject(e); } }
function step(result) { result.done ? resolve(result.value) : adopt(result.value).then(fulfilled, rejected); }
step((generator = generator.apply(thisArg, _arguments || [])).next());
});
};
var __importDefault = (this && this.__importDefault) || function (mod) {
return (mod && mod.__esModule) ? mod : { "default": mod };
};
Object.defineProperty(exports, "__esModule", { value: true });
const restoreImpl_1 = __webpack_require__(835);
(0, restoreImpl_1.restoreRun)(true);
const restoreImpl_1 = __importDefault(__webpack_require__(835));
const stateProvider_1 = __webpack_require__(309);
function run() {
return __awaiter(this, void 0, void 0, function* () {
yield (0, restoreImpl_1.default)(new stateProvider_1.StateProvider());
});
}
run();
exports.default = run;


/***/ }),
@@ -49277,11 +49096,9 @@ var __awaiter = (this && this.__awaiter) || function (thisArg, _arguments, P, ge
});
};
Object.defineProperty(exports, "__esModule", { value: true });
exports.restoreRun = exports.restoreOnlyRun = exports.restoreImpl = void 0;
const cache = __importStar(__webpack_require__(692));
const core = __importStar(__webpack_require__(470));
const constants_1 = __webpack_require__(694);
const stateProvider_1 = __webpack_require__(309);
const utils = __importStar(__webpack_require__(443));
function restoreImpl(stateProvider) {
return __awaiter(this, void 0, void 0, function* () {
@@ -49332,40 +49149,7 @@ function restoreImpl(stateProvider) {
}
});
}
exports.restoreImpl = restoreImpl;
function run(stateProvider, earlyExit) {
return __awaiter(this, void 0, void 0, function* () {
try {
yield restoreImpl(stateProvider);
}
catch (err) {
console.error(err);
if (earlyExit) {
process.exit(1);
}
}
// node will stay alive if any promises are not resolved,
// which is a possibility if HTTP requests are dangling
// due to retries or timeouts. We know that if we got here
// that all promises that we care about have successfully
// resolved, so simply exit with success.
if (earlyExit) {
process.exit(0);
}
});
}
function restoreOnlyRun(earlyExit) {
return __awaiter(this, void 0, void 0, function* () {
yield run(new stateProvider_1.NullStateProvider(), earlyExit);
});
}
exports.restoreOnlyRun = restoreOnlyRun;
function restoreRun(earlyExit) {
return __awaiter(this, void 0, void 0, function* () {
yield run(new stateProvider_1.StateProvider(), earlyExit);
});
}
exports.restoreRun = restoreRun;
exports.default = restoreImpl;


/***/ }),
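One side of the restoreImpl hunks above routes every entry point (restoreRun, restoreOnlyRun) through a shared run(stateProvider, earlyExit) wrapper: errors are logged and become exit code 1, and a successful pass exits 0 explicitly so dangling HTTP retries cannot keep the Node process alive. The wrapper pattern on its own, as a sketch:

// Entry-point wrapper as in the hunk above: one error handler for every
// variant, explicit exits so unresolved promises cannot hang the process.
async function runEntry(impl: () => Promise<void>, earlyExit: boolean): Promise<void> {
  try {
    await impl();
  } catch (err) {
    console.error(err);
    if (earlyExit) process.exit(1);
  }
  if (earlyExit) process.exit(0);
}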
@@ -50305,14 +50089,14 @@ exports.propagation = propagation_1.PropagationAPI.getInstance();
this.saxParser.onopentag = (function(_this) {
return function(node) {
var key, newValue, obj, processedKey, ref;
obj = Object.create(null);
obj = {};
obj[charkey] = "";
if (!_this.options.ignoreAttrs) {
ref = node.attributes;
for (key in ref) {
if (!hasProp.call(ref, key)) continue;
if (!(attrkey in obj) && !_this.options.mergeAttrs) {
obj[attrkey] = Object.create(null);
obj[attrkey] = {};
}
newValue = _this.options.attrValueProcessors ? processItem(_this.options.attrValueProcessors, node.attributes[key], key) : node.attributes[key];
processedKey = _this.options.attrNameProcessors ? processItem(_this.options.attrNameProcessors, key) : key;
@@ -50362,11 +50146,7 @@ exports.propagation = propagation_1.PropagationAPI.getInstance();
}
}
if (isEmpty(obj)) {
if (typeof _this.options.emptyTag === 'function') {
obj = _this.options.emptyTag();
} else {
obj = _this.options.emptyTag !== '' ? _this.options.emptyTag : emptyStr;
}
obj = _this.options.emptyTag !== '' ? _this.options.emptyTag : emptyStr;
}
if (_this.options.validator != null) {
xpath = "/" + ((function() {
@@ -50390,7 +50170,7 @@ exports.propagation = propagation_1.PropagationAPI.getInstance();
}
if (_this.options.explicitChildren && !_this.options.mergeAttrs && typeof obj === 'object') {
if (!_this.options.preserveChildrenOrder) {
node = Object.create(null);
node = {};
if (_this.options.attrkey in obj) {
node[_this.options.attrkey] = obj[_this.options.attrkey];
delete obj[_this.options.attrkey];
@@ -50405,7 +50185,7 @@ exports.propagation = propagation_1.PropagationAPI.getInstance();
obj = node;
} else if (s) {
s[_this.options.childkey] = s[_this.options.childkey] || [];
objClone = Object.create(null);
objClone = {};
for (key in obj) {
if (!hasProp.call(obj, key)) continue;
objClone[key] = obj[key];
@@ -50422,7 +50202,7 @@ exports.propagation = propagation_1.PropagationAPI.getInstance();
} else {
if (_this.options.explicitRoot) {
old = obj;
obj = Object.create(null);
obj = {};
obj[nodeName] = old;
}
_this.resultObject = obj;
@@ -52624,7 +52404,7 @@ var CacheFilename;
(function (CacheFilename) {
CacheFilename["Gzip"] = "cache.tgz";
CacheFilename["Zstd"] = "cache.tzst";
})(CacheFilename || (exports.CacheFilename = CacheFilename = {}));
})(CacheFilename = exports.CacheFilename || (exports.CacheFilename = {}));
var CompressionMethod;
(function (CompressionMethod) {
CompressionMethod["Gzip"] = "gzip";
@@ -52632,12 +52412,12 @@ var CompressionMethod;
// This enum is for earlier version of zstd that does not have --long support
CompressionMethod["ZstdWithoutLong"] = "zstd-without-long";
CompressionMethod["Zstd"] = "zstd";
})(CompressionMethod || (exports.CompressionMethod = CompressionMethod = {}));
})(CompressionMethod = exports.CompressionMethod || (exports.CompressionMethod = {}));
var ArchiveToolType;
(function (ArchiveToolType) {
ArchiveToolType["GNU"] = "gnu";
ArchiveToolType["BSD"] = "bsd";
})(ArchiveToolType || (exports.ArchiveToolType = ArchiveToolType = {}));
})(ArchiveToolType = exports.ArchiveToolType || (exports.ArchiveToolType = {}));
// The default number of retry attempts.
exports.DefaultRetryAttempts = 2;
// The default delay in milliseconds between retry attempts.
@@ -55035,7 +54815,7 @@ class HttpHeaders {
set(headerName, headerValue) {
this._headersMap[getHeaderKey(headerName)] = {
name: headerName,
value: headerValue.toString().trim(),
value: headerValue.toString(),
};
}
/**
@@ -55175,7 +54955,7 @@ const Constants = {
/**
* The core-http version
*/
coreHttpVersion: "3.0.4",
coreHttpVersion: "3.0.0",
/**
* Specifies HTTP.
*/
@@ -55253,6 +55033,13 @@ const XML_CHARKEY = "_";

// Copyright (c) Microsoft Corporation.
const validUuidRegex = /^[0-9a-fA-F]{8}-[0-9a-fA-F]{4}-[0-9a-fA-F]{4}-[0-9a-fA-F]{4}-[0-9a-fA-F]{12}$/i;
/**
* A constant that indicates whether the environment is node.js or browser based.
*/
const isNode = typeof process !== "undefined" &&
!!process.version &&
!!process.versions &&
!!process.versions.node;
/**
* Encodes an URI.
*
@@ -59933,7 +59720,7 @@ function createDefaultRequestPolicyFactories(authPolicyFactory, options) {
factories.push(throttlingRetryPolicy());
}
factories.push(deserializationPolicy(options.deserializationContentTypes));
if (coreUtil.isNode) {
if (isNode) {
factories.push(proxyPolicy(options.proxySettings));
}
factories.push(logPolicy({ logger: logger.info }));
@@ -59965,7 +59752,7 @@ function createPipelineFromOptions(pipelineOptions, authPolicyFactory) {
const keepAliveOptions = Object.assign(Object.assign({}, DefaultKeepAliveOptions), pipelineOptions.keepAliveOptions);
const retryOptions = Object.assign(Object.assign({}, DefaultRetryOptions), pipelineOptions.retryOptions);
const redirectOptions = Object.assign(Object.assign({}, DefaultRedirectOptions), pipelineOptions.redirectOptions);
if (coreUtil.isNode) {
if (isNode) {
requestPolicyFactories.push(proxyPolicy(pipelineOptions.proxyOptions));
}
const deserializationOptions = Object.assign(Object.assign({}, DefaultDeserializationOptions), pipelineOptions.deserializationOptions);
@@ -59978,7 +59765,7 @@ function createPipelineFromOptions(pipelineOptions, authPolicyFactory) {
requestPolicyFactories.push(authPolicyFactory);
}
requestPolicyFactories.push(logPolicy(loggingOptions));
if (coreUtil.isNode && pipelineOptions.decompressResponse === false) {
if (isNode && pipelineOptions.decompressResponse === false) {
requestPolicyFactories.push(disableResponseDecompressionPolicy());
}
return {
@@ -60109,7 +59896,10 @@ function flattenResponse(_response, responseSpec) {
}
function getCredentialScopes(options, baseUri) {
if (options === null || options === void 0 ? void 0 : options.credentialScopes) {
return options.credentialScopes;
const scopes = options.credentialScopes;
return Array.isArray(scopes)
? scopes.map((scope) => new URL(scope).toString())
: new URL(scopes).toString();
}
if (baseUri) {
return `${baseUri}/.default`;
@@ -60342,10 +60132,6 @@ Object.defineProperty(exports, 'delay', {
enumerable: true,
get: function () { return coreUtil.delay; }
});
Object.defineProperty(exports, 'isNode', {
enumerable: true,
get: function () { return coreUtil.isNode; }
});
Object.defineProperty(exports, 'isTokenCredential', {
enumerable: true,
get: function () { return coreAuth.isTokenCredential; }
@@ -60385,6 +60171,7 @@ exports.generateUuid = generateUuid;
exports.getDefaultProxySettings = getDefaultProxySettings;
exports.getDefaultUserAgentValue = getDefaultUserAgentValue;
exports.isDuration = isDuration;
exports.isNode = isNode;
exports.isValidUuid = isValidUuid;
exports.keepAlivePolicy = keepAlivePolicy;
exports.logPolicy = logPolicy;

343
dist/save-only/index.js
vendored
@@ -1183,19 +1183,17 @@ function getArchiveFileSizeInBytes(filePath) {
}
exports.getArchiveFileSizeInBytes = getArchiveFileSizeInBytes;
function resolvePaths(patterns) {
var _a, e_1, _b, _c;
var _d;
var e_1, _a;
var _b;
return __awaiter(this, void 0, void 0, function* () {
const paths = [];
const workspace = (_d = process.env['GITHUB_WORKSPACE']) !== null && _d !== void 0 ? _d : process.cwd();
const workspace = (_b = process.env['GITHUB_WORKSPACE']) !== null && _b !== void 0 ? _b : process.cwd();
const globber = yield glob.create(patterns.join('\n'), {
implicitDescendants: false
});
try {
for (var _e = true, _f = __asyncValues(globber.globGenerator()), _g; _g = yield _f.next(), _a = _g.done, !_a; _e = true) {
_c = _g.value;
_e = false;
const file = _c;
for (var _c = __asyncValues(globber.globGenerator()), _d; _d = yield _c.next(), !_d.done;) {
const file = _d.value;
const relativeFile = path
.relative(workspace, file)
.replace(new RegExp(`\\${path.sep}`, 'g'), '/');
@@ -1213,7 +1211,7 @@ function resolvePaths(patterns) {
catch (e_1_1) { e_1 = { error: e_1_1 }; }
finally {
try {
if (!_e && !_a && (_b = _f.return)) yield _b.call(_f);
if (_d && !_d.done && (_a = _c.return)) yield _a.call(_c);
}
finally { if (e_1) throw e_1.error; }
}
@@ -3440,8 +3438,7 @@ function createHttpClient() {
return new http_client_1.HttpClient('actions/cache', [bearerCredentialHandler], getRequestOptions());
}
function getCacheVersion(paths, compressionMethod, enableCrossOsArchive = false) {
// don't pass changes upstream
const components = paths.slice();
const components = paths;
// Add compression method to cache version to restore
// compressed cache as per compression method
if (compressionMethod) {
@@ -3453,7 +3450,10 @@ function getCacheVersion(paths, compressionMethod, enableCrossOsArchive = false)
}
// Add salt to cache version to support breaking changes in cache entry
components.push(versionSalt);
return crypto.createHash('sha256').update(components.join('|')).digest('hex');
return crypto
.createHash('sha256')
.update(components.join('|'))
.digest('hex');
}
exports.getCacheVersion = getCacheVersion;
function getCacheEntry(keys, paths, options) {
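The getCacheVersion hunks above look cosmetic but are not: const components = paths aliases the caller's array, so the components.push(...) calls below it mutate the paths the caller handed in, whereas paths.slice() ("don't pass changes upstream") copies first. The difference in miniature, as a sketch (versionSalt below stands in for the module's constant of the same name):

import { createHash } from 'node:crypto';

const versionSalt = '1.0'; // placeholder for the module's constant
function cacheVersion(paths: string[], compressionMethod?: string): string {
  const components = paths.slice(); // 'const components = paths' would alias the argument
  if (compressionMethod) components.push(compressionMethod);
  components.push(versionSalt);
  return createHash('sha256').update(components.join('|')).digest('hex');
}

const paths = ['node_modules'];
cacheVersion(paths, 'zstd');
console.log(paths.length); // 1 -- the copy keeps the caller's array intact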
@@ -3506,21 +3506,13 @@ function downloadCache(archiveLocation, archivePath, options) {
return __awaiter(this, void 0, void 0, function* () {
const archiveUrl = new url_1.URL(archiveLocation);
const downloadOptions = (0, options_1.getDownloadOptions)(options);
if (archiveUrl.hostname.endsWith('.blob.core.windows.net')) {
if (downloadOptions.useAzureSdk) {
// Use Azure storage SDK to download caches hosted on Azure to improve speed and reliability.
yield (0, downloadUtils_1.downloadCacheStorageSDK)(archiveLocation, archivePath, downloadOptions);
}
else if (downloadOptions.concurrentBlobDownloads) {
// Use concurrent implementation with HttpClient to work around blob SDK issue
yield (0, downloadUtils_1.downloadCacheHttpClientConcurrent)(archiveLocation, archivePath, downloadOptions);
}
else {
// Otherwise, download using the Actions http-client.
yield (0, downloadUtils_1.downloadCacheHttpClient)(archiveLocation, archivePath);
}
if (downloadOptions.useAzureSdk &&
archiveUrl.hostname.endsWith('.blob.core.windows.net')) {
// Use Azure storage SDK to download caches hosted on Azure to improve speed and reliability.
yield (0, downloadUtils_1.downloadCacheStorageSDK)(archiveLocation, archivePath, downloadOptions);
}
else {
// Otherwise, download using the Actions http-client.
yield (0, downloadUtils_1.downloadCacheHttpClient)(archiveLocation, archivePath);
}
});
@@ -3553,7 +3545,9 @@ function getContentRange(start, end) {
}
function uploadChunk(httpClient, resourceUrl, openStream, start, end) {
return __awaiter(this, void 0, void 0, function* () {
core.debug(`Uploading chunk of size ${end - start + 1} bytes at offset ${start} with content range: ${getContentRange(start, end)}`);
core.debug(`Uploading chunk of size ${end -
start +
1} bytes at offset ${start} with content range: ${getContentRange(start, end)}`);
const additionalHeaders = {
'Content-Type': 'application/octet-stream',
'Content-Range': getContentRange(start, end)
@@ -4928,13 +4922,7 @@ function getProxyUrl(reqUrl) {
}
})();
if (proxyVar) {
try {
return new URL(proxyVar);
}
catch (_a) {
if (!proxyVar.startsWith('http://') && !proxyVar.startsWith('https://'))
return new URL(`http://${proxyVar}`);
}
return new URL(proxyVar);
}
else {
return undefined;
@@ -4945,10 +4933,6 @@ function checkBypass(reqUrl) {
if (!reqUrl.hostname) {
return false;
}
const reqHost = reqUrl.hostname;
if (isLoopbackAddress(reqHost)) {
return true;
}
const noProxy = process.env['no_proxy'] || process.env['NO_PROXY'] || '';
if (!noProxy) {
return false;
@@ -4974,24 +4958,13 @@ function checkBypass(reqUrl) {
.split(',')
.map(x => x.trim().toUpperCase())
.filter(x => x)) {
if (upperNoProxyItem === '*' ||
upperReqHosts.some(x => x === upperNoProxyItem ||
x.endsWith(`.${upperNoProxyItem}`) ||
(upperNoProxyItem.startsWith('.') &&
x.endsWith(`${upperNoProxyItem}`)))) {
if (upperReqHosts.some(x => x === upperNoProxyItem)) {
return true;
}
}
return false;
}
exports.checkBypass = checkBypass;
function isLoopbackAddress(host) {
const hostLower = host.toLowerCase();
return (hostLower === 'localhost' ||
hostLower.startsWith('127.') ||
hostLower.startsWith('[::1]') ||
hostLower.startsWith('[0:0:0:0:0:0:0:1]'));
}
//# sourceMappingURL=proxy.js.map

/***/ }),
@@ -5640,7 +5613,7 @@ var __awaiter = (this && this.__awaiter) || function (thisArg, _arguments, P, ge
});
};
Object.defineProperty(exports, "__esModule", { value: true });
exports.downloadCacheStorageSDK = exports.downloadCacheHttpClientConcurrent = exports.downloadCacheHttpClient = exports.DownloadProgress = void 0;
exports.downloadCacheStorageSDK = exports.downloadCacheHttpClient = exports.DownloadProgress = void 0;
const core = __importStar(__webpack_require__(470));
const http_client_1 = __webpack_require__(425);
const storage_blob_1 = __webpack_require__(373);
@@ -5797,115 +5770,6 @@ function downloadCacheHttpClient(archiveLocation, archivePath) {
});
}
exports.downloadCacheHttpClient = downloadCacheHttpClient;
/**
* Download the cache using the Actions toolkit http-client concurrently
*
* @param archiveLocation the URL for the cache
* @param archivePath the local path where the cache is saved
*/
function downloadCacheHttpClientConcurrent(archiveLocation, archivePath, options) {
var _a;
return __awaiter(this, void 0, void 0, function* () {
const archiveDescriptor = yield fs.promises.open(archivePath, 'w');
const httpClient = new http_client_1.HttpClient('actions/cache', undefined, {
socketTimeout: options.timeoutInMs,
keepAlive: true
});
try {
const res = yield (0, requestUtils_1.retryHttpClientResponse)('downloadCacheMetadata', () => __awaiter(this, void 0, void 0, function* () { return yield httpClient.request('HEAD', archiveLocation, null, {}); }));
const lengthHeader = res.message.headers['content-length'];
if (lengthHeader === undefined || lengthHeader === null) {
throw new Error('Content-Length not found on blob response');
}
const length = parseInt(lengthHeader);
if (Number.isNaN(length)) {
throw new Error(`Could not interpret Content-Length: ${length}`);
}
const downloads = [];
const blockSize = 4 * 1024 * 1024;
for (let offset = 0; offset < length; offset += blockSize) {
const count = Math.min(blockSize, length - offset);
downloads.push({
offset,
promiseGetter: () => __awaiter(this, void 0, void 0, function* () {
return yield downloadSegmentRetry(httpClient, archiveLocation, offset, count);
})
});
}
// reverse to use .pop instead of .shift
downloads.reverse();
let actives = 0;
let bytesDownloaded = 0;
const progress = new DownloadProgress(length);
progress.startDisplayTimer();
const progressFn = progress.onProgress();
const activeDownloads = [];
let nextDownload;
const waitAndWrite = () => __awaiter(this, void 0, void 0, function* () {
const segment = yield Promise.race(Object.values(activeDownloads));
yield archiveDescriptor.write(segment.buffer, 0, segment.count, segment.offset);
actives--;
delete activeDownloads[segment.offset];
bytesDownloaded += segment.count;
progressFn({ loadedBytes: bytesDownloaded });
});
while ((nextDownload = downloads.pop())) {
activeDownloads[nextDownload.offset] = nextDownload.promiseGetter();
actives++;
if (actives >= ((_a = options.downloadConcurrency) !== null && _a !== void 0 ? _a : 10)) {
yield waitAndWrite();
}
}
while (actives > 0) {
yield waitAndWrite();
}
}
finally {
httpClient.dispose();
yield archiveDescriptor.close();
}
});
}
exports.downloadCacheHttpClientConcurrent = downloadCacheHttpClientConcurrent;
function downloadSegmentRetry(httpClient, archiveLocation, offset, count) {
return __awaiter(this, void 0, void 0, function* () {
const retries = 5;
let failures = 0;
while (true) {
try {
const timeout = 30000;
const result = yield promiseWithTimeout(timeout, downloadSegment(httpClient, archiveLocation, offset, count));
if (typeof result === 'string') {
throw new Error('downloadSegmentRetry failed due to timeout');
}
return result;
}
catch (err) {
if (failures >= retries) {
throw err;
}
failures++;
}
}
});
}
function downloadSegment(httpClient, archiveLocation, offset, count) {
return __awaiter(this, void 0, void 0, function* () {
const partRes = yield (0, requestUtils_1.retryHttpClientResponse)('downloadCachePart', () => __awaiter(this, void 0, void 0, function* () {
return yield httpClient.get(archiveLocation, {
Range: `bytes=${offset}-${offset + count - 1}`
});
}));
if (!partRes.readBodyBuffer) {
throw new Error('Expected HttpClientResponse to implement readBodyBuffer');
}
return {
offset,
count,
buffer: yield partRes.readBodyBuffer()
};
});
}
/**
* Download the cache using the Azure Storage SDK. Only call this method if the
* URL points to an Azure Storage endpoint.
@@ -7328,11 +7192,8 @@ var MAX_SAFE_INTEGER = Number.MAX_SAFE_INTEGER ||
// Max safe segment length for coercion.
var MAX_SAFE_COMPONENT_LENGTH = 16

var MAX_SAFE_BUILD_LENGTH = MAX_LENGTH - 6

// The actual regexps go on exports.re
var re = exports.re = []
var safeRe = exports.safeRe = []
var src = exports.src = []
var t = exports.tokens = {}
var R = 0
@@ -7341,31 +7202,6 @@ function tok (n) {
t[n] = R++
}

var LETTERDASHNUMBER = '[a-zA-Z0-9-]'

// Replace some greedy regex tokens to prevent regex dos issues. These regex are
// used internally via the safeRe object since all inputs in this library get
// normalized first to trim and collapse all extra whitespace. The original
// regexes are exported for userland consumption and lower level usage. A
// future breaking change could export the safer regex only with a note that
// all input should have extra whitespace removed.
var safeRegexReplacements = [
['\\s', 1],
['\\d', MAX_LENGTH],
[LETTERDASHNUMBER, MAX_SAFE_BUILD_LENGTH],
]

function makeSafeRe (value) {
for (var i = 0; i < safeRegexReplacements.length; i++) {
var token = safeRegexReplacements[i][0]
var max = safeRegexReplacements[i][1]
value = value
.split(token + '*').join(token + '{0,' + max + '}')
.split(token + '+').join(token + '{1,' + max + '}')
}
return value
}

// The following Regular Expressions can be used for tokenizing,
// validating, and parsing SemVer version strings.
@@ -7375,14 +7211,14 @@ function makeSafeRe (value) {
tok('NUMERICIDENTIFIER')
src[t.NUMERICIDENTIFIER] = '0|[1-9]\\d*'
tok('NUMERICIDENTIFIERLOOSE')
src[t.NUMERICIDENTIFIERLOOSE] = '\\d+'
src[t.NUMERICIDENTIFIERLOOSE] = '[0-9]+'

// ## Non-numeric Identifier
// Zero or more digits, followed by a letter or hyphen, and then zero or
// more letters, digits, or hyphens.

tok('NONNUMERICIDENTIFIER')
src[t.NONNUMERICIDENTIFIER] = '\\d*[a-zA-Z-]' + LETTERDASHNUMBER + '*'
src[t.NONNUMERICIDENTIFIER] = '\\d*[a-zA-Z-][a-zA-Z0-9-]*'

// ## Main Version
// Three dot-separated numeric identifiers.
@@ -7424,7 +7260,7 @@ src[t.PRERELEASELOOSE] = '(?:-?(' + src[t.PRERELEASEIDENTIFIERLOOSE] +
// Any combination of digits, letters, or hyphens.

tok('BUILDIDENTIFIER')
src[t.BUILDIDENTIFIER] = LETTERDASHNUMBER + '+'
src[t.BUILDIDENTIFIER] = '[0-9A-Za-z-]+'

// ## Build Metadata
// Plus sign, followed by one or more period-separated build metadata
@@ -7504,7 +7340,6 @@ src[t.COERCE] = '(^|[^\\d])' +
'(?:$|[^\\d])'
tok('COERCERTL')
re[t.COERCERTL] = new RegExp(src[t.COERCE], 'g')
safeRe[t.COERCERTL] = new RegExp(makeSafeRe(src[t.COERCE]), 'g')

// Tilde ranges.
// Meaning is "reasonably at or greater than"
@@ -7514,7 +7349,6 @@ src[t.LONETILDE] = '(?:~>?)'
tok('TILDETRIM')
src[t.TILDETRIM] = '(\\s*)' + src[t.LONETILDE] + '\\s+'
re[t.TILDETRIM] = new RegExp(src[t.TILDETRIM], 'g')
safeRe[t.TILDETRIM] = new RegExp(makeSafeRe(src[t.TILDETRIM]), 'g')
var tildeTrimReplace = '$1~'

tok('TILDE')
@@ -7530,7 +7364,6 @@ src[t.LONECARET] = '(?:\\^)'
tok('CARETTRIM')
src[t.CARETTRIM] = '(\\s*)' + src[t.LONECARET] + '\\s+'
re[t.CARETTRIM] = new RegExp(src[t.CARETTRIM], 'g')
safeRe[t.CARETTRIM] = new RegExp(makeSafeRe(src[t.CARETTRIM]), 'g')
var caretTrimReplace = '$1^'

tok('CARET')
@@ -7552,7 +7385,6 @@ src[t.COMPARATORTRIM] = '(\\s*)' + src[t.GTLT] +

// this one has to use the /g flag
re[t.COMPARATORTRIM] = new RegExp(src[t.COMPARATORTRIM], 'g')
safeRe[t.COMPARATORTRIM] = new RegExp(makeSafeRe(src[t.COMPARATORTRIM]), 'g')
var comparatorTrimReplace = '$1$2$3'

// Something like `1.2.3 - 1.2.4`
@@ -7581,14 +7413,6 @@ for (var i = 0; i < R; i++) {
debug(i, src[i])
if (!re[i]) {
re[i] = new RegExp(src[i])

// Replace all greedy whitespace to prevent regex dos issues. These regex are
// used internally via the safeRe object since all inputs in this library get
// normalized first to trim and collapse all extra whitespace. The original
// regexes are exported for userland consumption and lower level usage. A
// future breaking change could export the safer regex only with a note that
// all input should have extra whitespace removed.
safeRe[i] = new RegExp(makeSafeRe(src[i]))
}
}

@@ -7613,7 +7437,7 @@ function parse (version, options) {
return null
}

var r = options.loose ? safeRe[t.LOOSE] : safeRe[t.FULL]
var r = options.loose ? re[t.LOOSE] : re[t.FULL]
if (!r.test(version)) {
return null
}
@@ -7668,7 +7492,7 @@ function SemVer (version, options) {
this.options = options
this.loose = !!options.loose

var m = version.trim().match(options.loose ? safeRe[t.LOOSE] : safeRe[t.FULL])
var m = version.trim().match(options.loose ? re[t.LOOSE] : re[t.FULL])

if (!m) {
throw new TypeError('Invalid Version: ' + version)
@@ -8113,7 +7937,6 @@ function Comparator (comp, options) {
return new Comparator(comp, options)
}

comp = comp.trim().split(/\s+/).join(' ')
debug('comparator', comp, options)
this.options = options
this.loose = !!options.loose
@@ -8130,7 +7953,7 @@ function Comparator (comp, options) {

var ANY = {}
Comparator.prototype.parse = function (comp) {
var r = this.options.loose ? safeRe[t.COMPARATORLOOSE] : safeRe[t.COMPARATOR]
var r = this.options.loose ? re[t.COMPARATORLOOSE] : re[t.COMPARATOR]
var m = comp.match(r)

if (!m) {
@@ -8254,16 +8077,9 @@ function Range (range, options) {
this.loose = !!options.loose
this.includePrerelease = !!options.includePrerelease

// First reduce all whitespace as much as possible so we do not have to rely
// on potentially slow regexes like \s*. This is then stored and used for
// future error messages as well.
this.raw = range
.trim()
.split(/\s+/)
.join(' ')

// First, split based on boolean or ||
this.set = this.raw.split('||').map(function (range) {
this.raw = range
this.set = range.split(/\s*\|\|\s*/).map(function (range) {
return this.parseRange(range.trim())
}, this).filter(function (c) {
// throw out any that are not relevant for whatever reason
@@ -8271,7 +8087,7 @@ function Range (range, options) {
})

if (!this.set.length) {
throw new TypeError('Invalid SemVer Range: ' + this.raw)
throw new TypeError('Invalid SemVer Range: ' + range)
}

this.format()
@@ -8290,19 +8106,20 @@ Range.prototype.toString = function () {

Range.prototype.parseRange = function (range) {
var loose = this.options.loose
range = range.trim()
// `1.2.3 - 1.2.4` => `>=1.2.3 <=1.2.4`
var hr = loose ? safeRe[t.HYPHENRANGELOOSE] : safeRe[t.HYPHENRANGE]
var hr = loose ? re[t.HYPHENRANGELOOSE] : re[t.HYPHENRANGE]
range = range.replace(hr, hyphenReplace)
debug('hyphen replace', range)
// `> 1.2.3 < 1.2.5` => `>1.2.3 <1.2.5`
range = range.replace(safeRe[t.COMPARATORTRIM], comparatorTrimReplace)
debug('comparator trim', range, safeRe[t.COMPARATORTRIM])
range = range.replace(re[t.COMPARATORTRIM], comparatorTrimReplace)
debug('comparator trim', range, re[t.COMPARATORTRIM])

// `~ 1.2.3` => `~1.2.3`
range = range.replace(safeRe[t.TILDETRIM], tildeTrimReplace)
range = range.replace(re[t.TILDETRIM], tildeTrimReplace)

// `^ 1.2.3` => `^1.2.3`
range = range.replace(safeRe[t.CARETTRIM], caretTrimReplace)
range = range.replace(re[t.CARETTRIM], caretTrimReplace)

// normalize spaces
range = range.split(/\s+/).join(' ')
@@ -8310,7 +8127,7 @@ Range.prototype.parseRange = function (range) {
// At this point, the range is completely trimmed and
// ready to be split into comparators.

var compRe = loose ? safeRe[t.COMPARATORLOOSE] : safeRe[t.COMPARATOR]
var compRe = loose ? re[t.COMPARATORLOOSE] : re[t.COMPARATOR]
var set = range.split(' ').map(function (comp) {
return parseComparator(comp, this.options)
}, this).join(' ').split(/\s+/)
@@ -8410,7 +8227,7 @@ function replaceTildes (comp, options) {
}

function replaceTilde (comp, options) {
var r = options.loose ? safeRe[t.TILDELOOSE] : safeRe[t.TILDE]
var r = options.loose ? re[t.TILDELOOSE] : re[t.TILDE]
return comp.replace(r, function (_, M, m, p, pr) {
debug('tilde', comp, _, M, m, p, pr)
var ret
@@ -8451,7 +8268,7 @@ function replaceCarets (comp, options) {

function replaceCaret (comp, options) {
debug('caret', comp, options)
var r = options.loose ? safeRe[t.CARETLOOSE] : safeRe[t.CARET]
var r = options.loose ? re[t.CARETLOOSE] : re[t.CARET]
return comp.replace(r, function (_, M, m, p, pr) {
debug('caret', comp, _, M, m, p, pr)
var ret
@@ -8510,7 +8327,7 @@ function replaceXRanges (comp, options) {

function replaceXRange (comp, options) {
comp = comp.trim()
var r = options.loose ? safeRe[t.XRANGELOOSE] : safeRe[t.XRANGE]
var r = options.loose ? re[t.XRANGELOOSE] : re[t.XRANGE]
return comp.replace(r, function (ret, gtlt, M, m, p, pr) {
debug('xRange', comp, ret, gtlt, M, m, p, pr)
var xM = isX(M)
@@ -8585,7 +8402,7 @@ function replaceXRange (comp, options) {
function replaceStars (comp, options) {
debug('replaceStars', comp, options)
// Looseness is ignored here. star is always as loose as it gets!
return comp.trim().replace(safeRe[t.STAR], '')
return comp.trim().replace(re[t.STAR], '')
}

// This function is passed to string.replace(re[t.HYPHENRANGE])
@@ -8911,7 +8728,7 @@ function coerce (version, options) {

var match = null
if (!options.rtl) {
match = version.match(safeRe[t.COERCE])
match = version.match(re[t.COERCE])
} else {
// Find the right-most coercible string that does not share
// a terminus with a more left-ward coercible string.
@@ -8922,17 +8739,17 @@ function coerce (version, options) {
// Stop when we get a match that ends at the string end, since no
// coercible string can be more right-ward without the same terminus.
var next
while ((next = safeRe[t.COERCERTL].exec(version)) &&
while ((next = re[t.COERCERTL].exec(version)) &&
(!match || match.index + match[0].length !== version.length)
) {
if (!match ||
next.index + next[0].length !== match.index + match[0].length) {
match = next
}
safeRe[t.COERCERTL].lastIndex = next.index + next[1].length + next[2].length
re[t.COERCERTL].lastIndex = next.index + next[1].length + next[2].length
}
// leave it in a clean state
safeRe[t.COERCERTL].lastIndex = -1
re[t.COERCERTL].lastIndex = -1
}

if (match === null) {
@@ -35979,19 +35796,6 @@ class HttpClientResponse {
}));
});
}
readBodyBuffer() {
return __awaiter(this, void 0, void 0, function* () {
return new Promise((resolve) => __awaiter(this, void 0, void 0, function* () {
const chunks = [];
this.message.on('data', (chunk) => {
chunks.push(chunk);
});
this.message.on('end', () => {
resolve(Buffer.concat(chunks));
});
}));
});
}
}
exports.HttpClientResponse = HttpClientResponse;
function isHttps(requestUrl) {
@@ -40618,8 +40422,7 @@ exports.getUploadOptions = getUploadOptions;
*/
function getDownloadOptions(copy) {
const result = {
useAzureSdk: false,
concurrentBlobDownloads: true,
useAzureSdk: true,
downloadConcurrency: 8,
timeoutInMs: 30000,
segmentTimeoutInMs: 600000,
@@ -40629,9 +40432,6 @@ function getDownloadOptions(copy) {
if (typeof copy.useAzureSdk === 'boolean') {
result.useAzureSdk = copy.useAzureSdk;
}
if (typeof copy.concurrentBlobDownloads === 'boolean') {
result.concurrentBlobDownloads = copy.concurrentBlobDownloads;
}
if (typeof copy.downloadConcurrency === 'number') {
result.downloadConcurrency = copy.downloadConcurrency;
}
@@ -50312,14 +50112,14 @@ exports.propagation = propagation_1.PropagationAPI.getInstance();
this.saxParser.onopentag = (function(_this) {
return function(node) {
var key, newValue, obj, processedKey, ref;
obj = Object.create(null);
obj = {};
obj[charkey] = "";
if (!_this.options.ignoreAttrs) {
ref = node.attributes;
for (key in ref) {
if (!hasProp.call(ref, key)) continue;
if (!(attrkey in obj) && !_this.options.mergeAttrs) {
obj[attrkey] = Object.create(null);
obj[attrkey] = {};
}
newValue = _this.options.attrValueProcessors ? processItem(_this.options.attrValueProcessors, node.attributes[key], key) : node.attributes[key];
processedKey = _this.options.attrNameProcessors ? processItem(_this.options.attrNameProcessors, key) : key;
@@ -50369,11 +50169,7 @@ exports.propagation = propagation_1.PropagationAPI.getInstance();
}
}
if (isEmpty(obj)) {
if (typeof _this.options.emptyTag === 'function') {
obj = _this.options.emptyTag();
} else {
obj = _this.options.emptyTag !== '' ? _this.options.emptyTag : emptyStr;
}
obj = _this.options.emptyTag !== '' ? _this.options.emptyTag : emptyStr;
}
if (_this.options.validator != null) {
xpath = "/" + ((function() {
@@ -50397,7 +50193,7 @@ exports.propagation = propagation_1.PropagationAPI.getInstance();
}
if (_this.options.explicitChildren && !_this.options.mergeAttrs && typeof obj === 'object') {
if (!_this.options.preserveChildrenOrder) {
node = Object.create(null);
node = {};
if (_this.options.attrkey in obj) {
node[_this.options.attrkey] = obj[_this.options.attrkey];
delete obj[_this.options.attrkey];
@@ -50412,7 +50208,7 @@ exports.propagation = propagation_1.PropagationAPI.getInstance();
obj = node;
} else if (s) {
s[_this.options.childkey] = s[_this.options.childkey] || [];
objClone = Object.create(null);
objClone = {};
for (key in obj) {
if (!hasProp.call(obj, key)) continue;
objClone[key] = obj[key];
@@ -50429,7 +50225,7 @@ exports.propagation = propagation_1.PropagationAPI.getInstance();
} else {
if (_this.options.explicitRoot) {
old = obj;
obj = Object.create(null);
obj = {};
obj[nodeName] = old;
}
_this.resultObject = obj;
@@ -52631,7 +52427,7 @@ var CacheFilename;
|
||||
(function (CacheFilename) {
|
||||
CacheFilename["Gzip"] = "cache.tgz";
|
||||
CacheFilename["Zstd"] = "cache.tzst";
|
||||
})(CacheFilename || (exports.CacheFilename = CacheFilename = {}));
|
||||
})(CacheFilename = exports.CacheFilename || (exports.CacheFilename = {}));
|
||||
var CompressionMethod;
|
||||
(function (CompressionMethod) {
|
||||
CompressionMethod["Gzip"] = "gzip";
|
||||
@@ -52639,12 +52435,12 @@ var CompressionMethod;
|
||||
// This enum is for earlier version of zstd that does not have --long support
|
||||
CompressionMethod["ZstdWithoutLong"] = "zstd-without-long";
|
||||
CompressionMethod["Zstd"] = "zstd";
|
||||
})(CompressionMethod || (exports.CompressionMethod = CompressionMethod = {}));
|
||||
})(CompressionMethod = exports.CompressionMethod || (exports.CompressionMethod = {}));
|
||||
var ArchiveToolType;
|
||||
(function (ArchiveToolType) {
|
||||
ArchiveToolType["GNU"] = "gnu";
|
||||
ArchiveToolType["BSD"] = "bsd";
|
||||
})(ArchiveToolType || (exports.ArchiveToolType = ArchiveToolType = {}));
|
||||
})(ArchiveToolType = exports.ArchiveToolType || (exports.ArchiveToolType = {}));
|
||||
// The default number of retry attempts.
|
||||
exports.DefaultRetryAttempts = 2;
|
||||
// The default delay in milliseconds between retry attempts.
|
||||
@@ -55042,7 +54838,7 @@ class HttpHeaders {
|
||||
set(headerName, headerValue) {
|
||||
this._headersMap[getHeaderKey(headerName)] = {
|
||||
name: headerName,
|
||||
value: headerValue.toString().trim(),
|
||||
value: headerValue.toString(),
|
||||
};
|
||||
}
|
||||
/**
|
||||
@@ -55182,7 +54978,7 @@ const Constants = {
|
||||
/**
|
||||
* The core-http version
|
||||
*/
|
||||
coreHttpVersion: "3.0.4",
|
||||
coreHttpVersion: "3.0.0",
|
||||
/**
|
||||
* Specifies HTTP.
|
||||
*/
|
||||
@@ -55260,6 +55056,13 @@ const XML_CHARKEY = "_";
|
||||
|
||||
// Copyright (c) Microsoft Corporation.
|
||||
const validUuidRegex = /^[0-9a-fA-F]{8}-[0-9a-fA-F]{4}-[0-9a-fA-F]{4}-[0-9a-fA-F]{4}-[0-9a-fA-F]{12}$/i;
|
||||
/**
|
||||
* A constant that indicates whether the environment is node.js or browser based.
|
||||
*/
|
||||
const isNode = typeof process !== "undefined" &&
|
||||
!!process.version &&
|
||||
!!process.versions &&
|
||||
!!process.versions.node;
|
||||
/**
|
||||
* Encodes an URI.
|
||||
*
|
||||
@@ -59940,7 +59743,7 @@ function createDefaultRequestPolicyFactories(authPolicyFactory, options) {
|
||||
factories.push(throttlingRetryPolicy());
|
||||
}
|
||||
factories.push(deserializationPolicy(options.deserializationContentTypes));
|
||||
if (coreUtil.isNode) {
|
||||
if (isNode) {
|
||||
factories.push(proxyPolicy(options.proxySettings));
|
||||
}
|
||||
factories.push(logPolicy({ logger: logger.info }));
|
||||
@@ -59972,7 +59775,7 @@ function createPipelineFromOptions(pipelineOptions, authPolicyFactory) {
|
||||
const keepAliveOptions = Object.assign(Object.assign({}, DefaultKeepAliveOptions), pipelineOptions.keepAliveOptions);
|
||||
const retryOptions = Object.assign(Object.assign({}, DefaultRetryOptions), pipelineOptions.retryOptions);
|
||||
const redirectOptions = Object.assign(Object.assign({}, DefaultRedirectOptions), pipelineOptions.redirectOptions);
|
||||
if (coreUtil.isNode) {
|
||||
if (isNode) {
|
||||
requestPolicyFactories.push(proxyPolicy(pipelineOptions.proxyOptions));
|
||||
}
|
||||
const deserializationOptions = Object.assign(Object.assign({}, DefaultDeserializationOptions), pipelineOptions.deserializationOptions);
|
||||
@@ -59985,7 +59788,7 @@ function createPipelineFromOptions(pipelineOptions, authPolicyFactory) {
|
||||
requestPolicyFactories.push(authPolicyFactory);
|
||||
}
|
||||
requestPolicyFactories.push(logPolicy(loggingOptions));
|
||||
if (coreUtil.isNode && pipelineOptions.decompressResponse === false) {
|
||||
if (isNode && pipelineOptions.decompressResponse === false) {
|
||||
requestPolicyFactories.push(disableResponseDecompressionPolicy());
|
||||
}
|
||||
return {
|
||||
@@ -60116,7 +59919,10 @@ function flattenResponse(_response, responseSpec) {
|
||||
}
|
||||
function getCredentialScopes(options, baseUri) {
|
||||
if (options === null || options === void 0 ? void 0 : options.credentialScopes) {
|
||||
return options.credentialScopes;
|
||||
const scopes = options.credentialScopes;
|
||||
return Array.isArray(scopes)
|
||||
? scopes.map((scope) => new URL(scope).toString())
|
||||
: new URL(scopes).toString();
|
||||
}
|
||||
if (baseUri) {
|
||||
return `${baseUri}/.default`;
|
||||
@@ -60349,10 +60155,6 @@ Object.defineProperty(exports, 'delay', {
|
||||
enumerable: true,
|
||||
get: function () { return coreUtil.delay; }
|
||||
});
|
||||
Object.defineProperty(exports, 'isNode', {
|
||||
enumerable: true,
|
||||
get: function () { return coreUtil.isNode; }
|
||||
});
|
||||
Object.defineProperty(exports, 'isTokenCredential', {
|
||||
enumerable: true,
|
||||
get: function () { return coreAuth.isTokenCredential; }
|
||||
@@ -60392,6 +60194,7 @@ exports.generateUuid = generateUuid;
|
||||
exports.getDefaultProxySettings = getDefaultProxySettings;
|
||||
exports.getDefaultUserAgentValue = getDefaultUserAgentValue;
|
||||
exports.isDuration = isDuration;
|
||||
exports.isNode = isNode;
|
||||
exports.isValidUuid = isValidUuid;
|
||||
exports.keepAlivePolicy = keepAlivePolicy;
|
||||
exports.logPolicy = logPolicy;
|
||||
|
||||
343
dist/save/index.js
vendored
@@ -1127,19 +1127,17 @@ function getArchiveFileSizeInBytes(filePath) {
}
exports.getArchiveFileSizeInBytes = getArchiveFileSizeInBytes;
function resolvePaths(patterns) {
var _a, e_1, _b, _c;
var _d;
var e_1, _a;
var _b;
return __awaiter(this, void 0, void 0, function* () {
const paths = [];
const workspace = (_d = process.env['GITHUB_WORKSPACE']) !== null && _d !== void 0 ? _d : process.cwd();
const workspace = (_b = process.env['GITHUB_WORKSPACE']) !== null && _b !== void 0 ? _b : process.cwd();
const globber = yield glob.create(patterns.join('\n'), {
implicitDescendants: false
});
try {
for (var _e = true, _f = __asyncValues(globber.globGenerator()), _g; _g = yield _f.next(), _a = _g.done, !_a; _e = true) {
_c = _g.value;
_e = false;
const file = _c;
for (var _c = __asyncValues(globber.globGenerator()), _d; _d = yield _c.next(), !_d.done;) {
const file = _d.value;
const relativeFile = path
.relative(workspace, file)
.replace(new RegExp(`\\${path.sep}`, 'g'), '/');
@@ -1157,7 +1155,7 @@ function resolvePaths(patterns) {
catch (e_1_1) { e_1 = { error: e_1_1 }; }
finally {
try {
if (!_e && !_a && (_b = _f.return)) yield _b.call(_f);
if (_d && !_d.done && (_a = _c.return)) yield _a.call(_c);
}
finally { if (e_1) throw e_1.error; }
}
@@ -3384,8 +3382,7 @@ function createHttpClient() {
return new http_client_1.HttpClient('actions/cache', [bearerCredentialHandler], getRequestOptions());
}
function getCacheVersion(paths, compressionMethod, enableCrossOsArchive = false) {
// don't pass changes upstream
const components = paths.slice();
const components = paths;
// Add compression method to cache version to restore
// compressed cache as per compression method
if (compressionMethod) {
@@ -3397,7 +3394,10 @@ function getCacheVersion(paths, compressionMethod, enableCrossOsArchive = false)
}
// Add salt to cache version to support breaking changes in cache entry
components.push(versionSalt);
return crypto.createHash('sha256').update(components.join('|')).digest('hex');
return crypto
.createHash('sha256')
.update(components.join('|'))
.digest('hex');
}
exports.getCacheVersion = getCacheVersion;
function getCacheEntry(keys, paths, options) {
@@ -3450,21 +3450,13 @@ function downloadCache(archiveLocation, archivePath, options) {
return __awaiter(this, void 0, void 0, function* () {
const archiveUrl = new url_1.URL(archiveLocation);
const downloadOptions = (0, options_1.getDownloadOptions)(options);
if (archiveUrl.hostname.endsWith('.blob.core.windows.net')) {
if (downloadOptions.useAzureSdk) {
// Use Azure storage SDK to download caches hosted on Azure to improve speed and reliability.
yield (0, downloadUtils_1.downloadCacheStorageSDK)(archiveLocation, archivePath, downloadOptions);
}
else if (downloadOptions.concurrentBlobDownloads) {
// Use concurrent implementation with HttpClient to work around blob SDK issue
yield (0, downloadUtils_1.downloadCacheHttpClientConcurrent)(archiveLocation, archivePath, downloadOptions);
}
else {
// Otherwise, download using the Actions http-client.
yield (0, downloadUtils_1.downloadCacheHttpClient)(archiveLocation, archivePath);
}
if (downloadOptions.useAzureSdk &&
archiveUrl.hostname.endsWith('.blob.core.windows.net')) {
// Use Azure storage SDK to download caches hosted on Azure to improve speed and reliability.
yield (0, downloadUtils_1.downloadCacheStorageSDK)(archiveLocation, archivePath, downloadOptions);
}
else {
// Otherwise, download using the Actions http-client.
yield (0, downloadUtils_1.downloadCacheHttpClient)(archiveLocation, archivePath);
}
});
@@ -3497,7 +3489,9 @@ function getContentRange(start, end) {
}
function uploadChunk(httpClient, resourceUrl, openStream, start, end) {
return __awaiter(this, void 0, void 0, function* () {
core.debug(`Uploading chunk of size ${end - start + 1} bytes at offset ${start} with content range: ${getContentRange(start, end)}`);
core.debug(`Uploading chunk of size ${end -
start +
1} bytes at offset ${start} with content range: ${getContentRange(start, end)}`);
const additionalHeaders = {
'Content-Type': 'application/octet-stream',
'Content-Range': getContentRange(start, end)
@@ -4872,13 +4866,7 @@ function getProxyUrl(reqUrl) {
}
})();
if (proxyVar) {
try {
return new URL(proxyVar);
}
catch (_a) {
if (!proxyVar.startsWith('http://') && !proxyVar.startsWith('https://'))
return new URL(`http://${proxyVar}`);
}
return new URL(proxyVar);
}
else {
return undefined;
@@ -4889,10 +4877,6 @@ function checkBypass(reqUrl) {
if (!reqUrl.hostname) {
return false;
}
const reqHost = reqUrl.hostname;
if (isLoopbackAddress(reqHost)) {
return true;
}
const noProxy = process.env['no_proxy'] || process.env['NO_PROXY'] || '';
if (!noProxy) {
return false;
@@ -4918,24 +4902,13 @@ function checkBypass(reqUrl) {
.split(',')
.map(x => x.trim().toUpperCase())
.filter(x => x)) {
if (upperNoProxyItem === '*' ||
upperReqHosts.some(x => x === upperNoProxyItem ||
x.endsWith(`.${upperNoProxyItem}`) ||
(upperNoProxyItem.startsWith('.') &&
x.endsWith(`${upperNoProxyItem}`)))) {
if (upperReqHosts.some(x => x === upperNoProxyItem)) {
return true;
}
}
return false;
}
exports.checkBypass = checkBypass;
function isLoopbackAddress(host) {
const hostLower = host.toLowerCase();
return (hostLower === 'localhost' ||
hostLower.startsWith('127.') ||
hostLower.startsWith('[::1]') ||
hostLower.startsWith('[0:0:0:0:0:0:0:1]'));
}
//# sourceMappingURL=proxy.js.map

/***/ }),
@@ -5584,7 +5557,7 @@ var __awaiter = (this && this.__awaiter) || function (thisArg, _arguments, P, ge
});
};
Object.defineProperty(exports, "__esModule", { value: true });
exports.downloadCacheStorageSDK = exports.downloadCacheHttpClientConcurrent = exports.downloadCacheHttpClient = exports.DownloadProgress = void 0;
exports.downloadCacheStorageSDK = exports.downloadCacheHttpClient = exports.DownloadProgress = void 0;
const core = __importStar(__webpack_require__(470));
const http_client_1 = __webpack_require__(425);
const storage_blob_1 = __webpack_require__(373);
@@ -5741,115 +5714,6 @@ function downloadCacheHttpClient(archiveLocation, archivePath) {
});
}
exports.downloadCacheHttpClient = downloadCacheHttpClient;
/**
* Download the cache using the Actions toolkit http-client concurrently
*
* @param archiveLocation the URL for the cache
* @param archivePath the local path where the cache is saved
*/
function downloadCacheHttpClientConcurrent(archiveLocation, archivePath, options) {
var _a;
return __awaiter(this, void 0, void 0, function* () {
const archiveDescriptor = yield fs.promises.open(archivePath, 'w');
const httpClient = new http_client_1.HttpClient('actions/cache', undefined, {
socketTimeout: options.timeoutInMs,
keepAlive: true
});
try {
const res = yield (0, requestUtils_1.retryHttpClientResponse)('downloadCacheMetadata', () => __awaiter(this, void 0, void 0, function* () { return yield httpClient.request('HEAD', archiveLocation, null, {}); }));
const lengthHeader = res.message.headers['content-length'];
if (lengthHeader === undefined || lengthHeader === null) {
throw new Error('Content-Length not found on blob response');
}
const length = parseInt(lengthHeader);
if (Number.isNaN(length)) {
throw new Error(`Could not interpret Content-Length: ${length}`);
}
const downloads = [];
const blockSize = 4 * 1024 * 1024;
for (let offset = 0; offset < length; offset += blockSize) {
const count = Math.min(blockSize, length - offset);
downloads.push({
offset,
promiseGetter: () => __awaiter(this, void 0, void 0, function* () {
return yield downloadSegmentRetry(httpClient, archiveLocation, offset, count);
})
});
}
// reverse to use .pop instead of .shift
downloads.reverse();
let actives = 0;
let bytesDownloaded = 0;
const progress = new DownloadProgress(length);
progress.startDisplayTimer();
const progressFn = progress.onProgress();
const activeDownloads = [];
let nextDownload;
const waitAndWrite = () => __awaiter(this, void 0, void 0, function* () {
const segment = yield Promise.race(Object.values(activeDownloads));
yield archiveDescriptor.write(segment.buffer, 0, segment.count, segment.offset);
actives--;
delete activeDownloads[segment.offset];
bytesDownloaded += segment.count;
progressFn({ loadedBytes: bytesDownloaded });
});
while ((nextDownload = downloads.pop())) {
activeDownloads[nextDownload.offset] = nextDownload.promiseGetter();
actives++;
if (actives >= ((_a = options.downloadConcurrency) !== null && _a !== void 0 ? _a : 10)) {
yield waitAndWrite();
}
}
while (actives > 0) {
yield waitAndWrite();
}
}
finally {
httpClient.dispose();
yield archiveDescriptor.close();
}
});
}
exports.downloadCacheHttpClientConcurrent = downloadCacheHttpClientConcurrent;
function downloadSegmentRetry(httpClient, archiveLocation, offset, count) {
return __awaiter(this, void 0, void 0, function* () {
const retries = 5;
let failures = 0;
while (true) {
try {
const timeout = 30000;
const result = yield promiseWithTimeout(timeout, downloadSegment(httpClient, archiveLocation, offset, count));
if (typeof result === 'string') {
throw new Error('downloadSegmentRetry failed due to timeout');
}
return result;
}
catch (err) {
if (failures >= retries) {
throw err;
}
failures++;
}
}
});
}
function downloadSegment(httpClient, archiveLocation, offset, count) {
return __awaiter(this, void 0, void 0, function* () {
const partRes = yield (0, requestUtils_1.retryHttpClientResponse)('downloadCachePart', () => __awaiter(this, void 0, void 0, function* () {
return yield httpClient.get(archiveLocation, {
Range: `bytes=${offset}-${offset + count - 1}`
});
}));
if (!partRes.readBodyBuffer) {
throw new Error('Expected HttpClientResponse to implement readBodyBuffer');
}
return {
offset,
count,
buffer: yield partRes.readBodyBuffer()
};
});
}
/**
* Download the cache using the Azure Storage SDK. Only call this method if the
* URL points to an Azure Storage endpoint.
@@ -7272,11 +7136,8 @@ var MAX_SAFE_INTEGER = Number.MAX_SAFE_INTEGER ||
// Max safe segment length for coercion.
var MAX_SAFE_COMPONENT_LENGTH = 16

var MAX_SAFE_BUILD_LENGTH = MAX_LENGTH - 6

// The actual regexps go on exports.re
var re = exports.re = []
var safeRe = exports.safeRe = []
var src = exports.src = []
var t = exports.tokens = {}
var R = 0
@@ -7285,31 +7146,6 @@ function tok (n) {
t[n] = R++
}

var LETTERDASHNUMBER = '[a-zA-Z0-9-]'

// Replace some greedy regex tokens to prevent regex dos issues. These regex are
// used internally via the safeRe object since all inputs in this library get
// normalized first to trim and collapse all extra whitespace. The original
// regexes are exported for userland consumption and lower level usage. A
// future breaking change could export the safer regex only with a note that
// all input should have extra whitespace removed.
var safeRegexReplacements = [
['\\s', 1],
['\\d', MAX_LENGTH],
[LETTERDASHNUMBER, MAX_SAFE_BUILD_LENGTH],
]

function makeSafeRe (value) {
for (var i = 0; i < safeRegexReplacements.length; i++) {
var token = safeRegexReplacements[i][0]
var max = safeRegexReplacements[i][1]
value = value
.split(token + '*').join(token + '{0,' + max + '}')
.split(token + '+').join(token + '{1,' + max + '}')
}
return value
}

// The following Regular Expressions can be used for tokenizing,
// validating, and parsing SemVer version strings.

@@ -7319,14 +7155,14 @@ function makeSafeRe (value) {
tok('NUMERICIDENTIFIER')
src[t.NUMERICIDENTIFIER] = '0|[1-9]\\d*'
tok('NUMERICIDENTIFIERLOOSE')
src[t.NUMERICIDENTIFIERLOOSE] = '\\d+'
src[t.NUMERICIDENTIFIERLOOSE] = '[0-9]+'

// ## Non-numeric Identifier
// Zero or more digits, followed by a letter or hyphen, and then zero or
// more letters, digits, or hyphens.

tok('NONNUMERICIDENTIFIER')
src[t.NONNUMERICIDENTIFIER] = '\\d*[a-zA-Z-]' + LETTERDASHNUMBER + '*'
src[t.NONNUMERICIDENTIFIER] = '\\d*[a-zA-Z-][a-zA-Z0-9-]*'

// ## Main Version
// Three dot-separated numeric identifiers.
@@ -7368,7 +7204,7 @@ src[t.PRERELEASELOOSE] = '(?:-?(' + src[t.PRERELEASEIDENTIFIERLOOSE] +
// Any combination of digits, letters, or hyphens.

tok('BUILDIDENTIFIER')
src[t.BUILDIDENTIFIER] = LETTERDASHNUMBER + '+'
src[t.BUILDIDENTIFIER] = '[0-9A-Za-z-]+'

// ## Build Metadata
// Plus sign, followed by one or more period-separated build metadata
@@ -7448,7 +7284,6 @@ src[t.COERCE] = '(^|[^\\d])' +
'(?:$|[^\\d])'
tok('COERCERTL')
re[t.COERCERTL] = new RegExp(src[t.COERCE], 'g')
safeRe[t.COERCERTL] = new RegExp(makeSafeRe(src[t.COERCE]), 'g')

// Tilde ranges.
// Meaning is "reasonably at or greater than"
@@ -7458,7 +7293,6 @@ src[t.LONETILDE] = '(?:~>?)'
tok('TILDETRIM')
src[t.TILDETRIM] = '(\\s*)' + src[t.LONETILDE] + '\\s+'
re[t.TILDETRIM] = new RegExp(src[t.TILDETRIM], 'g')
safeRe[t.TILDETRIM] = new RegExp(makeSafeRe(src[t.TILDETRIM]), 'g')
var tildeTrimReplace = '$1~'

tok('TILDE')
@@ -7474,7 +7308,6 @@ src[t.LONECARET] = '(?:\\^)'
tok('CARETTRIM')
src[t.CARETTRIM] = '(\\s*)' + src[t.LONECARET] + '\\s+'
re[t.CARETTRIM] = new RegExp(src[t.CARETTRIM], 'g')
safeRe[t.CARETTRIM] = new RegExp(makeSafeRe(src[t.CARETTRIM]), 'g')
var caretTrimReplace = '$1^'

tok('CARET')
@@ -7496,7 +7329,6 @@ src[t.COMPARATORTRIM] = '(\\s*)' + src[t.GTLT] +

// this one has to use the /g flag
re[t.COMPARATORTRIM] = new RegExp(src[t.COMPARATORTRIM], 'g')
safeRe[t.COMPARATORTRIM] = new RegExp(makeSafeRe(src[t.COMPARATORTRIM]), 'g')
var comparatorTrimReplace = '$1$2$3'

// Something like `1.2.3 - 1.2.4`
@@ -7525,14 +7357,6 @@ for (var i = 0; i < R; i++) {
debug(i, src[i])
if (!re[i]) {
re[i] = new RegExp(src[i])

// Replace all greedy whitespace to prevent regex dos issues. These regex are
// used internally via the safeRe object since all inputs in this library get
// normalized first to trim and collapse all extra whitespace. The original
// regexes are exported for userland consumption and lower level usage. A
// future breaking change could export the safer regex only with a note that
// all input should have extra whitespace removed.
safeRe[i] = new RegExp(makeSafeRe(src[i]))
}
}

@@ -7557,7 +7381,7 @@ function parse (version, options) {
return null
}

var r = options.loose ? safeRe[t.LOOSE] : safeRe[t.FULL]
var r = options.loose ? re[t.LOOSE] : re[t.FULL]
if (!r.test(version)) {
return null
}
@@ -7612,7 +7436,7 @@ function SemVer (version, options) {
this.options = options
this.loose = !!options.loose

var m = version.trim().match(options.loose ? safeRe[t.LOOSE] : safeRe[t.FULL])
var m = version.trim().match(options.loose ? re[t.LOOSE] : re[t.FULL])

if (!m) {
throw new TypeError('Invalid Version: ' + version)
@@ -8057,7 +7881,6 @@ function Comparator (comp, options) {
return new Comparator(comp, options)
}

comp = comp.trim().split(/\s+/).join(' ')
debug('comparator', comp, options)
this.options = options
this.loose = !!options.loose
@@ -8074,7 +7897,7 @@ function Comparator (comp, options) {

var ANY = {}
Comparator.prototype.parse = function (comp) {
var r = this.options.loose ? safeRe[t.COMPARATORLOOSE] : safeRe[t.COMPARATOR]
var r = this.options.loose ? re[t.COMPARATORLOOSE] : re[t.COMPARATOR]
var m = comp.match(r)

if (!m) {
@@ -8198,16 +8021,9 @@ function Range (range, options) {
this.loose = !!options.loose
this.includePrerelease = !!options.includePrerelease

// First reduce all whitespace as much as possible so we do not have to rely
// on potentially slow regexes like \s*. This is then stored and used for
// future error messages as well.
this.raw = range
.trim()
.split(/\s+/)
.join(' ')

// First, split based on boolean or ||
this.set = this.raw.split('||').map(function (range) {
this.raw = range
this.set = range.split(/\s*\|\|\s*/).map(function (range) {
return this.parseRange(range.trim())
}, this).filter(function (c) {
// throw out any that are not relevant for whatever reason
@@ -8215,7 +8031,7 @@ function Range (range, options) {
})

if (!this.set.length) {
throw new TypeError('Invalid SemVer Range: ' + this.raw)
throw new TypeError('Invalid SemVer Range: ' + range)
}

this.format()
@@ -8234,19 +8050,20 @@ Range.prototype.toString = function () {

Range.prototype.parseRange = function (range) {
var loose = this.options.loose
range = range.trim()
// `1.2.3 - 1.2.4` => `>=1.2.3 <=1.2.4`
var hr = loose ? safeRe[t.HYPHENRANGELOOSE] : safeRe[t.HYPHENRANGE]
var hr = loose ? re[t.HYPHENRANGELOOSE] : re[t.HYPHENRANGE]
range = range.replace(hr, hyphenReplace)
debug('hyphen replace', range)
// `> 1.2.3 < 1.2.5` => `>1.2.3 <1.2.5`
range = range.replace(safeRe[t.COMPARATORTRIM], comparatorTrimReplace)
debug('comparator trim', range, safeRe[t.COMPARATORTRIM])
range = range.replace(re[t.COMPARATORTRIM], comparatorTrimReplace)
debug('comparator trim', range, re[t.COMPARATORTRIM])

// `~ 1.2.3` => `~1.2.3`
range = range.replace(safeRe[t.TILDETRIM], tildeTrimReplace)
range = range.replace(re[t.TILDETRIM], tildeTrimReplace)

// `^ 1.2.3` => `^1.2.3`
range = range.replace(safeRe[t.CARETTRIM], caretTrimReplace)
range = range.replace(re[t.CARETTRIM], caretTrimReplace)

// normalize spaces
range = range.split(/\s+/).join(' ')
@@ -8254,7 +8071,7 @@ Range.prototype.parseRange = function (range) {
// At this point, the range is completely trimmed and
// ready to be split into comparators.

var compRe = loose ? safeRe[t.COMPARATORLOOSE] : safeRe[t.COMPARATOR]
var compRe = loose ? re[t.COMPARATORLOOSE] : re[t.COMPARATOR]
var set = range.split(' ').map(function (comp) {
return parseComparator(comp, this.options)
}, this).join(' ').split(/\s+/)
@@ -8354,7 +8171,7 @@ function replaceTildes (comp, options) {
}

function replaceTilde (comp, options) {
var r = options.loose ? safeRe[t.TILDELOOSE] : safeRe[t.TILDE]
var r = options.loose ? re[t.TILDELOOSE] : re[t.TILDE]
return comp.replace(r, function (_, M, m, p, pr) {
debug('tilde', comp, _, M, m, p, pr)
var ret
@@ -8395,7 +8212,7 @@ function replaceCarets (comp, options) {

function replaceCaret (comp, options) {
debug('caret', comp, options)
var r = options.loose ? safeRe[t.CARETLOOSE] : safeRe[t.CARET]
var r = options.loose ? re[t.CARETLOOSE] : re[t.CARET]
return comp.replace(r, function (_, M, m, p, pr) {
debug('caret', comp, _, M, m, p, pr)
var ret
@@ -8454,7 +8271,7 @@ function replaceXRanges (comp, options) {

function replaceXRange (comp, options) {
comp = comp.trim()
var r = options.loose ? safeRe[t.XRANGELOOSE] : safeRe[t.XRANGE]
var r = options.loose ? re[t.XRANGELOOSE] : re[t.XRANGE]
return comp.replace(r, function (ret, gtlt, M, m, p, pr) {
debug('xRange', comp, ret, gtlt, M, m, p, pr)
var xM = isX(M)
@@ -8529,7 +8346,7 @@ function replaceXRange (comp, options) {
function replaceStars (comp, options) {
debug('replaceStars', comp, options)
// Looseness is ignored here. star is always as loose as it gets!
return comp.trim().replace(safeRe[t.STAR], '')
return comp.trim().replace(re[t.STAR], '')
}

// This function is passed to string.replace(re[t.HYPHENRANGE])
@@ -8855,7 +8672,7 @@ function coerce (version, options) {

var match = null
if (!options.rtl) {
match = version.match(safeRe[t.COERCE])
match = version.match(re[t.COERCE])
} else {
// Find the right-most coercible string that does not share
// a terminus with a more left-ward coercible string.
@@ -8866,17 +8683,17 @@ function coerce (version, options) {
// Stop when we get a match that ends at the string end, since no
// coercible string can be more right-ward without the same terminus.
var next
while ((next = safeRe[t.COERCERTL].exec(version)) &&
while ((next = re[t.COERCERTL].exec(version)) &&
(!match || match.index + match[0].length !== version.length)
) {
if (!match ||
next.index + next[0].length !== match.index + match[0].length) {
match = next
}
safeRe[t.COERCERTL].lastIndex = next.index + next[1].length + next[2].length
re[t.COERCERTL].lastIndex = next.index + next[1].length + next[2].length
}
// leave it in a clean state
safeRe[t.COERCERTL].lastIndex = -1
re[t.COERCERTL].lastIndex = -1
}

if (match === null) {
@@ -35923,19 +35740,6 @@ class HttpClientResponse {
}));
});
}
readBodyBuffer() {
return __awaiter(this, void 0, void 0, function* () {
return new Promise((resolve) => __awaiter(this, void 0, void 0, function* () {
const chunks = [];
this.message.on('data', (chunk) => {
chunks.push(chunk);
});
this.message.on('end', () => {
resolve(Buffer.concat(chunks));
});
}));
});
}
}
exports.HttpClientResponse = HttpClientResponse;
function isHttps(requestUrl) {
@@ -40562,8 +40366,7 @@ exports.getUploadOptions = getUploadOptions;
*/
function getDownloadOptions(copy) {
const result = {
useAzureSdk: false,
concurrentBlobDownloads: true,
useAzureSdk: true,
downloadConcurrency: 8,
timeoutInMs: 30000,
segmentTimeoutInMs: 600000,
@@ -40573,9 +40376,6 @@ function getDownloadOptions(copy) {
if (typeof copy.useAzureSdk === 'boolean') {
result.useAzureSdk = copy.useAzureSdk;
}
if (typeof copy.concurrentBlobDownloads === 'boolean') {
result.concurrentBlobDownloads = copy.concurrentBlobDownloads;
}
if (typeof copy.downloadConcurrency === 'number') {
result.downloadConcurrency = copy.downloadConcurrency;
}
@@ -50285,14 +50085,14 @@ exports.propagation = propagation_1.PropagationAPI.getInstance();
this.saxParser.onopentag = (function(_this) {
return function(node) {
var key, newValue, obj, processedKey, ref;
obj = Object.create(null);
obj = {};
obj[charkey] = "";
if (!_this.options.ignoreAttrs) {
ref = node.attributes;
for (key in ref) {
if (!hasProp.call(ref, key)) continue;
if (!(attrkey in obj) && !_this.options.mergeAttrs) {
obj[attrkey] = Object.create(null);
obj[attrkey] = {};
}
newValue = _this.options.attrValueProcessors ? processItem(_this.options.attrValueProcessors, node.attributes[key], key) : node.attributes[key];
processedKey = _this.options.attrNameProcessors ? processItem(_this.options.attrNameProcessors, key) : key;
@@ -50342,11 +50142,7 @@ exports.propagation = propagation_1.PropagationAPI.getInstance();
}
}
if (isEmpty(obj)) {
if (typeof _this.options.emptyTag === 'function') {
obj = _this.options.emptyTag();
} else {
obj = _this.options.emptyTag !== '' ? _this.options.emptyTag : emptyStr;
}
obj = _this.options.emptyTag !== '' ? _this.options.emptyTag : emptyStr;
}
if (_this.options.validator != null) {
xpath = "/" + ((function() {
@@ -50370,7 +50166,7 @@ exports.propagation = propagation_1.PropagationAPI.getInstance();
}
if (_this.options.explicitChildren && !_this.options.mergeAttrs && typeof obj === 'object') {
if (!_this.options.preserveChildrenOrder) {
node = Object.create(null);
node = {};
if (_this.options.attrkey in obj) {
node[_this.options.attrkey] = obj[_this.options.attrkey];
delete obj[_this.options.attrkey];
@@ -50385,7 +50181,7 @@ exports.propagation = propagation_1.PropagationAPI.getInstance();
obj = node;
} else if (s) {
s[_this.options.childkey] = s[_this.options.childkey] || [];
objClone = Object.create(null);
objClone = {};
for (key in obj) {
if (!hasProp.call(obj, key)) continue;
objClone[key] = obj[key];
@@ -50402,7 +50198,7 @@ exports.propagation = propagation_1.PropagationAPI.getInstance();
} else {
if (_this.options.explicitRoot) {
old = obj;
obj = Object.create(null);
obj = {};
obj[nodeName] = old;
}
_this.resultObject = obj;
@@ -52604,7 +52400,7 @@ var CacheFilename;
(function (CacheFilename) {
CacheFilename["Gzip"] = "cache.tgz";
CacheFilename["Zstd"] = "cache.tzst";
})(CacheFilename || (exports.CacheFilename = CacheFilename = {}));
})(CacheFilename = exports.CacheFilename || (exports.CacheFilename = {}));
var CompressionMethod;
(function (CompressionMethod) {
CompressionMethod["Gzip"] = "gzip";
@@ -52612,12 +52408,12 @@ var CompressionMethod;
// This enum is for earlier version of zstd that does not have --long support
CompressionMethod["ZstdWithoutLong"] = "zstd-without-long";
CompressionMethod["Zstd"] = "zstd";
})(CompressionMethod || (exports.CompressionMethod = CompressionMethod = {}));
})(CompressionMethod = exports.CompressionMethod || (exports.CompressionMethod = {}));
var ArchiveToolType;
(function (ArchiveToolType) {
ArchiveToolType["GNU"] = "gnu";
ArchiveToolType["BSD"] = "bsd";
})(ArchiveToolType || (exports.ArchiveToolType = ArchiveToolType = {}));
})(ArchiveToolType = exports.ArchiveToolType || (exports.ArchiveToolType = {}));
// The default number of retry attempts.
exports.DefaultRetryAttempts = 2;
// The default delay in milliseconds between retry attempts.
@@ -55015,7 +54811,7 @@ class HttpHeaders {
set(headerName, headerValue) {
this._headersMap[getHeaderKey(headerName)] = {
name: headerName,
value: headerValue.toString().trim(),
value: headerValue.toString(),
};
}
/**
@@ -55155,7 +54951,7 @@ const Constants = {
/**
* The core-http version
*/
coreHttpVersion: "3.0.4",
coreHttpVersion: "3.0.0",
/**
* Specifies HTTP.
*/
@@ -55233,6 +55029,13 @@ const XML_CHARKEY = "_";

// Copyright (c) Microsoft Corporation.
const validUuidRegex = /^[0-9a-fA-F]{8}-[0-9a-fA-F]{4}-[0-9a-fA-F]{4}-[0-9a-fA-F]{4}-[0-9a-fA-F]{12}$/i;
/**
* A constant that indicates whether the environment is node.js or browser based.
*/
const isNode = typeof process !== "undefined" &&
!!process.version &&
!!process.versions &&
!!process.versions.node;
/**
* Encodes an URI.
*
@@ -59913,7 +59716,7 @@ function createDefaultRequestPolicyFactories(authPolicyFactory, options) {
factories.push(throttlingRetryPolicy());
}
factories.push(deserializationPolicy(options.deserializationContentTypes));
if (coreUtil.isNode) {
if (isNode) {
factories.push(proxyPolicy(options.proxySettings));
}
factories.push(logPolicy({ logger: logger.info }));
@@ -59945,7 +59748,7 @@ function createPipelineFromOptions(pipelineOptions, authPolicyFactory) {
const keepAliveOptions = Object.assign(Object.assign({}, DefaultKeepAliveOptions), pipelineOptions.keepAliveOptions);
const retryOptions = Object.assign(Object.assign({}, DefaultRetryOptions), pipelineOptions.retryOptions);
const redirectOptions = Object.assign(Object.assign({}, DefaultRedirectOptions), pipelineOptions.redirectOptions);
if (coreUtil.isNode) {
if (isNode) {
requestPolicyFactories.push(proxyPolicy(pipelineOptions.proxyOptions));
}
const deserializationOptions = Object.assign(Object.assign({}, DefaultDeserializationOptions), pipelineOptions.deserializationOptions);
@@ -59958,7 +59761,7 @@ function createPipelineFromOptions(pipelineOptions, authPolicyFactory) {
requestPolicyFactories.push(authPolicyFactory);
}
requestPolicyFactories.push(logPolicy(loggingOptions));
if (coreUtil.isNode && pipelineOptions.decompressResponse === false) {
if (isNode && pipelineOptions.decompressResponse === false) {
requestPolicyFactories.push(disableResponseDecompressionPolicy());
}
return {
@@ -60089,7 +59892,10 @@ function flattenResponse(_response, responseSpec) {
}
function getCredentialScopes(options, baseUri) {
if (options === null || options === void 0 ? void 0 : options.credentialScopes) {
return options.credentialScopes;
const scopes = options.credentialScopes;
return Array.isArray(scopes)
? scopes.map((scope) => new URL(scope).toString())
: new URL(scopes).toString();
}
if (baseUri) {
return `${baseUri}/.default`;
@@ -60322,10 +60128,6 @@ Object.defineProperty(exports, 'delay', {
enumerable: true,
get: function () { return coreUtil.delay; }
});
Object.defineProperty(exports, 'isNode', {
enumerable: true,
get: function () { return coreUtil.isNode; }
});
Object.defineProperty(exports, 'isTokenCredential', {
enumerable: true,
get: function () { return coreAuth.isTokenCredential; }
@@ -60365,6 +60167,7 @@ exports.generateUuid = generateUuid;
exports.getDefaultProxySettings = getDefaultProxySettings;
exports.getDefaultUserAgentValue = getDefaultUserAgentValue;
exports.isDuration = isDuration;
exports.isNode = isNode;
exports.isValidUuid = isValidUuid;
exports.keepAlivePolicy = keepAlivePolicy;
exports.logPolicy = logPolicy;

1154
package-lock.json
generated
File diff suppressed because it is too large
package.json
@@ -1,6 +1,6 @@
{
"name": "cache",
"version": "3.3.3",
"version": "3.3.1",
"private": true,
"description": "Cache dependencies and build outputs",
"main": "dist/restore/index.js",
@@ -23,7 +23,7 @@
"author": "GitHub",
"license": "MIT",
"dependencies": {
"@actions/cache": "^3.2.3",
"@actions/cache": "^3.2.1",
"@actions/core": "^1.10.0",
"@actions/exec": "^1.1.1",
"@actions/io": "^1.1.2"

src/restore.ts
@@ -1,3 +1,10 @@
import { restoreRun } from "./restoreImpl";
import restoreImpl from "./restoreImpl";
import { StateProvider } from "./stateProvider";

restoreRun(true);
async function run(): Promise<void> {
await restoreImpl(new StateProvider());
}

run();

export default run;

src/restoreImpl.ts
@@ -2,14 +2,10 @@ import * as cache from "@actions/cache";
import * as core from "@actions/core";

import { Events, Inputs, Outputs, State } from "./constants";
import {
IStateProvider,
NullStateProvider,
StateProvider
} from "./stateProvider";
import { IStateProvider } from "./stateProvider";
import * as utils from "./utils/actionUtils";

export async function restoreImpl(
async function restoreImpl(
stateProvider: IStateProvider
): Promise<string | undefined> {
try {
@@ -86,37 +82,4 @@ export async function restoreImpl(
}
}

async function run(
stateProvider: IStateProvider,
earlyExit: boolean | undefined
): Promise<void> {
try {
await restoreImpl(stateProvider);
} catch (err) {
console.error(err);
if (earlyExit) {
process.exit(1);
}
}

// node will stay alive if any promises are not resolved,
// which is a possibility if HTTP requests are dangling
// due to retries or timeouts. We know that if we got here
// that all promises that we care about have successfully
// resolved, so simply exit with success.
if (earlyExit) {
process.exit(0);
}
}

export async function restoreOnlyRun(
earlyExit?: boolean | undefined
): Promise<void> {
await run(new NullStateProvider(), earlyExit);
}

export async function restoreRun(
earlyExit?: boolean | undefined
): Promise<void> {
await run(new StateProvider(), earlyExit);
}
export default restoreImpl;

src/restoreOnly.ts
@@ -1,3 +1,10 @@
import { restoreOnlyRun } from "./restoreImpl";
import restoreImpl from "./restoreImpl";
import { NullStateProvider } from "./stateProvider";

restoreOnlyRun(true);
async function run(): Promise<void> {
await restoreImpl(new NullStateProvider());
}

run();

export default run;