From c36dc43e7be276cc63ea11161d744cccdc2c731f Mon Sep 17 00:00:00 2001
From: Dmitry Shibanov
Date: Tue, 26 Apr 2022 16:50:29 +0200
Subject: [PATCH] Fix conflicts (#389)

---
 dist/cache-save/index.js | 41 +++++++++++++++++++++++++------------
 dist/setup/index.js      | 44 +++++++++++++++++++++++++---------------
 src/setup-python.ts      |  2 --
 3 files changed, 56 insertions(+), 31 deletions(-)

diff --git a/dist/cache-save/index.js b/dist/cache-save/index.js
index c1c654f2..3c1d1c82 100644
--- a/dist/cache-save/index.js
+++ b/dist/cache-save/index.js
@@ -1148,6 +1148,11 @@ function assertDefined(name, value) {
     return value;
 }
 exports.assertDefined = assertDefined;
+function isGhes() {
+    const ghUrl = new URL(process.env['GITHUB_SERVER_URL'] || 'https://github.com');
+    return ghUrl.hostname.toUpperCase() !== 'GITHUB.COM';
+}
+exports.isGhes = isGhes;
 //# sourceMappingURL=cacheUtils.js.map
 
 /***/ }),
@@ -3806,18 +3811,18 @@ function downloadCache(archiveLocation, archivePath, options) {
 exports.downloadCache = downloadCache;
 // Reserve Cache
 function reserveCache(key, paths, options) {
-    var _a, _b;
     return __awaiter(this, void 0, void 0, function* () {
         const httpClient = createHttpClient();
         const version = getCacheVersion(paths, options === null || options === void 0 ? void 0 : options.compressionMethod);
         const reserveCacheRequest = {
             key,
-            version
+            version,
+            cacheSize: options === null || options === void 0 ? void 0 : options.cacheSize
         };
         const response = yield requestUtils_1.retryTypedResponse('reserveCache', () => __awaiter(this, void 0, void 0, function* () {
             return httpClient.postJson(getCacheApiUrl('caches'), reserveCacheRequest);
         }));
-        return (_b = (_a = response === null || response === void 0 ? void 0 : response.result) === null || _a === void 0 ? void 0 : _a.cacheId) !== null && _b !== void 0 ? _b : -1;
+        return response;
     });
 }
 exports.reserveCache = reserveCache;
@@ -41528,18 +41533,12 @@ exports.restoreCache = restoreCache;
  * @returns number returns cacheId if the cache was saved successfully and throws an error if save fails
  */
 function saveCache(paths, key, options) {
+    var _a, _b, _c, _d, _e;
     return __awaiter(this, void 0, void 0, function* () {
         checkPaths(paths);
         checkKey(key);
         const compressionMethod = yield utils.getCompressionMethod();
-        core.debug('Reserving Cache');
-        const cacheId = yield cacheHttpClient.reserveCache(key, paths, {
-            compressionMethod
-        });
-        if (cacheId === -1) {
-            throw new ReserveCacheError(`Unable to reserve cache with key ${key}, another job may be creating this cache.`);
-        }
-        core.debug(`Cache ID: ${cacheId}`);
+        let cacheId = null;
         const cachePaths = yield utils.resolvePaths(paths);
         core.debug('Cache Paths:');
         core.debug(`${JSON.stringify(cachePaths)}`);
@@ -41554,9 +41553,24 @@ function saveCache(paths, key, options) {
             const fileSizeLimit = 10 * 1024 * 1024 * 1024; // 10GB per repo limit
             const archiveFileSize = utils.getArchiveFileSizeInBytes(archivePath);
             core.debug(`File Size: ${archiveFileSize}`);
-            if (archiveFileSize > fileSizeLimit) {
+            // For GHES, this check will take place in ReserveCache API with enterprise file size limit
+            if (archiveFileSize > fileSizeLimit && !utils.isGhes()) {
                 throw new Error(`Cache size of ~${Math.round(archiveFileSize / (1024 * 1024))} MB (${archiveFileSize} B) is over the 10GB limit, not saving cache.`);
             }
+            core.debug('Reserving Cache');
+            const reserveCacheResponse = yield cacheHttpClient.reserveCache(key, paths, {
+                compressionMethod,
+                cacheSize: archiveFileSize
+            });
+            if ((_a = reserveCacheResponse === null || reserveCacheResponse === void 0 ? void 0 : reserveCacheResponse.result) === null || _a === void 0 ? void 0 : _a.cacheId) {
+                cacheId = (_b = reserveCacheResponse === null || reserveCacheResponse === void 0 ? void 0 : reserveCacheResponse.result) === null || _b === void 0 ? void 0 : _b.cacheId;
+            }
+            else if ((reserveCacheResponse === null || reserveCacheResponse === void 0 ? void 0 : reserveCacheResponse.statusCode) === 400) {
+                throw new Error((_d = (_c = reserveCacheResponse === null || reserveCacheResponse === void 0 ? void 0 : reserveCacheResponse.error) === null || _c === void 0 ? void 0 : _c.message) !== null && _d !== void 0 ? _d : `Cache size of ~${Math.round(archiveFileSize / (1024 * 1024))} MB (${archiveFileSize} B) is over the data cap limit, not saving cache.`);
+            }
+            else {
+                throw new ReserveCacheError(`Unable to reserve cache with key ${key}, another job may be creating this cache. More details: ${(_e = reserveCacheResponse === null || reserveCacheResponse === void 0 ? void 0 : reserveCacheResponse.error) === null || _e === void 0 ? void 0 : _e.message}`);
+            }
             core.debug(`Saving Cache (ID: ${cacheId})`);
             yield cacheHttpClient.saveCache(cacheId, archivePath, options);
         }
@@ -50335,7 +50349,8 @@ function retryTypedResponse(name, method, maxAttempts = constants_1.DefaultRetry
                 return {
                     statusCode: error.statusCode,
                     result: null,
-                    headers: {}
+                    headers: {},
+                    error
                 };
             }
             else {
diff --git a/dist/setup/index.js b/dist/setup/index.js
index ca18baa2..c51a5f77 100644
--- a/dist/setup/index.js
+++ b/dist/setup/index.js
@@ -1148,6 +1148,11 @@ function assertDefined(name, value) {
     return value;
 }
 exports.assertDefined = assertDefined;
+function isGhes() {
+    const ghUrl = new URL(process.env['GITHUB_SERVER_URL'] || 'https://github.com');
+    return ghUrl.hostname.toUpperCase() !== 'GITHUB.COM';
+}
+exports.isGhes = isGhes;
 //# sourceMappingURL=cacheUtils.js.map
 
 /***/ }),
@@ -3959,18 +3964,18 @@ function downloadCache(archiveLocation, archivePath, options) {
 exports.downloadCache = downloadCache;
 // Reserve Cache
 function reserveCache(key, paths, options) {
-    var _a, _b;
     return __awaiter(this, void 0, void 0, function* () {
         const httpClient = createHttpClient();
         const version = getCacheVersion(paths, options === null || options === void 0 ? void 0 : options.compressionMethod);
         const reserveCacheRequest = {
             key,
-            version
+            version,
+            cacheSize: options === null || options === void 0 ? void 0 : options.cacheSize
         };
         const response = yield requestUtils_1.retryTypedResponse('reserveCache', () => __awaiter(this, void 0, void 0, function* () {
             return httpClient.postJson(getCacheApiUrl('caches'), reserveCacheRequest);
         }));
-        return (_b = (_a = response === null || response === void 0 ? void 0 : response.result) === null || _a === void 0 ? void 0 : _a.cacheId) !== null && _b !== void 0 ? _b : -1;
+        return response;
     });
 }
 exports.reserveCache = reserveCache;
@@ -6107,9 +6112,6 @@ function run() {
                     yield cacheDependencies(cache, pythonVersion);
                 }
             }
-            else {
-                throw new Error('there\'s empty python-version input');
-            }
             const matchersPath = path.join(__dirname, '../..', '.github');
             core.info(`##[add-matcher]${path.join(matchersPath, 'python.json')}`);
         }
@@ -47085,18 +47087,12 @@ exports.restoreCache = restoreCache;
  * @returns number returns cacheId if the cache was saved successfully and throws an error if save fails
  */
 function saveCache(paths, key, options) {
+    var _a, _b, _c, _d, _e;
     return __awaiter(this, void 0, void 0, function* () {
         checkPaths(paths);
         checkKey(key);
         const compressionMethod = yield utils.getCompressionMethod();
-        core.debug('Reserving Cache');
-        const cacheId = yield cacheHttpClient.reserveCache(key, paths, {
-            compressionMethod
-        });
-        if (cacheId === -1) {
-            throw new ReserveCacheError(`Unable to reserve cache with key ${key}, another job may be creating this cache.`);
-        }
-        core.debug(`Cache ID: ${cacheId}`);
+        let cacheId = null;
         const cachePaths = yield utils.resolvePaths(paths);
         core.debug('Cache Paths:');
         core.debug(`${JSON.stringify(cachePaths)}`);
@@ -47111,9 +47107,24 @@ function saveCache(paths, key, options) {
             const fileSizeLimit = 10 * 1024 * 1024 * 1024; // 10GB per repo limit
             const archiveFileSize = utils.getArchiveFileSizeInBytes(archivePath);
             core.debug(`File Size: ${archiveFileSize}`);
-            if (archiveFileSize > fileSizeLimit) {
+            // For GHES, this check will take place in ReserveCache API with enterprise file size limit
+            if (archiveFileSize > fileSizeLimit && !utils.isGhes()) {
                 throw new Error(`Cache size of ~${Math.round(archiveFileSize / (1024 * 1024))} MB (${archiveFileSize} B) is over the 10GB limit, not saving cache.`);
             }
+            core.debug('Reserving Cache');
+            const reserveCacheResponse = yield cacheHttpClient.reserveCache(key, paths, {
+                compressionMethod,
+                cacheSize: archiveFileSize
+            });
+            if ((_a = reserveCacheResponse === null || reserveCacheResponse === void 0 ? void 0 : reserveCacheResponse.result) === null || _a === void 0 ? void 0 : _a.cacheId) {
+                cacheId = (_b = reserveCacheResponse === null || reserveCacheResponse === void 0 ? void 0 : reserveCacheResponse.result) === null || _b === void 0 ? void 0 : _b.cacheId;
+            }
+            else if ((reserveCacheResponse === null || reserveCacheResponse === void 0 ? void 0 : reserveCacheResponse.statusCode) === 400) {
+                throw new Error((_d = (_c = reserveCacheResponse === null || reserveCacheResponse === void 0 ? void 0 : reserveCacheResponse.error) === null || _c === void 0 ? void 0 : _c.message) !== null && _d !== void 0 ? _d : `Cache size of ~${Math.round(archiveFileSize / (1024 * 1024))} MB (${archiveFileSize} B) is over the data cap limit, not saving cache.`);
+            }
+            else {
+                throw new ReserveCacheError(`Unable to reserve cache with key ${key}, another job may be creating this cache. More details: ${(_e = reserveCacheResponse === null || reserveCacheResponse === void 0 ? void 0 : reserveCacheResponse.error) === null || _e === void 0 ? void 0 : _e.message}`);
+            }
             core.debug(`Saving Cache (ID: ${cacheId})`);
             yield cacheHttpClient.saveCache(cacheId, archivePath, options);
         }
@@ -56422,7 +56433,8 @@ function retryTypedResponse(name, method, maxAttempts = constants_1.DefaultRetry
                 return {
                     statusCode: error.statusCode,
                     result: null,
-                    headers: {}
+                    headers: {},
+                    error
                 };
             }
             else {
diff --git a/src/setup-python.ts b/src/setup-python.ts
index 3340a86e..62c76dba 100644
--- a/src/setup-python.ts
+++ b/src/setup-python.ts
@@ -53,8 +53,6 @@ async function run() {
     if (cache && isCacheFeatureAvailable()) {
       await cacheDependencies(cache, pythonVersion);
     }
-  } else {
-    throw new Error('there\'s empty python-version input')
   }
   const matchersPath = path.join(__dirname, '../..', '.github');
   core.info(`##[add-matcher]${path.join(matchersPath, 'python.json')}`);
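
For readers decoding the compiled `_a`/`void 0` helpers in the hunks above, the sketch below is a rough TypeScript rendering of the new reservation flow those hunks vendor from `@actions/cache`. It is illustrative only, not code from the patch: the names `TypedResponse`, `ReserveCacheResponse`, and `reserveCacheOrThrow`, and the injected `reserve` callback, are assumptions made for readability. The `isGhes` body and the branching logic mirror the patch itself.

```ts
// Sketch only: a readable equivalent of the compiled reservation logic above.

interface ReserveCacheResponse {
  cacheId: number;
}

interface TypedResponse<T> {
  statusCode: number;
  result: T | null;
  error?: Error;
}

class ReserveCacheError extends Error {}

// GHES detection, as added to cacheUtils in the patch.
function isGhes(): boolean {
  const ghUrl = new URL(process.env['GITHUB_SERVER_URL'] || 'https://github.com');
  return ghUrl.hostname.toUpperCase() !== 'GITHUB.COM';
}

// Hypothetical helper; mirrors the reservation branch added to saveCache.
// `reserve` stands in for cacheHttpClient.reserveCache, which per the patch
// now returns the full typed response instead of a bare cacheId or -1.
async function reserveCacheOrThrow(
  key: string,
  archiveFileSize: number,
  reserve: (cacheSize: number) => Promise<TypedResponse<ReserveCacheResponse>>
): Promise<number> {
  const fileSizeLimit = 10 * 1024 * 1024 * 1024; // 10GB per repo limit
  // On GHES the size check is deferred to the ReserveCache API, which
  // enforces the enterprise file size limit instead of the 10GB default.
  if (archiveFileSize > fileSizeLimit && !isGhes()) {
    throw new Error(
      `Cache size of ~${Math.round(archiveFileSize / (1024 * 1024))} MB is over the 10GB limit, not saving cache.`
    );
  }
  const response = await reserve(archiveFileSize);
  if (response?.result?.cacheId) {
    // Reservation succeeded; this ID is used for the archive upload.
    return response.result.cacheId;
  } else if (response?.statusCode === 400) {
    // The service rejected the reservation outright, e.g. over the data cap.
    throw new Error(
      response.error?.message ??
        `Cache size of ~${Math.round(archiveFileSize / (1024 * 1024))} MB is over the data cap limit, not saving cache.`
    );
  } else {
    // Most likely another job is already creating a cache for this key.
    throw new ReserveCacheError(
      `Unable to reserve cache with key ${key}, another job may be creating this cache. More details: ${response?.error?.message}`
    );
  }
}
```

The reason `reserveCache` returns the whole response (and `retryTypedResponse` now carries the `error` object through) is visible in the branching: a 400 rejection can surface the service's own message, such as an enterprise size limit, while other failures keep the previous "another job may be creating this cache" behaviour.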