Build outputs

daz 2024-04-03 14:33:34 -06:00
parent c16d6e59a6
commit aa533feb22
4 changed files with 172 additions and 54 deletions

View File

@@ -2328,6 +2328,9 @@ var __importDefault = (this && this.__importDefault) || function (mod) {
 Object.defineProperty(exports, "__esModule", ({ value: true }));
 exports.downloadArtifactInternal = exports.downloadArtifactPublic = exports.streamExtractExternal = void 0;
 const promises_1 = __importDefault(__nccwpck_require__(73292));
+const stream = __importStar(__nccwpck_require__(12781));
+const fs_1 = __nccwpck_require__(57147);
+const path = __importStar(__nccwpck_require__(71017));
 const github = __importStar(__nccwpck_require__(21260));
 const core = __importStar(__nccwpck_require__(42186));
 const httpClient = __importStar(__nccwpck_require__(96255));
@@ -2368,6 +2371,9 @@ function streamExtract(url, directory) {
 return;
 }
 catch (error) {
+if (error.message.includes('Malformed extraction path')) {
+throw new Error(`Artifact download failed with unretryable error: ${error.message}`);
+}
 retryCount++;
 core.debug(`Failed to download artifact after ${retryCount} retries due to ${error.message}. Retrying in 5 seconds...`);
 // wait 5 seconds before retrying
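The hunk above makes path-traversal failures non-retryable: an error whose message contains 'Malformed extraction path' is rethrown immediately instead of being retried every 5 seconds. A minimal standalone sketch of that retry shape, assuming a caller-supplied download() function and retry limit (both illustrative, not names from the bundle):

    // Retry a flaky download, but surface unretryable errors immediately.
    async function downloadWithRetry(download, maxRetries = 5) {
        let retryCount = 0;
        while (retryCount < maxRetries) {
            try {
                return await download();
            }
            catch (error) {
                // A malformed extraction path will never succeed on retry.
                if (error.message.includes('Malformed extraction path')) {
                    throw new Error(`Artifact download failed with unretryable error: ${error.message}`);
                }
                retryCount++;
                // wait 5 seconds before retrying
                await new Promise(resolve => setTimeout(resolve, 5000));
            }
        }
        throw new Error(`Artifact download failed after ${maxRetries} retries`);
    }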
@@ -2390,6 +2396,8 @@ function streamExtractExternal(url, directory) {
 response.message.destroy(new Error(`Blob storage chunk did not respond in ${timeout}ms`));
 };
 const timer = setTimeout(timerFn, timeout);
+const createdDirectories = new Set();
+createdDirectories.add(directory);
 response.message
 .on('data', () => {
 timer.refresh();
@@ -2399,11 +2407,47 @@ function streamExtractExternal(url, directory) {
 clearTimeout(timer);
 reject(error);
 })
-.pipe(unzip_stream_1.default.Extract({ path: directory }))
-.on('close', () => {
+.pipe(unzip_stream_1.default.Parse())
+.pipe(new stream.Transform({
+objectMode: true,
+transform: (entry, _, callback) => __awaiter(this, void 0, void 0, function* () {
+const fullPath = path.normalize(path.join(directory, entry.path));
+if (!directory.endsWith(path.sep)) {
+directory += path.sep;
+}
+if (!fullPath.startsWith(directory)) {
+reject(new Error(`Malformed extraction path: ${fullPath}`));
+}
+if (entry.type === 'Directory') {
+if (!createdDirectories.has(fullPath)) {
+createdDirectories.add(fullPath);
+yield resolveOrCreateDirectory(fullPath).then(() => {
+entry.autodrain();
+callback();
+});
+}
+else {
+entry.autodrain();
+callback();
+}
+}
+else {
+core.info(`Extracting artifact entry: ${fullPath}`);
+if (!createdDirectories.has(path.dirname(fullPath))) {
+createdDirectories.add(path.dirname(fullPath));
+yield resolveOrCreateDirectory(path.dirname(fullPath));
+}
+const writeStream = (0, fs_1.createWriteStream)(fullPath);
+writeStream.on('finish', callback);
+writeStream.on('error', reject);
+entry.pipe(writeStream);
+}
+})
+}))
+.on('finish', () => __awaiter(this, void 0, void 0, function* () {
 clearTimeout(timer);
 resolve();
-})
+}))
 .on('error', (error) => {
 reject(error);
 });
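The rewritten pipeline above replaces unzip-stream's Extract({ path }) with Parse() piped into an object-mode Transform, so each entry's destination is checked before anything is written: the entry path is joined to the target directory, normalized, and rejected as "Malformed extraction path" if it escapes the directory (a zip-slip guard), while directory creation is memoized in createdDirectories. A minimal sketch of just the containment check, with POSIX-style example paths and an illustrative helper name:

    const path = require('path');

    // True only when the archive entry would land inside `directory`.
    function isSafeExtractionPath(directory, entryPath) {
        const root = directory.endsWith(path.sep) ? directory : directory + path.sep;
        const fullPath = path.normalize(path.join(root, entryPath));
        return fullPath.startsWith(root);
    }

    console.log(isSafeExtractionPath('/tmp/artifact', 'nested/file.txt'));  // true
    console.log(isSafeExtractionPath('/tmp/artifact', '../../etc/passwd')); // false -- would be rejected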
@@ -3024,7 +3068,10 @@ function getResultsServiceUrl() {
 exports.getResultsServiceUrl = getResultsServiceUrl;
 function isGhes() {
 const ghUrl = new URL(process.env['GITHUB_SERVER_URL'] || 'https://github.com');
-return ghUrl.hostname.toUpperCase() !== 'GITHUB.COM';
+const hostname = ghUrl.hostname.trimEnd().toUpperCase();
+const isGitHubHost = hostname === 'GITHUB.COM';
+const isGheHost = hostname.endsWith('.GHE.COM') || hostname.endsWith('.GHE.LOCALHOST');
+return !isGitHubHost && !isGheHost;
 }
 exports.isGhes = isGhes;
 function getGitHubWorkspaceDir() {
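Both isGhes() hunks change host detection the same way: instead of treating every hostname other than github.com as GitHub Enterprise Server, hosts ending in .ghe.com or .ghe.localhost are now also treated as GitHub-hosted. A standalone restatement of the new check (the serverUrl parameter and sample URLs are illustrative):

    // Only hosts that are neither github.com nor *.ghe.com / *.ghe.localhost
    // are considered GitHub Enterprise Server.
    function isGhes(serverUrl = process.env['GITHUB_SERVER_URL'] || 'https://github.com') {
        const hostname = new URL(serverUrl).hostname.trimEnd().toUpperCase();
        const isGitHubHost = hostname === 'GITHUB.COM';
        const isGheHost = hostname.endsWith('.GHE.COM') || hostname.endsWith('.GHE.LOCALHOST');
        return !isGitHubHost && !isGheHost;
    }

    console.log(isGhes('https://github.com'));          // false
    console.log(isGhes('https://my-org.ghe.com'));      // false (was true before this change)
    console.log(isGhes('https://github.example.com'));  // true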
@@ -7127,7 +7174,10 @@ function assertDefined(name, value) {
 exports.assertDefined = assertDefined;
 function isGhes() {
 const ghUrl = new URL(process.env['GITHUB_SERVER_URL'] || 'https://github.com');
-return ghUrl.hostname.toUpperCase() !== 'GITHUB.COM';
+const hostname = ghUrl.hostname.trimEnd().toUpperCase();
+const isGitHubHost = hostname === 'GITHUB.COM';
+const isGheHost = hostname.endsWith('.GHE.COM') || hostname.endsWith('.GHE.LOCALHOST');
+return !isGitHubHost && !isGheHost;
 }
 exports.isGhes = isGhes;
 //# sourceMappingURL=cacheUtils.js.map
@@ -15220,7 +15270,7 @@ class HttpClient {
 if (this._keepAlive && useProxy) {
 agent = this._proxyAgent;
 }
-if (this._keepAlive && !useProxy) {
+if (!useProxy) {
 agent = this._agent;
 }
 // if agent is already assigned use that agent.
@@ -15252,16 +15302,12 @@ class HttpClient {
 agent = tunnelAgent(agentOptions);
 this._proxyAgent = agent;
 }
-// if reusing agent across request and tunneling agent isn't assigned create a new agent
-if (this._keepAlive && !agent) {
+// if tunneling agent isn't assigned create a new agent
+if (!agent) {
 const options = { keepAlive: this._keepAlive, maxSockets };
 agent = usingSsl ? new https.Agent(options) : new http.Agent(options);
 this._agent = agent;
 }
-// if not using private agent and tunnel agent isn't setup then use global agent
-if (!agent) {
-agent = usingSsl ? https.globalAgent : http.globalAgent;
-}
 if (usingSsl && this._ignoreSslError) {
 // we don't want to set NODE_TLS_REJECT_UNAUTHORIZED=0 since that will affect request for entire process
 // http.RequestOptions doesn't expose a way to modify RequestOptions.agent.options
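The two http-client hunks drop the global-agent fallback: a non-proxied request now always uses the client's cached Agent, creating one with the configured keepAlive and maxSockets on first use. A small sketch of that lazy-caching pattern (the getAgent shape and client object are illustrative, not the library's full _getAgent):

    const http = require('http');
    const https = require('https');

    // Lazily create and cache one Agent per client, whether or not keep-alive is on.
    function getAgent(client, usingSsl, maxSockets) {
        if (!client._agent) {
            const options = { keepAlive: client._keepAlive, maxSockets };
            client._agent = usingSsl ? new https.Agent(options) : new http.Agent(options);
        }
        return client._agent;
    }

    const client = { _keepAlive: false, _agent: undefined };
    const first = getAgent(client, true, 25);
    const second = getAgent(client, true, 25);
    console.log(first === second); // true: the same agent is reused across requests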
@@ -102798,35 +102844,43 @@ const coerce = (version, options) => {
 let match = null
 if (!options.rtl) {
-match = version.match(re[t.COERCE])
+match = version.match(options.includePrerelease ? re[t.COERCEFULL] : re[t.COERCE])
 } else {
 // Find the right-most coercible string that does not share
 // a terminus with a more left-ward coercible string.
 // Eg, '1.2.3.4' wants to coerce '2.3.4', not '3.4' or '4'
+// With includePrerelease option set, '1.2.3.4-rc' wants to coerce '2.3.4-rc', not '2.3.4'
 //
 // Walk through the string checking with a /g regexp
 // Manually set the index so as to pick up overlapping matches.
 // Stop when we get a match that ends at the string end, since no
 // coercible string can be more right-ward without the same terminus.
+const coerceRtlRegex = options.includePrerelease ? re[t.COERCERTLFULL] : re[t.COERCERTL]
 let next
-while ((next = re[t.COERCERTL].exec(version)) &&
+while ((next = coerceRtlRegex.exec(version)) &&
 (!match || match.index + match[0].length !== version.length)
 ) {
 if (!match ||
 next.index + next[0].length !== match.index + match[0].length) {
 match = next
 }
-re[t.COERCERTL].lastIndex = next.index + next[1].length + next[2].length
+coerceRtlRegex.lastIndex = next.index + next[1].length + next[2].length
 }
 // leave it in a clean state
-re[t.COERCERTL].lastIndex = -1
+coerceRtlRegex.lastIndex = -1
 }
 if (match === null) {
 return null
 }
-return parse(`${match[2]}.${match[3] || '0'}.${match[4] || '0'}`, options)
+const major = match[2]
+const minor = match[3] || '0'
+const patch = match[4] || '0'
+const prerelease = options.includePrerelease && match[5] ? `-${match[5]}` : ''
+const build = options.includePrerelease && match[6] ? `+${match[6]}` : ''
+return parse(`${major}.${minor}.${patch}${prerelease}${build}`, options)
 }
 module.exports = coerce
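The vendored semver's coerce() now honors options.includePrerelease, keeping prerelease (and build) identifiers that the old code always stripped. A usage sketch against the public semver API, assuming a semver release that ships this change (for example 7.6.x):

    const semver = require('semver');

    // Without the option, coercion still drops prerelease information.
    console.log(semver.coerce('v1.2.3-beta.2').version);
    // -> 1.2.3

    // With includePrerelease, the prerelease identifiers are preserved.
    console.log(semver.coerce('v1.2.3-beta.2', { includePrerelease: true }).version);
    // -> 1.2.3-beta.2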
@@ -103518,12 +103572,17 @@ createToken('XRANGELOOSE', `^${src[t.GTLT]}\\s*${src[t.XRANGEPLAINLOOSE]}$`)
 // Coercion.
 // Extract anything that could conceivably be a part of a valid semver
-createToken('COERCE', `${'(^|[^\\d])' +
+createToken('COERCEPLAIN', `${'(^|[^\\d])' +
 '(\\d{1,'}${MAX_SAFE_COMPONENT_LENGTH}})` +
 `(?:\\.(\\d{1,${MAX_SAFE_COMPONENT_LENGTH}}))?` +
-`(?:\\.(\\d{1,${MAX_SAFE_COMPONENT_LENGTH}}))?` +
+`(?:\\.(\\d{1,${MAX_SAFE_COMPONENT_LENGTH}}))?`)
+createToken('COERCE', `${src[t.COERCEPLAIN]}(?:$|[^\\d])`)
+createToken('COERCEFULL', src[t.COERCEPLAIN] +
+`(?:${src[t.PRERELEASE]})?` +
+`(?:${src[t.BUILD]})?` +
 `(?:$|[^\\d])`)
 createToken('COERCERTL', src[t.COERCE], true)
+createToken('COERCERTLFULL', src[t.COERCEFULL], true)
 // Tilde ranges.
 // Meaning is "reasonably at or greater than"
@@ -139770,21 +139829,21 @@ class CacheCleaner {
 });
 });
 }
-ageAllFiles(fileName = '*') {
-return __awaiter(this, void 0, void 0, function* () {
+ageAllFiles() {
+return __awaiter(this, arguments, void 0, function* (fileName = '*') {
 core.debug(`Aging all files in Gradle User Home with name ${fileName}`);
 yield this.setUtimes(`${this.gradleUserHome}/**/${fileName}`, new Date(0));
 });
 }
-touchAllFiles(fileName = '*') {
-return __awaiter(this, void 0, void 0, function* () {
+touchAllFiles() {
+return __awaiter(this, arguments, void 0, function* (fileName = '*') {
 core.debug(`Touching all files in Gradle User Home with name ${fileName}`);
 yield this.setUtimes(`${this.gradleUserHome}/**/${fileName}`, new Date());
 });
 }
 setUtimes(pattern, timestamp) {
-var _a, e_1, _b, _c;
 return __awaiter(this, void 0, void 0, function* () {
+var _a, e_1, _b, _c;
 const globber = yield glob.create(pattern, {
 implicitDescendants: false
 });
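The CacheCleaner and AbstractEntryExtractor hunks look larger than they are: they appear to be compiler-emit differences only (likely a newer TypeScript down-leveling), with default parameter values moved from the wrapper method into the __awaiter coroutine (forwarding arguments) and helper var declarations hoisted inside it; runtime behavior should be unchanged. A rough illustration of the two shapes using an illustrative function, not the generated code itself:

    // Older shape: the default is evaluated in the outer wrapper.
    function ageAllFilesOld(fileName = '*') {
        return (async () => `aging **/${fileName}`)();
    }

    // Newer shape: the wrapper forwards its arguments and the default
    // moves into the async body (approximated here with rest args).
    function ageAllFilesNew(...args) {
        return (async (fileName = '*') => `aging **/${fileName}`)(...args);
    }

    (async () => {
        console.log(await ageAllFilesOld());         // aging **/*
        console.log(await ageAllFilesNew());         // aging **/*
        console.log(await ageAllFilesNew('*.lock')); // aging **/*.lock
    })();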
@@ -139962,8 +140021,8 @@ class AbstractEntryExtractor {
 });
 }
 saveExtractedCacheEntry(matchingFiles, artifactType, pattern, uniqueFileNames, previouslyRestoredEntries, entryListener) {
-var _a;
 return __awaiter(this, void 0, void 0, function* () {
+var _a;
 const cacheKey = uniqueFileNames
 ? this.createCacheKeyFromFileNames(artifactType, matchingFiles)
 : yield this.createCacheKeyFromFileContents(artifactType, pattern);
@@ -142425,7 +142484,7 @@ function firstString() {
 /***/ ((module) => {
 "use strict";
-module.exports = JSON.parse('{"name":"@actions/artifact","version":"2.1.0","preview":true,"description":"Actions artifact lib","keywords":["github","actions","artifact"],"homepage":"https://github.com/actions/toolkit/tree/main/packages/artifact","license":"MIT","main":"lib/artifact.js","types":"lib/artifact.d.ts","directories":{"lib":"lib","test":"__tests__"},"files":["lib","!.DS_Store"],"publishConfig":{"access":"public"},"repository":{"type":"git","url":"git+https://github.com/actions/toolkit.git","directory":"packages/artifact"},"scripts":{"audit-moderate":"npm install && npm audit --json --audit-level=moderate > audit.json","test":"cd ../../ && npm run test ./packages/artifact","bootstrap":"cd ../../ && npm run bootstrap","tsc-run":"tsc","tsc":"npm run bootstrap && npm run tsc-run","gen:docs":"typedoc --plugin typedoc-plugin-markdown --out docs/generated src/artifact.ts --githubPages false --readme none"},"bugs":{"url":"https://github.com/actions/toolkit/issues"},"dependencies":{"@actions/core":"^1.10.0","@actions/github":"^5.1.1","@actions/http-client":"^2.1.0","@azure/storage-blob":"^12.15.0","@octokit/core":"^3.5.1","@octokit/plugin-request-log":"^1.0.4","@octokit/plugin-retry":"^3.0.9","@octokit/request-error":"^5.0.0","@protobuf-ts/plugin":"^2.2.3-alpha.1","archiver":"^5.3.1","crypto":"^1.0.1","jwt-decode":"^3.1.2","twirp-ts":"^2.5.0","unzip-stream":"^0.3.1"},"devDependencies":{"@types/archiver":"^5.3.2","@types/unzip-stream":"^0.3.4","typedoc":"^0.25.4","typedoc-plugin-markdown":"^3.17.1","typescript":"^5.2.2"}}');
+module.exports = JSON.parse('{"name":"@actions/artifact","version":"2.1.4","preview":true,"description":"Actions artifact lib","keywords":["github","actions","artifact"],"homepage":"https://github.com/actions/toolkit/tree/main/packages/artifact","license":"MIT","main":"lib/artifact.js","types":"lib/artifact.d.ts","directories":{"lib":"lib","test":"__tests__"},"files":["lib","!.DS_Store"],"publishConfig":{"access":"public"},"repository":{"type":"git","url":"git+https://github.com/actions/toolkit.git","directory":"packages/artifact"},"scripts":{"audit-moderate":"npm install && npm audit --json --audit-level=moderate > audit.json","test":"cd ../../ && npm run test ./packages/artifact","bootstrap":"cd ../../ && npm run bootstrap","tsc-run":"tsc","tsc":"npm run bootstrap && npm run tsc-run","gen:docs":"typedoc --plugin typedoc-plugin-markdown --out docs/generated src/artifact.ts --githubPages false --readme none"},"bugs":{"url":"https://github.com/actions/toolkit/issues"},"dependencies":{"@actions/core":"^1.10.0","@actions/github":"^5.1.1","@actions/http-client":"^2.1.0","@azure/storage-blob":"^12.15.0","@octokit/core":"^3.5.1","@octokit/plugin-request-log":"^1.0.4","@octokit/plugin-retry":"^3.0.9","@octokit/request-error":"^5.0.0","@protobuf-ts/plugin":"^2.2.3-alpha.1","archiver":"^5.3.1","crypto":"^1.0.1","jwt-decode":"^3.1.2","twirp-ts":"^2.5.0","unzip-stream":"^0.3.1"},"devDependencies":{"@types/archiver":"^5.3.2","@types/unzip-stream":"^0.3.4","typedoc":"^0.25.4","typedoc-plugin-markdown":"^3.17.1","typescript":"^5.2.2"}}');
 /***/ }),

File diff suppressed because one or more lines are too long

View File

@@ -2328,6 +2328,9 @@ var __importDefault = (this && this.__importDefault) || function (mod) {
 Object.defineProperty(exports, "__esModule", ({ value: true }));
 exports.downloadArtifactInternal = exports.downloadArtifactPublic = exports.streamExtractExternal = void 0;
 const promises_1 = __importDefault(__nccwpck_require__(73292));
+const stream = __importStar(__nccwpck_require__(12781));
+const fs_1 = __nccwpck_require__(57147);
+const path = __importStar(__nccwpck_require__(71017));
 const github = __importStar(__nccwpck_require__(21260));
 const core = __importStar(__nccwpck_require__(42186));
 const httpClient = __importStar(__nccwpck_require__(96255));
@@ -2368,6 +2371,9 @@ function streamExtract(url, directory) {
 return;
 }
 catch (error) {
+if (error.message.includes('Malformed extraction path')) {
+throw new Error(`Artifact download failed with unretryable error: ${error.message}`);
+}
 retryCount++;
 core.debug(`Failed to download artifact after ${retryCount} retries due to ${error.message}. Retrying in 5 seconds...`);
 // wait 5 seconds before retrying
@@ -2390,6 +2396,8 @@ function streamExtractExternal(url, directory) {
 response.message.destroy(new Error(`Blob storage chunk did not respond in ${timeout}ms`));
 };
 const timer = setTimeout(timerFn, timeout);
+const createdDirectories = new Set();
+createdDirectories.add(directory);
 response.message
 .on('data', () => {
 timer.refresh();
@@ -2399,11 +2407,47 @@ function streamExtractExternal(url, directory) {
 clearTimeout(timer);
 reject(error);
 })
-.pipe(unzip_stream_1.default.Extract({ path: directory }))
-.on('close', () => {
+.pipe(unzip_stream_1.default.Parse())
+.pipe(new stream.Transform({
+objectMode: true,
+transform: (entry, _, callback) => __awaiter(this, void 0, void 0, function* () {
+const fullPath = path.normalize(path.join(directory, entry.path));
+if (!directory.endsWith(path.sep)) {
+directory += path.sep;
+}
+if (!fullPath.startsWith(directory)) {
+reject(new Error(`Malformed extraction path: ${fullPath}`));
+}
+if (entry.type === 'Directory') {
+if (!createdDirectories.has(fullPath)) {
+createdDirectories.add(fullPath);
+yield resolveOrCreateDirectory(fullPath).then(() => {
+entry.autodrain();
+callback();
+});
+}
+else {
+entry.autodrain();
+callback();
+}
+}
+else {
+core.info(`Extracting artifact entry: ${fullPath}`);
+if (!createdDirectories.has(path.dirname(fullPath))) {
+createdDirectories.add(path.dirname(fullPath));
+yield resolveOrCreateDirectory(path.dirname(fullPath));
+}
+const writeStream = (0, fs_1.createWriteStream)(fullPath);
+writeStream.on('finish', callback);
+writeStream.on('error', reject);
+entry.pipe(writeStream);
+}
+})
+}))
+.on('finish', () => __awaiter(this, void 0, void 0, function* () {
 clearTimeout(timer);
 resolve();
-})
+}))
 .on('error', (error) => {
 reject(error);
 });
@@ -3024,7 +3068,10 @@ function getResultsServiceUrl() {
 exports.getResultsServiceUrl = getResultsServiceUrl;
 function isGhes() {
 const ghUrl = new URL(process.env['GITHUB_SERVER_URL'] || 'https://github.com');
-return ghUrl.hostname.toUpperCase() !== 'GITHUB.COM';
+const hostname = ghUrl.hostname.trimEnd().toUpperCase();
+const isGitHubHost = hostname === 'GITHUB.COM';
+const isGheHost = hostname.endsWith('.GHE.COM') || hostname.endsWith('.GHE.LOCALHOST');
+return !isGitHubHost && !isGheHost;
 }
 exports.isGhes = isGhes;
 function getGitHubWorkspaceDir() {
@@ -7127,7 +7174,10 @@ function assertDefined(name, value) {
 exports.assertDefined = assertDefined;
 function isGhes() {
 const ghUrl = new URL(process.env['GITHUB_SERVER_URL'] || 'https://github.com');
-return ghUrl.hostname.toUpperCase() !== 'GITHUB.COM';
+const hostname = ghUrl.hostname.trimEnd().toUpperCase();
+const isGitHubHost = hostname === 'GITHUB.COM';
+const isGheHost = hostname.endsWith('.GHE.COM') || hostname.endsWith('.GHE.LOCALHOST');
+return !isGitHubHost && !isGheHost;
 }
 exports.isGhes = isGhes;
 //# sourceMappingURL=cacheUtils.js.map
@@ -15220,7 +15270,7 @@ class HttpClient {
 if (this._keepAlive && useProxy) {
 agent = this._proxyAgent;
 }
-if (this._keepAlive && !useProxy) {
+if (!useProxy) {
 agent = this._agent;
 }
 // if agent is already assigned use that agent.
@@ -15252,16 +15302,12 @@ class HttpClient {
 agent = tunnelAgent(agentOptions);
 this._proxyAgent = agent;
 }
-// if reusing agent across request and tunneling agent isn't assigned create a new agent
-if (this._keepAlive && !agent) {
+// if tunneling agent isn't assigned create a new agent
+if (!agent) {
 const options = { keepAlive: this._keepAlive, maxSockets };
 agent = usingSsl ? new https.Agent(options) : new http.Agent(options);
 this._agent = agent;
 }
-// if not using private agent and tunnel agent isn't setup then use global agent
-if (!agent) {
-agent = usingSsl ? https.globalAgent : http.globalAgent;
-}
 if (usingSsl && this._ignoreSslError) {
 // we don't want to set NODE_TLS_REJECT_UNAUTHORIZED=0 since that will affect request for entire process
 // http.RequestOptions doesn't expose a way to modify RequestOptions.agent.options
@@ -100251,35 +100297,43 @@ const coerce = (version, options) => {
 let match = null
 if (!options.rtl) {
-match = version.match(re[t.COERCE])
+match = version.match(options.includePrerelease ? re[t.COERCEFULL] : re[t.COERCE])
 } else {
 // Find the right-most coercible string that does not share
 // a terminus with a more left-ward coercible string.
 // Eg, '1.2.3.4' wants to coerce '2.3.4', not '3.4' or '4'
+// With includePrerelease option set, '1.2.3.4-rc' wants to coerce '2.3.4-rc', not '2.3.4'
 //
 // Walk through the string checking with a /g regexp
 // Manually set the index so as to pick up overlapping matches.
 // Stop when we get a match that ends at the string end, since no
 // coercible string can be more right-ward without the same terminus.
+const coerceRtlRegex = options.includePrerelease ? re[t.COERCERTLFULL] : re[t.COERCERTL]
 let next
-while ((next = re[t.COERCERTL].exec(version)) &&
+while ((next = coerceRtlRegex.exec(version)) &&
 (!match || match.index + match[0].length !== version.length)
 ) {
 if (!match ||
 next.index + next[0].length !== match.index + match[0].length) {
 match = next
 }
-re[t.COERCERTL].lastIndex = next.index + next[1].length + next[2].length
+coerceRtlRegex.lastIndex = next.index + next[1].length + next[2].length
 }
 // leave it in a clean state
-re[t.COERCERTL].lastIndex = -1
+coerceRtlRegex.lastIndex = -1
 }
 if (match === null) {
 return null
 }
-return parse(`${match[2]}.${match[3] || '0'}.${match[4] || '0'}`, options)
+const major = match[2]
+const minor = match[3] || '0'
+const patch = match[4] || '0'
+const prerelease = options.includePrerelease && match[5] ? `-${match[5]}` : ''
+const build = options.includePrerelease && match[6] ? `+${match[6]}` : ''
+return parse(`${major}.${minor}.${patch}${prerelease}${build}`, options)
 }
 module.exports = coerce
@@ -100971,12 +101025,17 @@ createToken('XRANGELOOSE', `^${src[t.GTLT]}\\s*${src[t.XRANGEPLAINLOOSE]}$`)
 // Coercion.
 // Extract anything that could conceivably be a part of a valid semver
-createToken('COERCE', `${'(^|[^\\d])' +
+createToken('COERCEPLAIN', `${'(^|[^\\d])' +
 '(\\d{1,'}${MAX_SAFE_COMPONENT_LENGTH}})` +
 `(?:\\.(\\d{1,${MAX_SAFE_COMPONENT_LENGTH}}))?` +
-`(?:\\.(\\d{1,${MAX_SAFE_COMPONENT_LENGTH}}))?` +
+`(?:\\.(\\d{1,${MAX_SAFE_COMPONENT_LENGTH}}))?`)
+createToken('COERCE', `${src[t.COERCEPLAIN]}(?:$|[^\\d])`)
+createToken('COERCEFULL', src[t.COERCEPLAIN] +
+`(?:${src[t.PRERELEASE]})?` +
+`(?:${src[t.BUILD]})?` +
 `(?:$|[^\\d])`)
 createToken('COERCERTL', src[t.COERCE], true)
+createToken('COERCERTLFULL', src[t.COERCEFULL], true)
 // Tilde ranges.
 // Meaning is "reasonably at or greater than"
@@ -137223,21 +137282,21 @@ class CacheCleaner {
 });
 });
 }
-ageAllFiles(fileName = '*') {
-return __awaiter(this, void 0, void 0, function* () {
+ageAllFiles() {
+return __awaiter(this, arguments, void 0, function* (fileName = '*') {
 core.debug(`Aging all files in Gradle User Home with name ${fileName}`);
 yield this.setUtimes(`${this.gradleUserHome}/**/${fileName}`, new Date(0));
 });
 }
-touchAllFiles(fileName = '*') {
-return __awaiter(this, void 0, void 0, function* () {
+touchAllFiles() {
+return __awaiter(this, arguments, void 0, function* (fileName = '*') {
 core.debug(`Touching all files in Gradle User Home with name ${fileName}`);
 yield this.setUtimes(`${this.gradleUserHome}/**/${fileName}`, new Date());
 });
 }
 setUtimes(pattern, timestamp) {
-var _a, e_1, _b, _c;
 return __awaiter(this, void 0, void 0, function* () {
+var _a, e_1, _b, _c;
 const globber = yield glob.create(pattern, {
 implicitDescendants: false
 });
@@ -137415,8 +137474,8 @@ class AbstractEntryExtractor {
 });
 }
 saveExtractedCacheEntry(matchingFiles, artifactType, pattern, uniqueFileNames, previouslyRestoredEntries, entryListener) {
-var _a;
 return __awaiter(this, void 0, void 0, function* () {
+var _a;
 const cacheKey = uniqueFileNames
 ? this.createCacheKeyFromFileNames(artifactType, matchingFiles)
 : yield this.createCacheKeyFromFileContents(artifactType, pattern);
@@ -139520,7 +139579,7 @@ function firstString() {
 /***/ ((module) => {
 "use strict";
-module.exports = JSON.parse('{"name":"@actions/artifact","version":"2.1.0","preview":true,"description":"Actions artifact lib","keywords":["github","actions","artifact"],"homepage":"https://github.com/actions/toolkit/tree/main/packages/artifact","license":"MIT","main":"lib/artifact.js","types":"lib/artifact.d.ts","directories":{"lib":"lib","test":"__tests__"},"files":["lib","!.DS_Store"],"publishConfig":{"access":"public"},"repository":{"type":"git","url":"git+https://github.com/actions/toolkit.git","directory":"packages/artifact"},"scripts":{"audit-moderate":"npm install && npm audit --json --audit-level=moderate > audit.json","test":"cd ../../ && npm run test ./packages/artifact","bootstrap":"cd ../../ && npm run bootstrap","tsc-run":"tsc","tsc":"npm run bootstrap && npm run tsc-run","gen:docs":"typedoc --plugin typedoc-plugin-markdown --out docs/generated src/artifact.ts --githubPages false --readme none"},"bugs":{"url":"https://github.com/actions/toolkit/issues"},"dependencies":{"@actions/core":"^1.10.0","@actions/github":"^5.1.1","@actions/http-client":"^2.1.0","@azure/storage-blob":"^12.15.0","@octokit/core":"^3.5.1","@octokit/plugin-request-log":"^1.0.4","@octokit/plugin-retry":"^3.0.9","@octokit/request-error":"^5.0.0","@protobuf-ts/plugin":"^2.2.3-alpha.1","archiver":"^5.3.1","crypto":"^1.0.1","jwt-decode":"^3.1.2","twirp-ts":"^2.5.0","unzip-stream":"^0.3.1"},"devDependencies":{"@types/archiver":"^5.3.2","@types/unzip-stream":"^0.3.4","typedoc":"^0.25.4","typedoc-plugin-markdown":"^3.17.1","typescript":"^5.2.2"}}');
+module.exports = JSON.parse('{"name":"@actions/artifact","version":"2.1.4","preview":true,"description":"Actions artifact lib","keywords":["github","actions","artifact"],"homepage":"https://github.com/actions/toolkit/tree/main/packages/artifact","license":"MIT","main":"lib/artifact.js","types":"lib/artifact.d.ts","directories":{"lib":"lib","test":"__tests__"},"files":["lib","!.DS_Store"],"publishConfig":{"access":"public"},"repository":{"type":"git","url":"git+https://github.com/actions/toolkit.git","directory":"packages/artifact"},"scripts":{"audit-moderate":"npm install && npm audit --json --audit-level=moderate > audit.json","test":"cd ../../ && npm run test ./packages/artifact","bootstrap":"cd ../../ && npm run bootstrap","tsc-run":"tsc","tsc":"npm run bootstrap && npm run tsc-run","gen:docs":"typedoc --plugin typedoc-plugin-markdown --out docs/generated src/artifact.ts --githubPages false --readme none"},"bugs":{"url":"https://github.com/actions/toolkit/issues"},"dependencies":{"@actions/core":"^1.10.0","@actions/github":"^5.1.1","@actions/http-client":"^2.1.0","@azure/storage-blob":"^12.15.0","@octokit/core":"^3.5.1","@octokit/plugin-request-log":"^1.0.4","@octokit/plugin-retry":"^3.0.9","@octokit/request-error":"^5.0.0","@protobuf-ts/plugin":"^2.2.3-alpha.1","archiver":"^5.3.1","crypto":"^1.0.1","jwt-decode":"^3.1.2","twirp-ts":"^2.5.0","unzip-stream":"^0.3.1"},"devDependencies":{"@types/archiver":"^5.3.2","@types/unzip-stream":"^0.3.4","typedoc":"^0.25.4","typedoc-plugin-markdown":"^3.17.1","typescript":"^5.2.2"}}');
 /***/ }),

File diff suppressed because one or more lines are too long