Mirror of https://github.com/gradle/actions.git (synced 2025-04-23 03:09:20 +08:00)
Update dist after merging from main
commit 214146fa35 (parent b9bc45cfbb)
dist/setup-gradle/main/index.js (vendored): 148 changed lines
@@ -2328,9 +2328,6 @@ var __importDefault = (this && this.__importDefault) || function (mod) {
 Object.defineProperty(exports, "__esModule", ({ value: true }));
 exports.downloadArtifactInternal = exports.downloadArtifactPublic = exports.streamExtractExternal = void 0;
 const promises_1 = __importDefault(__nccwpck_require__(73292));
-const stream = __importStar(__nccwpck_require__(12781));
-const fs_1 = __nccwpck_require__(57147);
-const path = __importStar(__nccwpck_require__(71017));
 const github = __importStar(__nccwpck_require__(21260));
 const core = __importStar(__nccwpck_require__(42186));
 const httpClient = __importStar(__nccwpck_require__(96255));
@@ -2371,9 +2368,6 @@ function streamExtract(url, directory) {
 return;
 }
 catch (error) {
-if (error.message.includes('Malformed extraction path')) {
-throw new Error(`Artifact download failed with unretryable error: ${error.message}`);
-}
 retryCount++;
 core.debug(`Failed to download artifact after ${retryCount} retries due to ${error.message}. Retrying in 5 seconds...`);
 // wait 5 seconds before retrying
@@ -2396,8 +2390,6 @@ function streamExtractExternal(url, directory) {
 response.message.destroy(new Error(`Blob storage chunk did not respond in ${timeout}ms`));
 };
 const timer = setTimeout(timerFn, timeout);
-const createdDirectories = new Set();
-createdDirectories.add(directory);
 response.message
 .on('data', () => {
 timer.refresh();
@@ -2407,47 +2399,11 @@ function streamExtractExternal(url, directory) {
 clearTimeout(timer);
 reject(error);
 })
-.pipe(unzip_stream_1.default.Parse())
-.pipe(new stream.Transform({
-objectMode: true,
-transform: (entry, _, callback) => __awaiter(this, void 0, void 0, function* () {
-const fullPath = path.normalize(path.join(directory, entry.path));
-if (!directory.endsWith(path.sep)) {
-directory += path.sep;
-}
-if (!fullPath.startsWith(directory)) {
-reject(new Error(`Malformed extraction path: ${fullPath}`));
-}
-if (entry.type === 'Directory') {
-if (!createdDirectories.has(fullPath)) {
-createdDirectories.add(fullPath);
-yield resolveOrCreateDirectory(fullPath).then(() => {
-entry.autodrain();
-callback();
-});
-}
-else {
-entry.autodrain();
-callback();
-}
-}
-else {
-core.info(`Extracting artifact entry: ${fullPath}`);
-if (!createdDirectories.has(path.dirname(fullPath))) {
-createdDirectories.add(path.dirname(fullPath));
-yield resolveOrCreateDirectory(path.dirname(fullPath));
-}
-const writeStream = (0, fs_1.createWriteStream)(fullPath);
-writeStream.on('finish', callback);
-writeStream.on('error', reject);
-entry.pipe(writeStream);
-}
-})
-}))
-.on('finish', () => __awaiter(this, void 0, void 0, function* () {
+.pipe(unzip_stream_1.default.Extract({ path: directory }))
+.on('close', () => {
 clearTimeout(timer);
 resolve();
-}))
+})
 .on('error', (error) => {
 reject(error);
 });
@@ -3068,10 +3024,7 @@ function getResultsServiceUrl() {
 exports.getResultsServiceUrl = getResultsServiceUrl;
 function isGhes() {
 const ghUrl = new URL(process.env['GITHUB_SERVER_URL'] || 'https://github.com');
-const hostname = ghUrl.hostname.trimEnd().toUpperCase();
-const isGitHubHost = hostname === 'GITHUB.COM';
-const isGheHost = hostname.endsWith('.GHE.COM') || hostname.endsWith('.GHE.LOCALHOST');
-return !isGitHubHost && !isGheHost;
+return ghUrl.hostname.toUpperCase() !== 'GITHUB.COM';
 }
 exports.isGhes = isGhes;
 function getGitHubWorkspaceDir() {
@@ -7174,10 +7127,7 @@ function assertDefined(name, value) {
 exports.assertDefined = assertDefined;
 function isGhes() {
 const ghUrl = new URL(process.env['GITHUB_SERVER_URL'] || 'https://github.com');
-const hostname = ghUrl.hostname.trimEnd().toUpperCase();
-const isGitHubHost = hostname === 'GITHUB.COM';
-const isGheHost = hostname.endsWith('.GHE.COM') || hostname.endsWith('.GHE.LOCALHOST');
-return !isGitHubHost && !isGheHost;
+return ghUrl.hostname.toUpperCase() !== 'GITHUB.COM';
 }
 exports.isGhes = isGhes;
 //# sourceMappingURL=cacheUtils.js.map
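Both variants of the vendored isGhes() helper are visible in the two hunks above. For reference, a standalone TypeScript sketch of the variant being removed from this dist, which also treats *.GHE.COM and *.GHE.LOCALHOST hosts as GitHub-hosted; the body is copied from the diff and only the type annotations are assumed.

// Sketch of the removed isGhes() variant, reconstructed from the hunks above.
// Runs on Node.js, where URL is a global; only the TypeScript annotations are added here.
export function isGhes(): boolean {
    const ghUrl = new URL(process.env['GITHUB_SERVER_URL'] || 'https://github.com')
    const hostname = ghUrl.hostname.trimEnd().toUpperCase()
    const isGitHubHost = hostname === 'GITHUB.COM'
    const isGheHost = hostname.endsWith('.GHE.COM') || hostname.endsWith('.GHE.LOCALHOST')
    return !isGitHubHost && !isGheHost
}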
@@ -15270,7 +15220,7 @@ class HttpClient {
 if (this._keepAlive && useProxy) {
 agent = this._proxyAgent;
 }
-if (!useProxy) {
+if (this._keepAlive && !useProxy) {
 agent = this._agent;
 }
 // if agent is already assigned use that agent.
@@ -15302,12 +15252,16 @@ class HttpClient {
 agent = tunnelAgent(agentOptions);
 this._proxyAgent = agent;
 }
-// if tunneling agent isn't assigned create a new agent
-if (!agent) {
+// if reusing agent across request and tunneling agent isn't assigned create a new agent
+if (this._keepAlive && !agent) {
 const options = { keepAlive: this._keepAlive, maxSockets };
 agent = usingSsl ? new https.Agent(options) : new http.Agent(options);
 this._agent = agent;
 }
+// if not using private agent and tunnel agent isn't setup then use global agent
+if (!agent) {
+agent = usingSsl ? https.globalAgent : http.globalAgent;
+}
 if (usingSsl && this._ignoreSslError) {
 // we don't want to set NODE_TLS_REJECT_UNAUTHORIZED=0 since that will affect request for entire process
 // http.RequestOptions doesn't expose a way to modify RequestOptions.agent.options
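The two HttpClient hunks above swap the agent-selection logic of the vendored @actions/http-client. A condensed TypeScript sketch of the behaviour being vendored in here, with proxy handling omitted and illustrative names rather than the library's own API:

import * as http from 'http'
import * as https from 'https'

// Condensed sketch: a private Agent is created and cached only when keepAlive is enabled;
// otherwise the request falls back to Node's global agent.
function selectAgent(usingSsl: boolean, keepAlive: boolean, maxSockets: number, cached?: http.Agent): http.Agent {
    let agent = keepAlive ? cached : undefined
    if (keepAlive && !agent) {
        const options = { keepAlive, maxSockets }
        agent = usingSsl ? new https.Agent(options) : new http.Agent(options)
    }
    // if not using a private agent and a tunnel agent isn't set up, use the global agent
    return agent ?? (usingSsl ? https.globalAgent : http.globalAgent)
}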
@@ -102844,43 +102798,35 @@ const coerce = (version, options) => {
 
 let match = null
 if (!options.rtl) {
-match = version.match(options.includePrerelease ? re[t.COERCEFULL] : re[t.COERCE])
+match = version.match(re[t.COERCE])
 } else {
 // Find the right-most coercible string that does not share
 // a terminus with a more left-ward coercible string.
 // Eg, '1.2.3.4' wants to coerce '2.3.4', not '3.4' or '4'
-// With includePrerelease option set, '1.2.3.4-rc' wants to coerce '2.3.4-rc', not '2.3.4'
 //
 // Walk through the string checking with a /g regexp
 // Manually set the index so as to pick up overlapping matches.
 // Stop when we get a match that ends at the string end, since no
 // coercible string can be more right-ward without the same terminus.
-const coerceRtlRegex = options.includePrerelease ? re[t.COERCERTLFULL] : re[t.COERCERTL]
 let next
-while ((next = coerceRtlRegex.exec(version)) &&
+while ((next = re[t.COERCERTL].exec(version)) &&
 (!match || match.index + match[0].length !== version.length)
 ) {
 if (!match ||
 next.index + next[0].length !== match.index + match[0].length) {
 match = next
 }
-coerceRtlRegex.lastIndex = next.index + next[1].length + next[2].length
+re[t.COERCERTL].lastIndex = next.index + next[1].length + next[2].length
 }
 // leave it in a clean state
-coerceRtlRegex.lastIndex = -1
+re[t.COERCERTL].lastIndex = -1
 }
 
 if (match === null) {
 return null
 }
 
-const major = match[2]
-const minor = match[3] || '0'
-const patch = match[4] || '0'
-const prerelease = options.includePrerelease && match[5] ? `-${match[5]}` : ''
-const build = options.includePrerelease && match[6] ? `+${match[6]}` : ''
-
-return parse(`${major}.${minor}.${patch}${prerelease}${build}`, options)
+return parse(`${match[2]}.${match[3] || '0'}.${match[4] || '0'}`, options)
 }
 module.exports = coerce
 
@@ -103572,17 +103518,12 @@ createToken('XRANGELOOSE', `^${src[t.GTLT]}\\s*${src[t.XRANGEPLAINLOOSE]}$`)
 
 // Coercion.
 // Extract anything that could conceivably be a part of a valid semver
-createToken('COERCEPLAIN', `${'(^|[^\\d])' +
+createToken('COERCE', `${'(^|[^\\d])' +
 '(\\d{1,'}${MAX_SAFE_COMPONENT_LENGTH}})` +
 `(?:\\.(\\d{1,${MAX_SAFE_COMPONENT_LENGTH}}))?` +
-`(?:\\.(\\d{1,${MAX_SAFE_COMPONENT_LENGTH}}))?`)
-createToken('COERCE', `${src[t.COERCEPLAIN]}(?:$|[^\\d])`)
-createToken('COERCEFULL', src[t.COERCEPLAIN] +
-`(?:${src[t.PRERELEASE]})?` +
-`(?:${src[t.BUILD]})?` +
+`(?:\\.(\\d{1,${MAX_SAFE_COMPONENT_LENGTH}}))?` +
 `(?:$|[^\\d])`)
 createToken('COERCERTL', src[t.COERCE], true)
-createToken('COERCERTLFULL', src[t.COERCEFULL], true)
 
 // Tilde ranges.
 // Meaning is "reasonably at or greater than"
@@ -139453,19 +139394,21 @@ exports.setup = void 0;
 const core = __importStar(__nccwpck_require__(42186));
 const input_params_1 = __nccwpck_require__(23885);
 function setup() {
-if ((0, input_params_1.getBuildScanPublishEnabled)() && verifyTermsOfServiceAgreement()) {
+if ((0, input_params_1.getBuildScanPublishEnabled)() && verifyTermsOfUseAgreement()) {
+maybeExportVariable('DEVELOCITY_INJECTION_INIT_SCRIPT_NAME', 'gradle-actions.inject-develocity.init.gradle');
 maybeExportVariable('DEVELOCITY_INJECTION_ENABLED', 'true');
 maybeExportVariable('DEVELOCITY_PLUGIN_VERSION', '3.16.2');
 maybeExportVariable('DEVELOCITY_CCUD_PLUGIN_VERSION', '1.13');
-maybeExportVariable('BUILD_SCAN_TERMS_OF_SERVICE_URL', (0, input_params_1.getBuildScanTermsOfServiceUrl)());
-maybeExportVariable('BUILD_SCAN_TERMS_OF_SERVICE_AGREE', (0, input_params_1.getBuildScanTermsOfServiceAgree)());
+maybeExportVariable('BUILD_SCAN_TERMS_OF_USE_URL', (0, input_params_1.getBuildScanTermsOfUseUrl)());
+maybeExportVariable('BUILD_SCAN_TERMS_OF_USE_AGREE', (0, input_params_1.getBuildScanTermsOfUseAgree)());
 }
 }
 exports.setup = setup;
-function verifyTermsOfServiceAgreement() {
-if ((0, input_params_1.getBuildScanTermsOfServiceUrl)() !== 'https://gradle.com/terms-of-service' ||
-(0, input_params_1.getBuildScanTermsOfServiceAgree)() !== 'yes') {
-core.warning(`Terms of service must be agreed in order to publish build scans.`);
+function verifyTermsOfUseAgreement() {
+if ((0, input_params_1.getBuildScanTermsOfUseUrl)() !== 'https://gradle.com/terms-of-service' ||
+(0, input_params_1.getBuildScanTermsOfUseUrl)() !== 'https://gradle.com/help/legal-terms-of-use' ||
+(0, input_params_1.getBuildScanTermsOfUseAgree)() !== 'yes') {
+core.warning(`Terms of use must be agreed in order to publish build scans.`);
 return false;
 }
 return true;
@@ -139826,21 +139769,21 @@ class CacheCleaner {
 });
 });
 }
-ageAllFiles() {
-return __awaiter(this, arguments, void 0, function* (fileName = '*') {
+ageAllFiles(fileName = '*') {
+return __awaiter(this, void 0, void 0, function* () {
 core.debug(`Aging all files in Gradle User Home with name ${fileName}`);
 yield this.setUtimes(`${this.gradleUserHome}/**/${fileName}`, new Date(0));
 });
 }
-touchAllFiles() {
-return __awaiter(this, arguments, void 0, function* (fileName = '*') {
+touchAllFiles(fileName = '*') {
+return __awaiter(this, void 0, void 0, function* () {
 core.debug(`Touching all files in Gradle User Home with name ${fileName}`);
 yield this.setUtimes(`${this.gradleUserHome}/**/${fileName}`, new Date());
 });
 }
 setUtimes(pattern, timestamp) {
+var _a, e_1, _b, _c;
 return __awaiter(this, void 0, void 0, function* () {
-var _a, e_1, _b, _c;
 const globber = yield glob.create(pattern, {
 implicitDescendants: false
 });
@@ -140018,8 +139961,8 @@ class AbstractEntryExtractor {
 });
 }
 saveExtractedCacheEntry(matchingFiles, artifactType, pattern, uniqueFileNames, previouslyRestoredEntries, entryListener) {
+var _a;
 return __awaiter(this, void 0, void 0, function* () {
-var _a;
 const cacheKey = uniqueFileNames
 ? this.createCacheKeyFromFileNames(artifactType, matchingFiles)
 : yield this.createCacheKeyFromFileContents(artifactType, pattern);
@@ -141322,7 +141265,7 @@ var __importStar = (this && this.__importStar) || function (mod) {
 return result;
 };
 Object.defineProperty(exports, "__esModule", ({ value: true }));
-exports.JobSummaryOption = exports.DependencyGraphOption = exports.parseNumericInput = exports.getArtifactRetentionDays = exports.getDependencyGraphContinueOnFailure = exports.getDependencyGraphOption = exports.getBuildScanTermsOfServiceAgree = exports.getBuildScanTermsOfServiceUrl = exports.getBuildScanPublishEnabled = exports.getPRCommentOption = exports.getJobSummaryOption = exports.isJobSummaryEnabled = exports.getGithubToken = exports.getJobMatrix = exports.getArguments = exports.getGradleVersion = exports.getBuildRootDirectory = exports.getCacheExcludes = exports.getCacheIncludes = exports.getCacheEncryptionKey = exports.isCacheCleanupEnabled = exports.isCacheDebuggingEnabled = exports.isCacheStrictMatch = exports.isCacheOverwriteExisting = exports.isCacheWriteOnly = exports.isCacheReadOnly = exports.isCacheDisabled = void 0;
+exports.JobSummaryOption = exports.DependencyGraphOption = exports.parseNumericInput = exports.getArtifactRetentionDays = exports.getDependencyGraphContinueOnFailure = exports.getDependencyGraphOption = exports.getBuildScanTermsOfUseAgree = exports.getBuildScanTermsOfUseUrl = exports.getBuildScanPublishEnabled = exports.getPRCommentOption = exports.getJobSummaryOption = exports.isJobSummaryEnabled = exports.getGithubToken = exports.getJobMatrix = exports.getArguments = exports.getGradleVersion = exports.getBuildRootDirectory = exports.getCacheExcludes = exports.getCacheIncludes = exports.getCacheEncryptionKey = exports.isCacheCleanupEnabled = exports.isCacheDebuggingEnabled = exports.isCacheStrictMatch = exports.isCacheOverwriteExisting = exports.isCacheWriteOnly = exports.isCacheReadOnly = exports.isCacheDisabled = void 0;
 const core = __importStar(__nccwpck_require__(42186));
 const string_argv_1 = __nccwpck_require__(19663);
 function isCacheDisabled() {
@@ -141402,14 +141345,21 @@ function getBuildScanPublishEnabled() {
 return getBooleanInput('build-scan-publish');
 }
 exports.getBuildScanPublishEnabled = getBuildScanPublishEnabled;
-function getBuildScanTermsOfServiceUrl() {
-return core.getInput('build-scan-terms-of-service-url');
+function getBuildScanTermsOfUseUrl() {
+return getTermsOfUseProp('build-scan-terms-of-use-url', 'build-scan-terms-of-service-url');
 }
-exports.getBuildScanTermsOfServiceUrl = getBuildScanTermsOfServiceUrl;
-function getBuildScanTermsOfServiceAgree() {
-return core.getInput('build-scan-terms-of-service-agree');
+exports.getBuildScanTermsOfUseUrl = getBuildScanTermsOfUseUrl;
+function getBuildScanTermsOfUseAgree() {
+return getTermsOfUseProp('build-scan-terms-of-use-agree', 'build-scan-terms-of-service-agree');
+}
+exports.getBuildScanTermsOfUseAgree = getBuildScanTermsOfUseAgree;
+function getTermsOfUseProp(newPropName, oldPropName) {
+const newProp = core.getInput(newPropName);
+if (newProp.length !== 0) {
+return newProp;
+}
+return core.getInput(oldPropName);
 }
-exports.getBuildScanTermsOfServiceAgree = getBuildScanTermsOfServiceAgree;
 function parseJobSummaryOption(paramName) {
 const val = core.getInput(paramName);
 switch (val.toLowerCase().trim()) {
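The input_params hunk above shows, in bundled form, how the renamed build-scan inputs fall back to their legacy names. A minimal TypeScript sketch of that lookup, assuming the @actions/core input API; the input names are taken directly from the diff:

import * as core from '@actions/core'

// Prefer the renamed input; fall back to the legacy name when the new input is unset.
function getTermsOfUseProp(newPropName: string, oldPropName: string): string {
    const newProp = core.getInput(newPropName)
    if (newProp.length !== 0) {
        return newProp
    }
    return core.getInput(oldPropName)
}

export function getBuildScanTermsOfUseUrl(): string {
    return getTermsOfUseProp('build-scan-terms-of-use-url', 'build-scan-terms-of-service-url')
}

export function getBuildScanTermsOfUseAgree(): string {
    return getTermsOfUseProp('build-scan-terms-of-use-agree', 'build-scan-terms-of-service-agree')
}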
@@ -142474,7 +142424,7 @@ function firstString() {
 /***/ ((module) => {
 
 "use strict";
-module.exports = JSON.parse('{"name":"@actions/artifact","version":"2.1.4","preview":true,"description":"Actions artifact lib","keywords":["github","actions","artifact"],"homepage":"https://github.com/actions/toolkit/tree/main/packages/artifact","license":"MIT","main":"lib/artifact.js","types":"lib/artifact.d.ts","directories":{"lib":"lib","test":"__tests__"},"files":["lib","!.DS_Store"],"publishConfig":{"access":"public"},"repository":{"type":"git","url":"git+https://github.com/actions/toolkit.git","directory":"packages/artifact"},"scripts":{"audit-moderate":"npm install && npm audit --json --audit-level=moderate > audit.json","test":"cd ../../ && npm run test ./packages/artifact","bootstrap":"cd ../../ && npm run bootstrap","tsc-run":"tsc","tsc":"npm run bootstrap && npm run tsc-run","gen:docs":"typedoc --plugin typedoc-plugin-markdown --out docs/generated src/artifact.ts --githubPages false --readme none"},"bugs":{"url":"https://github.com/actions/toolkit/issues"},"dependencies":{"@actions/core":"^1.10.0","@actions/github":"^5.1.1","@actions/http-client":"^2.1.0","@azure/storage-blob":"^12.15.0","@octokit/core":"^3.5.1","@octokit/plugin-request-log":"^1.0.4","@octokit/plugin-retry":"^3.0.9","@octokit/request-error":"^5.0.0","@protobuf-ts/plugin":"^2.2.3-alpha.1","archiver":"^5.3.1","crypto":"^1.0.1","jwt-decode":"^3.1.2","twirp-ts":"^2.5.0","unzip-stream":"^0.3.1"},"devDependencies":{"@types/archiver":"^5.3.2","@types/unzip-stream":"^0.3.4","typedoc":"^0.25.4","typedoc-plugin-markdown":"^3.17.1","typescript":"^5.2.2"}}');
+module.exports = JSON.parse('{"name":"@actions/artifact","version":"2.1.0","preview":true,"description":"Actions artifact lib","keywords":["github","actions","artifact"],"homepage":"https://github.com/actions/toolkit/tree/main/packages/artifact","license":"MIT","main":"lib/artifact.js","types":"lib/artifact.d.ts","directories":{"lib":"lib","test":"__tests__"},"files":["lib","!.DS_Store"],"publishConfig":{"access":"public"},"repository":{"type":"git","url":"git+https://github.com/actions/toolkit.git","directory":"packages/artifact"},"scripts":{"audit-moderate":"npm install && npm audit --json --audit-level=moderate > audit.json","test":"cd ../../ && npm run test ./packages/artifact","bootstrap":"cd ../../ && npm run bootstrap","tsc-run":"tsc","tsc":"npm run bootstrap && npm run tsc-run","gen:docs":"typedoc --plugin typedoc-plugin-markdown --out docs/generated src/artifact.ts --githubPages false --readme none"},"bugs":{"url":"https://github.com/actions/toolkit/issues"},"dependencies":{"@actions/core":"^1.10.0","@actions/github":"^5.1.1","@actions/http-client":"^2.1.0","@azure/storage-blob":"^12.15.0","@octokit/core":"^3.5.1","@octokit/plugin-request-log":"^1.0.4","@octokit/plugin-retry":"^3.0.9","@octokit/request-error":"^5.0.0","@protobuf-ts/plugin":"^2.2.3-alpha.1","archiver":"^5.3.1","crypto":"^1.0.1","jwt-decode":"^3.1.2","twirp-ts":"^2.5.0","unzip-stream":"^0.3.1"},"devDependencies":{"@types/archiver":"^5.3.2","@types/unzip-stream":"^0.3.4","typedoc":"^0.25.4","typedoc-plugin-markdown":"^3.17.1","typescript":"^5.2.2"}}');
 
 /***/ }),
 
dist/setup-gradle/post/index.js (vendored): 148 changed lines
@@ -2328,9 +2328,6 @@ var __importDefault = (this && this.__importDefault) || function (mod) {
 Object.defineProperty(exports, "__esModule", ({ value: true }));
 exports.downloadArtifactInternal = exports.downloadArtifactPublic = exports.streamExtractExternal = void 0;
 const promises_1 = __importDefault(__nccwpck_require__(73292));
-const stream = __importStar(__nccwpck_require__(12781));
-const fs_1 = __nccwpck_require__(57147);
-const path = __importStar(__nccwpck_require__(71017));
 const github = __importStar(__nccwpck_require__(21260));
 const core = __importStar(__nccwpck_require__(42186));
 const httpClient = __importStar(__nccwpck_require__(96255));
@@ -2371,9 +2368,6 @@ function streamExtract(url, directory) {
 return;
 }
 catch (error) {
-if (error.message.includes('Malformed extraction path')) {
-throw new Error(`Artifact download failed with unretryable error: ${error.message}`);
-}
 retryCount++;
 core.debug(`Failed to download artifact after ${retryCount} retries due to ${error.message}. Retrying in 5 seconds...`);
 // wait 5 seconds before retrying
@@ -2396,8 +2390,6 @@ function streamExtractExternal(url, directory) {
 response.message.destroy(new Error(`Blob storage chunk did not respond in ${timeout}ms`));
 };
 const timer = setTimeout(timerFn, timeout);
-const createdDirectories = new Set();
-createdDirectories.add(directory);
 response.message
 .on('data', () => {
 timer.refresh();
@@ -2407,47 +2399,11 @@ function streamExtractExternal(url, directory) {
 clearTimeout(timer);
 reject(error);
 })
-.pipe(unzip_stream_1.default.Parse())
-.pipe(new stream.Transform({
-objectMode: true,
-transform: (entry, _, callback) => __awaiter(this, void 0, void 0, function* () {
-const fullPath = path.normalize(path.join(directory, entry.path));
-if (!directory.endsWith(path.sep)) {
-directory += path.sep;
-}
-if (!fullPath.startsWith(directory)) {
-reject(new Error(`Malformed extraction path: ${fullPath}`));
-}
-if (entry.type === 'Directory') {
-if (!createdDirectories.has(fullPath)) {
-createdDirectories.add(fullPath);
-yield resolveOrCreateDirectory(fullPath).then(() => {
-entry.autodrain();
-callback();
-});
-}
-else {
-entry.autodrain();
-callback();
-}
-}
-else {
-core.info(`Extracting artifact entry: ${fullPath}`);
-if (!createdDirectories.has(path.dirname(fullPath))) {
-createdDirectories.add(path.dirname(fullPath));
-yield resolveOrCreateDirectory(path.dirname(fullPath));
-}
-const writeStream = (0, fs_1.createWriteStream)(fullPath);
-writeStream.on('finish', callback);
-writeStream.on('error', reject);
-entry.pipe(writeStream);
-}
-})
-}))
-.on('finish', () => __awaiter(this, void 0, void 0, function* () {
+.pipe(unzip_stream_1.default.Extract({ path: directory }))
+.on('close', () => {
 clearTimeout(timer);
 resolve();
-}))
+})
 .on('error', (error) => {
 reject(error);
 });
@@ -3068,10 +3024,7 @@ function getResultsServiceUrl() {
 exports.getResultsServiceUrl = getResultsServiceUrl;
 function isGhes() {
 const ghUrl = new URL(process.env['GITHUB_SERVER_URL'] || 'https://github.com');
-const hostname = ghUrl.hostname.trimEnd().toUpperCase();
-const isGitHubHost = hostname === 'GITHUB.COM';
-const isGheHost = hostname.endsWith('.GHE.COM') || hostname.endsWith('.GHE.LOCALHOST');
-return !isGitHubHost && !isGheHost;
+return ghUrl.hostname.toUpperCase() !== 'GITHUB.COM';
 }
 exports.isGhes = isGhes;
 function getGitHubWorkspaceDir() {
@@ -7174,10 +7127,7 @@ function assertDefined(name, value) {
 exports.assertDefined = assertDefined;
 function isGhes() {
 const ghUrl = new URL(process.env['GITHUB_SERVER_URL'] || 'https://github.com');
-const hostname = ghUrl.hostname.trimEnd().toUpperCase();
-const isGitHubHost = hostname === 'GITHUB.COM';
-const isGheHost = hostname.endsWith('.GHE.COM') || hostname.endsWith('.GHE.LOCALHOST');
-return !isGitHubHost && !isGheHost;
+return ghUrl.hostname.toUpperCase() !== 'GITHUB.COM';
 }
 exports.isGhes = isGhes;
 //# sourceMappingURL=cacheUtils.js.map
@@ -15270,7 +15220,7 @@ class HttpClient {
 if (this._keepAlive && useProxy) {
 agent = this._proxyAgent;
 }
-if (!useProxy) {
+if (this._keepAlive && !useProxy) {
 agent = this._agent;
 }
 // if agent is already assigned use that agent.
@@ -15302,12 +15252,16 @@ class HttpClient {
 agent = tunnelAgent(agentOptions);
 this._proxyAgent = agent;
 }
-// if tunneling agent isn't assigned create a new agent
-if (!agent) {
+// if reusing agent across request and tunneling agent isn't assigned create a new agent
+if (this._keepAlive && !agent) {
 const options = { keepAlive: this._keepAlive, maxSockets };
 agent = usingSsl ? new https.Agent(options) : new http.Agent(options);
 this._agent = agent;
 }
+// if not using private agent and tunnel agent isn't setup then use global agent
+if (!agent) {
+agent = usingSsl ? https.globalAgent : http.globalAgent;
+}
 if (usingSsl && this._ignoreSslError) {
 // we don't want to set NODE_TLS_REJECT_UNAUTHORIZED=0 since that will affect request for entire process
 // http.RequestOptions doesn't expose a way to modify RequestOptions.agent.options
@@ -100297,43 +100251,35 @@ const coerce = (version, options) => {
 
 let match = null
 if (!options.rtl) {
-match = version.match(options.includePrerelease ? re[t.COERCEFULL] : re[t.COERCE])
+match = version.match(re[t.COERCE])
 } else {
 // Find the right-most coercible string that does not share
 // a terminus with a more left-ward coercible string.
 // Eg, '1.2.3.4' wants to coerce '2.3.4', not '3.4' or '4'
-// With includePrerelease option set, '1.2.3.4-rc' wants to coerce '2.3.4-rc', not '2.3.4'
 //
 // Walk through the string checking with a /g regexp
 // Manually set the index so as to pick up overlapping matches.
 // Stop when we get a match that ends at the string end, since no
 // coercible string can be more right-ward without the same terminus.
-const coerceRtlRegex = options.includePrerelease ? re[t.COERCERTLFULL] : re[t.COERCERTL]
 let next
-while ((next = coerceRtlRegex.exec(version)) &&
+while ((next = re[t.COERCERTL].exec(version)) &&
 (!match || match.index + match[0].length !== version.length)
 ) {
 if (!match ||
 next.index + next[0].length !== match.index + match[0].length) {
 match = next
 }
-coerceRtlRegex.lastIndex = next.index + next[1].length + next[2].length
+re[t.COERCERTL].lastIndex = next.index + next[1].length + next[2].length
 }
 // leave it in a clean state
-coerceRtlRegex.lastIndex = -1
+re[t.COERCERTL].lastIndex = -1
 }
 
 if (match === null) {
 return null
 }
 
-const major = match[2]
-const minor = match[3] || '0'
-const patch = match[4] || '0'
-const prerelease = options.includePrerelease && match[5] ? `-${match[5]}` : ''
-const build = options.includePrerelease && match[6] ? `+${match[6]}` : ''
-
-return parse(`${major}.${minor}.${patch}${prerelease}${build}`, options)
+return parse(`${match[2]}.${match[3] || '0'}.${match[4] || '0'}`, options)
 }
 module.exports = coerce
 
@@ -101025,17 +100971,12 @@ createToken('XRANGELOOSE', `^${src[t.GTLT]}\\s*${src[t.XRANGEPLAINLOOSE]}$`)
 
 // Coercion.
 // Extract anything that could conceivably be a part of a valid semver
-createToken('COERCEPLAIN', `${'(^|[^\\d])' +
+createToken('COERCE', `${'(^|[^\\d])' +
 '(\\d{1,'}${MAX_SAFE_COMPONENT_LENGTH}})` +
 `(?:\\.(\\d{1,${MAX_SAFE_COMPONENT_LENGTH}}))?` +
-`(?:\\.(\\d{1,${MAX_SAFE_COMPONENT_LENGTH}}))?`)
-createToken('COERCE', `${src[t.COERCEPLAIN]}(?:$|[^\\d])`)
-createToken('COERCEFULL', src[t.COERCEPLAIN] +
-`(?:${src[t.PRERELEASE]})?` +
-`(?:${src[t.BUILD]})?` +
+`(?:\\.(\\d{1,${MAX_SAFE_COMPONENT_LENGTH}}))?` +
 `(?:$|[^\\d])`)
 createToken('COERCERTL', src[t.COERCE], true)
-createToken('COERCERTLFULL', src[t.COERCEFULL], true)
 
 // Tilde ranges.
 // Meaning is "reasonably at or greater than"
@@ -136906,19 +136847,21 @@ exports.setup = void 0;
 const core = __importStar(__nccwpck_require__(42186));
 const input_params_1 = __nccwpck_require__(23885);
 function setup() {
-if ((0, input_params_1.getBuildScanPublishEnabled)() && verifyTermsOfServiceAgreement()) {
+if ((0, input_params_1.getBuildScanPublishEnabled)() && verifyTermsOfUseAgreement()) {
+maybeExportVariable('DEVELOCITY_INJECTION_INIT_SCRIPT_NAME', 'gradle-actions.inject-develocity.init.gradle');
 maybeExportVariable('DEVELOCITY_INJECTION_ENABLED', 'true');
 maybeExportVariable('DEVELOCITY_PLUGIN_VERSION', '3.16.2');
 maybeExportVariable('DEVELOCITY_CCUD_PLUGIN_VERSION', '1.13');
-maybeExportVariable('BUILD_SCAN_TERMS_OF_SERVICE_URL', (0, input_params_1.getBuildScanTermsOfServiceUrl)());
-maybeExportVariable('BUILD_SCAN_TERMS_OF_SERVICE_AGREE', (0, input_params_1.getBuildScanTermsOfServiceAgree)());
+maybeExportVariable('BUILD_SCAN_TERMS_OF_USE_URL', (0, input_params_1.getBuildScanTermsOfUseUrl)());
+maybeExportVariable('BUILD_SCAN_TERMS_OF_USE_AGREE', (0, input_params_1.getBuildScanTermsOfUseAgree)());
 }
 }
 exports.setup = setup;
-function verifyTermsOfServiceAgreement() {
-if ((0, input_params_1.getBuildScanTermsOfServiceUrl)() !== 'https://gradle.com/terms-of-service' ||
-(0, input_params_1.getBuildScanTermsOfServiceAgree)() !== 'yes') {
-core.warning(`Terms of service must be agreed in order to publish build scans.`);
+function verifyTermsOfUseAgreement() {
+if ((0, input_params_1.getBuildScanTermsOfUseUrl)() !== 'https://gradle.com/terms-of-service' ||
+(0, input_params_1.getBuildScanTermsOfUseUrl)() !== 'https://gradle.com/help/legal-terms-of-use' ||
+(0, input_params_1.getBuildScanTermsOfUseAgree)() !== 'yes') {
+core.warning(`Terms of use must be agreed in order to publish build scans.`);
 return false;
 }
 return true;
@@ -137279,21 +137222,21 @@ class CacheCleaner {
 });
 });
 }
-ageAllFiles() {
-return __awaiter(this, arguments, void 0, function* (fileName = '*') {
+ageAllFiles(fileName = '*') {
+return __awaiter(this, void 0, void 0, function* () {
 core.debug(`Aging all files in Gradle User Home with name ${fileName}`);
 yield this.setUtimes(`${this.gradleUserHome}/**/${fileName}`, new Date(0));
 });
 }
-touchAllFiles() {
-return __awaiter(this, arguments, void 0, function* (fileName = '*') {
+touchAllFiles(fileName = '*') {
+return __awaiter(this, void 0, void 0, function* () {
 core.debug(`Touching all files in Gradle User Home with name ${fileName}`);
 yield this.setUtimes(`${this.gradleUserHome}/**/${fileName}`, new Date());
 });
 }
 setUtimes(pattern, timestamp) {
+var _a, e_1, _b, _c;
 return __awaiter(this, void 0, void 0, function* () {
-var _a, e_1, _b, _c;
 const globber = yield glob.create(pattern, {
 implicitDescendants: false
 });
@@ -137471,8 +137414,8 @@ class AbstractEntryExtractor {
 });
 }
 saveExtractedCacheEntry(matchingFiles, artifactType, pattern, uniqueFileNames, previouslyRestoredEntries, entryListener) {
+var _a;
 return __awaiter(this, void 0, void 0, function* () {
-var _a;
 const cacheKey = uniqueFileNames
 ? this.createCacheKeyFromFileNames(artifactType, matchingFiles)
 : yield this.createCacheKeyFromFileContents(artifactType, pattern);
@@ -138641,7 +138584,7 @@ var __importStar = (this && this.__importStar) || function (mod) {
 return result;
 };
 Object.defineProperty(exports, "__esModule", ({ value: true }));
-exports.JobSummaryOption = exports.DependencyGraphOption = exports.parseNumericInput = exports.getArtifactRetentionDays = exports.getDependencyGraphContinueOnFailure = exports.getDependencyGraphOption = exports.getBuildScanTermsOfServiceAgree = exports.getBuildScanTermsOfServiceUrl = exports.getBuildScanPublishEnabled = exports.getPRCommentOption = exports.getJobSummaryOption = exports.isJobSummaryEnabled = exports.getGithubToken = exports.getJobMatrix = exports.getArguments = exports.getGradleVersion = exports.getBuildRootDirectory = exports.getCacheExcludes = exports.getCacheIncludes = exports.getCacheEncryptionKey = exports.isCacheCleanupEnabled = exports.isCacheDebuggingEnabled = exports.isCacheStrictMatch = exports.isCacheOverwriteExisting = exports.isCacheWriteOnly = exports.isCacheReadOnly = exports.isCacheDisabled = void 0;
+exports.JobSummaryOption = exports.DependencyGraphOption = exports.parseNumericInput = exports.getArtifactRetentionDays = exports.getDependencyGraphContinueOnFailure = exports.getDependencyGraphOption = exports.getBuildScanTermsOfUseAgree = exports.getBuildScanTermsOfUseUrl = exports.getBuildScanPublishEnabled = exports.getPRCommentOption = exports.getJobSummaryOption = exports.isJobSummaryEnabled = exports.getGithubToken = exports.getJobMatrix = exports.getArguments = exports.getGradleVersion = exports.getBuildRootDirectory = exports.getCacheExcludes = exports.getCacheIncludes = exports.getCacheEncryptionKey = exports.isCacheCleanupEnabled = exports.isCacheDebuggingEnabled = exports.isCacheStrictMatch = exports.isCacheOverwriteExisting = exports.isCacheWriteOnly = exports.isCacheReadOnly = exports.isCacheDisabled = void 0;
 const core = __importStar(__nccwpck_require__(42186));
 const string_argv_1 = __nccwpck_require__(19663);
 function isCacheDisabled() {
@@ -138721,14 +138664,21 @@ function getBuildScanPublishEnabled() {
 return getBooleanInput('build-scan-publish');
 }
 exports.getBuildScanPublishEnabled = getBuildScanPublishEnabled;
-function getBuildScanTermsOfServiceUrl() {
-return core.getInput('build-scan-terms-of-service-url');
+function getBuildScanTermsOfUseUrl() {
+return getTermsOfUseProp('build-scan-terms-of-use-url', 'build-scan-terms-of-service-url');
 }
-exports.getBuildScanTermsOfServiceUrl = getBuildScanTermsOfServiceUrl;
-function getBuildScanTermsOfServiceAgree() {
-return core.getInput('build-scan-terms-of-service-agree');
+exports.getBuildScanTermsOfUseUrl = getBuildScanTermsOfUseUrl;
+function getBuildScanTermsOfUseAgree() {
+return getTermsOfUseProp('build-scan-terms-of-use-agree', 'build-scan-terms-of-service-agree');
+}
+exports.getBuildScanTermsOfUseAgree = getBuildScanTermsOfUseAgree;
+function getTermsOfUseProp(newPropName, oldPropName) {
+const newProp = core.getInput(newPropName);
+if (newProp.length !== 0) {
+return newProp;
+}
+return core.getInput(oldPropName);
 }
-exports.getBuildScanTermsOfServiceAgree = getBuildScanTermsOfServiceAgree;
 function parseJobSummaryOption(paramName) {
 const val = core.getInput(paramName);
 switch (val.toLowerCase().trim()) {
@@ -139569,7 +139519,7 @@ function firstString() {
 /***/ ((module) => {
 
 "use strict";
-module.exports = JSON.parse('{"name":"@actions/artifact","version":"2.1.4","preview":true,"description":"Actions artifact lib","keywords":["github","actions","artifact"],"homepage":"https://github.com/actions/toolkit/tree/main/packages/artifact","license":"MIT","main":"lib/artifact.js","types":"lib/artifact.d.ts","directories":{"lib":"lib","test":"__tests__"},"files":["lib","!.DS_Store"],"publishConfig":{"access":"public"},"repository":{"type":"git","url":"git+https://github.com/actions/toolkit.git","directory":"packages/artifact"},"scripts":{"audit-moderate":"npm install && npm audit --json --audit-level=moderate > audit.json","test":"cd ../../ && npm run test ./packages/artifact","bootstrap":"cd ../../ && npm run bootstrap","tsc-run":"tsc","tsc":"npm run bootstrap && npm run tsc-run","gen:docs":"typedoc --plugin typedoc-plugin-markdown --out docs/generated src/artifact.ts --githubPages false --readme none"},"bugs":{"url":"https://github.com/actions/toolkit/issues"},"dependencies":{"@actions/core":"^1.10.0","@actions/github":"^5.1.1","@actions/http-client":"^2.1.0","@azure/storage-blob":"^12.15.0","@octokit/core":"^3.5.1","@octokit/plugin-request-log":"^1.0.4","@octokit/plugin-retry":"^3.0.9","@octokit/request-error":"^5.0.0","@protobuf-ts/plugin":"^2.2.3-alpha.1","archiver":"^5.3.1","crypto":"^1.0.1","jwt-decode":"^3.1.2","twirp-ts":"^2.5.0","unzip-stream":"^0.3.1"},"devDependencies":{"@types/archiver":"^5.3.2","@types/unzip-stream":"^0.3.4","typedoc":"^0.25.4","typedoc-plugin-markdown":"^3.17.1","typescript":"^5.2.2"}}');
+module.exports = JSON.parse('{"name":"@actions/artifact","version":"2.1.0","preview":true,"description":"Actions artifact lib","keywords":["github","actions","artifact"],"homepage":"https://github.com/actions/toolkit/tree/main/packages/artifact","license":"MIT","main":"lib/artifact.js","types":"lib/artifact.d.ts","directories":{"lib":"lib","test":"__tests__"},"files":["lib","!.DS_Store"],"publishConfig":{"access":"public"},"repository":{"type":"git","url":"git+https://github.com/actions/toolkit.git","directory":"packages/artifact"},"scripts":{"audit-moderate":"npm install && npm audit --json --audit-level=moderate > audit.json","test":"cd ../../ && npm run test ./packages/artifact","bootstrap":"cd ../../ && npm run bootstrap","tsc-run":"tsc","tsc":"npm run bootstrap && npm run tsc-run","gen:docs":"typedoc --plugin typedoc-plugin-markdown --out docs/generated src/artifact.ts --githubPages false --readme none"},"bugs":{"url":"https://github.com/actions/toolkit/issues"},"dependencies":{"@actions/core":"^1.10.0","@actions/github":"^5.1.1","@actions/http-client":"^2.1.0","@azure/storage-blob":"^12.15.0","@octokit/core":"^3.5.1","@octokit/plugin-request-log":"^1.0.4","@octokit/plugin-retry":"^3.0.9","@octokit/request-error":"^5.0.0","@protobuf-ts/plugin":"^2.2.3-alpha.1","archiver":"^5.3.1","crypto":"^1.0.1","jwt-decode":"^3.1.2","twirp-ts":"^2.5.0","unzip-stream":"^0.3.1"},"devDependencies":{"@types/archiver":"^5.3.2","@types/unzip-stream":"^0.3.4","typedoc":"^0.25.4","typedoc-plugin-markdown":"^3.17.1","typescript":"^5.2.2"}}');
 
 /***/ }),
 