diff --git a/CHANGELOG.md b/CHANGELOG.md index 6d6c8a15e5..c91166805d 100644 --- a/CHANGELOG.md +++ b/CHANGELOG.md @@ -4,6 +4,12 @@ See the [releases page](https://github.com/github/codeql-action/releases) for th ## [UNRELEASED] +- Upcoming change: Starting April 2026, the CodeQL Action will skip collecting file coverage information on pull requests to improve analysis performance. File coverage information will still be computed on non-PR analyses. Pull request analyses will log a warning about this upcoming change. [#3562](https://github.com/github/codeql-action/pull/3562) + + To opt out of this change: + - **Repositories owned by an organization:** Create a custom repository property with the name `github-codeql-file-coverage-on-prs` and the type "True/false", then set this property to `true` in the repository's settings. For more information, see [Managing custom properties for repositories in your organization](https://docs.github.com/en/organizations/managing-organization-settings/managing-custom-properties-for-repositories-in-your-organization). Alternatively, if you are using an advanced setup workflow, you can set the `CODEQL_ACTION_FILE_COVERAGE_ON_PRS` environment variable to `true` in your workflow. + - **User-owned repositories using default setup:** Switch to an advanced setup workflow and set the `CODEQL_ACTION_FILE_COVERAGE_ON_PRS` environment variable to `true` in your workflow. + - **User-owned repositories using advanced setup:** Set the `CODEQL_ACTION_FILE_COVERAGE_ON_PRS` environment variable to `true` in your workflow. - Fixed [a bug](https://github.com/github/codeql-action/issues/3555) which caused the CodeQL Action to fail loading repository properties if a "Multi select" repository property was configured for the repository. 
[#3557](https://github.com/github/codeql-action/pull/3557) - The CodeQL Action now loads [custom repository properties](https://docs.github.com/en/organizations/managing-organization-settings/managing-custom-properties-for-repositories-in-your-organization) on GitHub Enterprise Server, enabling the customization of features such as `github-codeql-disable-overlay` that was previously only available on GitHub.com. [#3559](https://github.com/github/codeql-action/pull/3559) diff --git a/lib/analyze-action-post.js b/lib/analyze-action-post.js index 454c2d9fb4..48566462a1 100644 --- a/lib/analyze-action-post.js +++ b/lib/analyze-action-post.js @@ -161733,6 +161733,7 @@ var semver2 = __toESM(require_semver2()); var RepositoryPropertyName = /* @__PURE__ */ ((RepositoryPropertyName2) => { RepositoryPropertyName2["DISABLE_OVERLAY"] = "github-codeql-disable-overlay"; RepositoryPropertyName2["EXTRA_QUERIES"] = "github-codeql-extra-queries"; + RepositoryPropertyName2["FILE_COVERAGE_ON_PRS"] = "github-codeql-file-coverage-on-prs"; return RepositoryPropertyName2; })(RepositoryPropertyName || {}); var KNOWN_REPOSITORY_PROPERTY_NAMES = new Set( @@ -162221,11 +162222,8 @@ var featureConfig = { ["skip_file_coverage_on_prs" /* SkipFileCoverageOnPrs */]: { defaultValue: false, envVar: "CODEQL_ACTION_SKIP_FILE_COVERAGE_ON_PRS", - // For testing, this is not behind a CLI version check yet. However - // before rolling this out externally, we should set a minimum version here - // since current versions of the CodeQL CLI will log if baseline information - // cannot be found when interpreting results. 
- minimumVersion: void 0 + minimumVersion: void 0, + toolsFeature: "suppressesMissingFileBaselineWarning" /* SuppressesMissingFileBaselineWarning */ }, ["start_proxy_remove_unused_registries" /* StartProxyRemoveUnusedRegistries */]: { defaultValue: false, diff --git a/lib/analyze-action.js b/lib/analyze-action.js index a65d7175cd..1fd5bf6219 100644 --- a/lib/analyze-action.js +++ b/lib/analyze-action.js @@ -3842,18 +3842,18 @@ var require_webidl = __commonJS({ webidl.errors.exception = function(message) { return new TypeError(`${message.header}: ${message.message}`); }; - webidl.errors.conversionFailed = function(context2) { - const plural = context2.types.length === 1 ? "" : " one of"; - const message = `${context2.argument} could not be converted to${plural}: ${context2.types.join(", ")}.`; + webidl.errors.conversionFailed = function(context3) { + const plural = context3.types.length === 1 ? "" : " one of"; + const message = `${context3.argument} could not be converted to${plural}: ${context3.types.join(", ")}.`; return webidl.errors.exception({ - header: context2.prefix, + header: context3.prefix, message }); }; - webidl.errors.invalidArgument = function(context2) { + webidl.errors.invalidArgument = function(context3) { return webidl.errors.exception({ - header: context2.prefix, - message: `"${context2.value}" is an invalid ${context2.type}.` + header: context3.prefix, + message: `"${context3.value}" is an invalid ${context3.type}.` }); }; webidl.brandCheck = function(V, I, opts) { @@ -9849,17 +9849,17 @@ var require_api_request = __commonJS({ } } } - onConnect(abort, context2) { + onConnect(abort, context3) { if (this.reason) { abort(this.reason); return; } assert(this.callback); this.abort = abort; - this.context = context2; + this.context = context3; } onHeaders(statusCode, rawHeaders, resume, statusMessage) { - const { callback, opaque, abort, context: context2, responseHeaders, highWaterMark } = this; + const { callback, opaque, abort, context: context3, 
responseHeaders, highWaterMark } = this; const headers = responseHeaders === "raw" ? util.parseRawHeaders(rawHeaders) : util.parseHeaders(rawHeaders); if (statusCode < 200) { if (this.onInfo) { @@ -9896,7 +9896,7 @@ var require_api_request = __commonJS({ trailers: this.trailers, opaque, body: res, - context: context2 + context: context3 }); } } @@ -10065,17 +10065,17 @@ var require_api_stream = __commonJS({ } addSignal(this, signal); } - onConnect(abort, context2) { + onConnect(abort, context3) { if (this.reason) { abort(this.reason); return; } assert(this.callback); this.abort = abort; - this.context = context2; + this.context = context3; } onHeaders(statusCode, rawHeaders, resume, statusMessage) { - const { factory, opaque, context: context2, callback, responseHeaders } = this; + const { factory, opaque, context: context3, callback, responseHeaders } = this; const headers = responseHeaders === "raw" ? util.parseRawHeaders(rawHeaders) : util.parseHeaders(rawHeaders); if (statusCode < 200) { if (this.onInfo) { @@ -10103,7 +10103,7 @@ var require_api_stream = __commonJS({ statusCode, headers, opaque, - context: context2 + context: context3 }); if (!res || typeof res.write !== "function" || typeof res.end !== "function" || typeof res.on !== "function") { throw new InvalidReturnValueError("expected Writable"); @@ -10295,7 +10295,7 @@ var require_api_pipeline = __commonJS({ this.res = null; addSignal(this, signal); } - onConnect(abort, context2) { + onConnect(abort, context3) { const { ret, res } = this; if (this.reason) { abort(this.reason); @@ -10304,10 +10304,10 @@ var require_api_pipeline = __commonJS({ assert(!res, "pipeline cannot be retried"); assert(!ret.destroyed); this.abort = abort; - this.context = context2; + this.context = context3; } onHeaders(statusCode, rawHeaders, resume) { - const { opaque, handler: handler2, context: context2 } = this; + const { opaque, handler: handler2, context: context3 } = this; if (statusCode < 200) { if (this.onInfo) { const 
headers = this.responseHeaders === "raw" ? util.parseRawHeaders(rawHeaders) : util.parseHeaders(rawHeaders); @@ -10325,7 +10325,7 @@ var require_api_pipeline = __commonJS({ headers, opaque, body: this.res, - context: context2 + context: context3 }); } catch (err) { this.res.on("error", util.nop); @@ -10409,7 +10409,7 @@ var require_api_upgrade = __commonJS({ this.context = null; addSignal(this, signal); } - onConnect(abort, context2) { + onConnect(abort, context3) { if (this.reason) { abort(this.reason); return; @@ -10423,7 +10423,7 @@ var require_api_upgrade = __commonJS({ } onUpgrade(statusCode, rawHeaders, socket) { assert(statusCode === 101); - const { callback, opaque, context: context2 } = this; + const { callback, opaque, context: context3 } = this; removeSignal(this); this.callback = null; const headers = this.responseHeaders === "raw" ? util.parseRawHeaders(rawHeaders) : util.parseHeaders(rawHeaders); @@ -10431,7 +10431,7 @@ var require_api_upgrade = __commonJS({ headers, socket, opaque, - context: context2 + context: context3 }); } onError(err) { @@ -10500,20 +10500,20 @@ var require_api_connect = __commonJS({ this.abort = null; addSignal(this, signal); } - onConnect(abort, context2) { + onConnect(abort, context3) { if (this.reason) { abort(this.reason); return; } assert(this.callback); this.abort = abort; - this.context = context2; + this.context = context3; } onHeaders() { throw new SocketError("bad connect", null); } onUpgrade(statusCode, rawHeaders, socket) { - const { callback, opaque, context: context2 } = this; + const { callback, opaque, context: context3 } = this; removeSignal(this); this.callback = null; let headers = rawHeaders; @@ -10525,7 +10525,7 @@ var require_api_connect = __commonJS({ headers, socket, opaque, - context: context2 + context: context3 }); } onError(err) { @@ -21321,7 +21321,7 @@ var require_core = __commonJS({ }; Object.defineProperty(exports2, "__esModule", { value: true }); exports2.platform = exports2.toPlatformPath = 
exports2.toWin32Path = exports2.toPosixPath = exports2.markdownSummary = exports2.summary = exports2.ExitCode = void 0; - exports2.exportVariable = exportVariable8; + exports2.exportVariable = exportVariable9; exports2.setSecret = setSecret; exports2.addPath = addPath; exports2.getInput = getInput2; @@ -21353,7 +21353,7 @@ var require_core = __commonJS({ ExitCode2[ExitCode2["Success"] = 0] = "Success"; ExitCode2[ExitCode2["Failure"] = 1] = "Failure"; })(ExitCode || (exports2.ExitCode = ExitCode = {})); - function exportVariable8(name, val) { + function exportVariable9(name, val) { const convertedVal = (0, utils_1.toCommandValue)(val); process.env[name] = convertedVal; const filePath = process.env["GITHUB_ENV"] || ""; @@ -24840,18 +24840,18 @@ var require_webidl2 = __commonJS({ webidl.errors.exception = function(message) { return new TypeError(`${message.header}: ${message.message}`); }; - webidl.errors.conversionFailed = function(context2) { - const plural = context2.types.length === 1 ? "" : " one of"; - const message = `${context2.argument} could not be converted to${plural}: ${context2.types.join(", ")}.`; + webidl.errors.conversionFailed = function(context3) { + const plural = context3.types.length === 1 ? 
"" : " one of"; + const message = `${context3.argument} could not be converted to${plural}: ${context3.types.join(", ")}.`; return webidl.errors.exception({ - header: context2.prefix, + header: context3.prefix, message }); }; - webidl.errors.invalidArgument = function(context2) { + webidl.errors.invalidArgument = function(context3) { return webidl.errors.exception({ - header: context2.prefix, - message: `"${context2.value}" is an invalid ${context2.type}.` + header: context3.prefix, + message: `"${context3.value}" is an invalid ${context3.type}.` }); }; webidl.brandCheck = function(V, I, opts) { @@ -30847,17 +30847,17 @@ var require_api_request2 = __commonJS({ } } } - onConnect(abort, context2) { + onConnect(abort, context3) { if (this.reason) { abort(this.reason); return; } assert(this.callback); this.abort = abort; - this.context = context2; + this.context = context3; } onHeaders(statusCode, rawHeaders, resume, statusMessage) { - const { callback, opaque, abort, context: context2, responseHeaders, highWaterMark } = this; + const { callback, opaque, abort, context: context3, responseHeaders, highWaterMark } = this; const headers = responseHeaders === "raw" ? 
util.parseRawHeaders(rawHeaders) : util.parseHeaders(rawHeaders); if (statusCode < 200) { if (this.onInfo) { @@ -30894,7 +30894,7 @@ var require_api_request2 = __commonJS({ trailers: this.trailers, opaque, body: res, - context: context2 + context: context3 }); } } @@ -31063,17 +31063,17 @@ var require_api_stream2 = __commonJS({ } addSignal(this, signal); } - onConnect(abort, context2) { + onConnect(abort, context3) { if (this.reason) { abort(this.reason); return; } assert(this.callback); this.abort = abort; - this.context = context2; + this.context = context3; } onHeaders(statusCode, rawHeaders, resume, statusMessage) { - const { factory, opaque, context: context2, callback, responseHeaders } = this; + const { factory, opaque, context: context3, callback, responseHeaders } = this; const headers = responseHeaders === "raw" ? util.parseRawHeaders(rawHeaders) : util.parseHeaders(rawHeaders); if (statusCode < 200) { if (this.onInfo) { @@ -31101,7 +31101,7 @@ var require_api_stream2 = __commonJS({ statusCode, headers, opaque, - context: context2 + context: context3 }); if (!res || typeof res.write !== "function" || typeof res.end !== "function" || typeof res.on !== "function") { throw new InvalidReturnValueError("expected Writable"); @@ -31293,7 +31293,7 @@ var require_api_pipeline2 = __commonJS({ this.res = null; addSignal(this, signal); } - onConnect(abort, context2) { + onConnect(abort, context3) { const { ret, res } = this; if (this.reason) { abort(this.reason); @@ -31302,10 +31302,10 @@ var require_api_pipeline2 = __commonJS({ assert(!res, "pipeline cannot be retried"); assert(!ret.destroyed); this.abort = abort; - this.context = context2; + this.context = context3; } onHeaders(statusCode, rawHeaders, resume) { - const { opaque, handler: handler2, context: context2 } = this; + const { opaque, handler: handler2, context: context3 } = this; if (statusCode < 200) { if (this.onInfo) { const headers = this.responseHeaders === "raw" ? 
util.parseRawHeaders(rawHeaders) : util.parseHeaders(rawHeaders); @@ -31323,7 +31323,7 @@ var require_api_pipeline2 = __commonJS({ headers, opaque, body: this.res, - context: context2 + context: context3 }); } catch (err) { this.res.on("error", util.nop); @@ -31407,7 +31407,7 @@ var require_api_upgrade2 = __commonJS({ this.context = null; addSignal(this, signal); } - onConnect(abort, context2) { + onConnect(abort, context3) { if (this.reason) { abort(this.reason); return; @@ -31421,7 +31421,7 @@ var require_api_upgrade2 = __commonJS({ } onUpgrade(statusCode, rawHeaders, socket) { assert(statusCode === 101); - const { callback, opaque, context: context2 } = this; + const { callback, opaque, context: context3 } = this; removeSignal(this); this.callback = null; const headers = this.responseHeaders === "raw" ? util.parseRawHeaders(rawHeaders) : util.parseHeaders(rawHeaders); @@ -31429,7 +31429,7 @@ var require_api_upgrade2 = __commonJS({ headers, socket, opaque, - context: context2 + context: context3 }); } onError(err) { @@ -31498,20 +31498,20 @@ var require_api_connect2 = __commonJS({ this.abort = null; addSignal(this, signal); } - onConnect(abort, context2) { + onConnect(abort, context3) { if (this.reason) { abort(this.reason); return; } assert(this.callback); this.abort = abort; - this.context = context2; + this.context = context3; } onHeaders() { throw new SocketError("bad connect", null); } onUpgrade(statusCode, rawHeaders, socket) { - const { callback, opaque, context: context2 } = this; + const { callback, opaque, context: context3 } = this; removeSignal(this); this.callback = null; let headers = rawHeaders; @@ -31523,7 +31523,7 @@ var require_api_connect2 = __commonJS({ headers, socket, opaque, - context: context2 + context: context3 }); } onError(err) { @@ -40178,8 +40178,8 @@ function isDefined(value) { function isKeyOperator(operator) { return operator === ";" || operator === "&" || operator === "?"; } -function getValues(context2, operator, key, modifier) 
{ - var value = context2[key], result = []; +function getValues(context3, operator, key, modifier) { + var value = context3[key], result = []; if (isDefined(value) && value !== "") { if (typeof value === "string" || typeof value === "number" || typeof value === "boolean") { value = value.toString(); @@ -40243,7 +40243,7 @@ function parseUrl(template) { expand: expand.bind(null, template) }; } -function expand(template, context2) { +function expand(template, context3) { var operators = ["+", "#", ".", "/", ";", "?", "&"]; template = template.replace( /\{([^\{\}]+)\}|([^\{\}]+)/g, @@ -40257,7 +40257,7 @@ function expand(template, context2) { } expression.split(/,/g).forEach(function(variable) { var tmp = /([^:\*]*)(?::(\d+)|(\*))?/.exec(variable); - values.push(getValues(context2, operator, tmp[1], tmp[2] || tmp[3])); + values.push(getValues(context3, operator, tmp[1], tmp[2] || tmp[3])); }); if (operator && operator !== "+") { var separator = ","; @@ -48747,7 +48747,7 @@ var require_internal_glob_options_helper = __commonJS({ })(); Object.defineProperty(exports2, "__esModule", { value: true }); exports2.getOptions = getOptions; - var core15 = __importStar2(require_core()); + var core16 = __importStar2(require_core()); function getOptions(copy) { const result = { followSymbolicLinks: true, @@ -48759,23 +48759,23 @@ var require_internal_glob_options_helper = __commonJS({ if (copy) { if (typeof copy.followSymbolicLinks === "boolean") { result.followSymbolicLinks = copy.followSymbolicLinks; - core15.debug(`followSymbolicLinks '${result.followSymbolicLinks}'`); + core16.debug(`followSymbolicLinks '${result.followSymbolicLinks}'`); } if (typeof copy.implicitDescendants === "boolean") { result.implicitDescendants = copy.implicitDescendants; - core15.debug(`implicitDescendants '${result.implicitDescendants}'`); + core16.debug(`implicitDescendants '${result.implicitDescendants}'`); } if (typeof copy.matchDirectories === "boolean") { result.matchDirectories = 
copy.matchDirectories; - core15.debug(`matchDirectories '${result.matchDirectories}'`); + core16.debug(`matchDirectories '${result.matchDirectories}'`); } if (typeof copy.omitBrokenSymbolicLinks === "boolean") { result.omitBrokenSymbolicLinks = copy.omitBrokenSymbolicLinks; - core15.debug(`omitBrokenSymbolicLinks '${result.omitBrokenSymbolicLinks}'`); + core16.debug(`omitBrokenSymbolicLinks '${result.omitBrokenSymbolicLinks}'`); } if (typeof copy.excludeHiddenFiles === "boolean") { result.excludeHiddenFiles = copy.excludeHiddenFiles; - core15.debug(`excludeHiddenFiles '${result.excludeHiddenFiles}'`); + core16.debug(`excludeHiddenFiles '${result.excludeHiddenFiles}'`); } } return result; @@ -50403,7 +50403,7 @@ var require_internal_globber = __commonJS({ }; Object.defineProperty(exports2, "__esModule", { value: true }); exports2.DefaultGlobber = void 0; - var core15 = __importStar2(require_core()); + var core16 = __importStar2(require_core()); var fs18 = __importStar2(require("fs")); var globOptionsHelper = __importStar2(require_internal_glob_options_helper()); var path16 = __importStar2(require("path")); @@ -50456,7 +50456,7 @@ var require_internal_globber = __commonJS({ } const stack = []; for (const searchPath of patternHelper.getSearchPaths(patterns)) { - core15.debug(`Search path '${searchPath}'`); + core16.debug(`Search path '${searchPath}'`); try { yield __await2(fs18.promises.lstat(searchPath)); } catch (err) { @@ -50531,7 +50531,7 @@ var require_internal_globber = __commonJS({ } catch (err) { if (err.code === "ENOENT") { if (options.omitBrokenSymbolicLinks) { - core15.debug(`Broken symlink '${item.path}'`); + core16.debug(`Broken symlink '${item.path}'`); return void 0; } throw new Error(`No information found for the path '${item.path}'. 
This may indicate a broken symbolic link.`); @@ -50547,7 +50547,7 @@ var require_internal_globber = __commonJS({ traversalChain.pop(); } if (traversalChain.some((x) => x === realPath)) { - core15.debug(`Symlink cycle detected for path '${item.path}' and realpath '${realPath}'`); + core16.debug(`Symlink cycle detected for path '${item.path}' and realpath '${realPath}'`); return void 0; } traversalChain.push(realPath); @@ -50650,7 +50650,7 @@ var require_internal_hash_files = __commonJS({ Object.defineProperty(exports2, "__esModule", { value: true }); exports2.hashFiles = hashFiles2; var crypto3 = __importStar2(require("crypto")); - var core15 = __importStar2(require_core()); + var core16 = __importStar2(require_core()); var fs18 = __importStar2(require("fs")); var stream2 = __importStar2(require("stream")); var util = __importStar2(require("util")); @@ -50659,7 +50659,7 @@ var require_internal_hash_files = __commonJS({ return __awaiter2(this, arguments, void 0, function* (globber, currentWorkspace, verbose = false) { var _a, e_1, _b, _c; var _d; - const writeDelegate = verbose ? core15.info : core15.debug; + const writeDelegate = verbose ? core16.info : core16.debug; let hasMatch = false; const githubWorkspace = currentWorkspace ? currentWorkspace : (_d = process.env["GITHUB_WORKSPACE"]) !== null && _d !== void 0 ? 
_d : process.cwd(); const result = crypto3.createHash("sha256"); @@ -52050,7 +52050,7 @@ var require_cacheUtils = __commonJS({ exports2.assertDefined = assertDefined; exports2.getCacheVersion = getCacheVersion; exports2.getRuntimeToken = getRuntimeToken; - var core15 = __importStar2(require_core()); + var core16 = __importStar2(require_core()); var exec = __importStar2(require_exec()); var glob2 = __importStar2(require_glob()); var io7 = __importStar2(require_io()); @@ -52101,7 +52101,7 @@ var require_cacheUtils = __commonJS({ _e = false; const file = _c; const relativeFile = path16.relative(workspace, file).replace(new RegExp(`\\${path16.sep}`, "g"), "/"); - core15.debug(`Matched: ${relativeFile}`); + core16.debug(`Matched: ${relativeFile}`); if (relativeFile === "") { paths.push("."); } else { @@ -52129,7 +52129,7 @@ var require_cacheUtils = __commonJS({ return __awaiter2(this, arguments, void 0, function* (app, additionalArgs = []) { let versionOutput = ""; additionalArgs.push("--version"); - core15.debug(`Checking ${app} ${additionalArgs.join(" ")}`); + core16.debug(`Checking ${app} ${additionalArgs.join(" ")}`); try { yield exec.exec(`${app}`, additionalArgs, { ignoreReturnCode: true, @@ -52140,10 +52140,10 @@ var require_cacheUtils = __commonJS({ } }); } catch (err) { - core15.debug(err.message); + core16.debug(err.message); } versionOutput = versionOutput.trim(); - core15.debug(versionOutput); + core16.debug(versionOutput); return versionOutput; }); } @@ -52151,7 +52151,7 @@ var require_cacheUtils = __commonJS({ return __awaiter2(this, void 0, void 0, function* () { const versionOutput = yield getVersion("zstd", ["--quiet"]); const version = semver9.clean(versionOutput); - core15.debug(`zstd version: ${version}`); + core16.debug(`zstd version: ${version}`); if (versionOutput === "") { return constants_1.CompressionMethod.Gzip; } else { @@ -52276,14 +52276,14 @@ function __esDecorate(ctor, descriptorIn, decorators, contextIn, initializers, e var descriptor = 
descriptorIn || (target ? Object.getOwnPropertyDescriptor(target, contextIn.name) : {}); var _, done = false; for (var i = decorators.length - 1; i >= 0; i--) { - var context2 = {}; - for (var p in contextIn) context2[p] = p === "access" ? {} : contextIn[p]; - for (var p in contextIn.access) context2.access[p] = contextIn.access[p]; - context2.addInitializer = function(f) { + var context3 = {}; + for (var p in contextIn) context3[p] = p === "access" ? {} : contextIn[p]; + for (var p in contextIn.access) context3.access[p] = contextIn.access[p]; + context3.addInitializer = function(f) { if (done) throw new TypeError("Cannot add initializers after decoration has completed"); extraInitializers.push(accept(f || null)); }; - var result = (0, decorators[i])(kind === "accessor" ? { get: descriptor.get, set: descriptor.set } : descriptor[key], context2); + var result = (0, decorators[i])(kind === "accessor" ? { get: descriptor.get, set: descriptor.set } : descriptor[key], context3); if (kind === "accessor") { if (result === void 0) continue; if (result === null || typeof result !== "object") throw new TypeError("Object expected"); @@ -53010,19 +53010,19 @@ var require_logger = __commonJS({ logger: clientLogger }; } - var context2 = createLoggerContext({ + var context3 = createLoggerContext({ logLevelEnvVarName: "TYPESPEC_RUNTIME_LOG_LEVEL", namespace: "typeSpecRuntime" }); - exports2.TypeSpecRuntimeLogger = context2.logger; + exports2.TypeSpecRuntimeLogger = context3.logger; function setLogLevel(logLevel) { - context2.setLogLevel(logLevel); + context3.setLogLevel(logLevel); } function getLogLevel() { - return context2.getLogLevel(); + return context3.getLogLevel(); } function createClientLogger(namespace) { - return context2.createClientLogger(namespace); + return context3.createClientLogger(namespace); } } }); @@ -57268,19 +57268,19 @@ var require_commonjs2 = __commonJS({ exports2.getLogLevel = getLogLevel; exports2.createClientLogger = createClientLogger; var logger_1 = 
require_internal(); - var context2 = (0, logger_1.createLoggerContext)({ + var context3 = (0, logger_1.createLoggerContext)({ logLevelEnvVarName: "AZURE_LOG_LEVEL", namespace: "azure" }); - exports2.AzureLogger = context2.logger; + exports2.AzureLogger = context3.logger; function setLogLevel(level) { - context2.setLogLevel(level); + context3.setLogLevel(level); } function getLogLevel() { - return context2.getLogLevel(); + return context3.getLogLevel(); } function createClientLogger(namespace) { - return context2.createClientLogger(namespace); + return context3.createClientLogger(namespace); } } }); @@ -58190,14 +58190,14 @@ var require_tracingContext = __commonJS({ namespace: /* @__PURE__ */ Symbol.for("@azure/core-tracing namespace") }; function createTracingContext(options = {}) { - let context2 = new TracingContextImpl(options.parentContext); + let context3 = new TracingContextImpl(options.parentContext); if (options.span) { - context2 = context2.setValue(exports2.knownContextKeys.span, options.span); + context3 = context3.setValue(exports2.knownContextKeys.span, options.span); } if (options.namespace) { - context2 = context2.setValue(exports2.knownContextKeys.namespace, options.namespace); + context3 = context3.setValue(exports2.knownContextKeys.namespace, options.namespace); } - return context2; + return context3; } var TracingContextImpl = class _TracingContextImpl { _contextMap; @@ -58335,8 +58335,8 @@ var require_tracingClient = __commonJS({ span.end(); } } - function withContext(context2, callback, ...callbackArgs) { - return (0, instrumenter_js_1.getInstrumenter)().withContext(context2, callback, ...callbackArgs); + function withContext(context3, callback, ...callbackArgs) { + return (0, instrumenter_js_1.getInstrumenter)().withContext(context3, callback, ...callbackArgs); } function parseTraceparentHeader(traceparentHeader) { return (0, instrumenter_js_1.getInstrumenter)().parseTraceparentHeader(traceparentHeader); @@ -91807,7 +91807,7 @@ var 
require_uploadUtils = __commonJS({ Object.defineProperty(exports2, "__esModule", { value: true }); exports2.UploadProgress = void 0; exports2.uploadCacheArchiveSDK = uploadCacheArchiveSDK; - var core15 = __importStar2(require_core()); + var core16 = __importStar2(require_core()); var storage_blob_1 = require_commonjs15(); var errors_1 = require_errors3(); var UploadProgress = class { @@ -91849,7 +91849,7 @@ var require_uploadUtils = __commonJS({ const percentage = (100 * (transferredBytes / this.contentLength)).toFixed(1); const elapsedTime = Date.now() - this.startTime; const uploadSpeed = (transferredBytes / (1024 * 1024) / (elapsedTime / 1e3)).toFixed(1); - core15.info(`Sent ${transferredBytes} of ${this.contentLength} (${percentage}%), ${uploadSpeed} MBs/sec`); + core16.info(`Sent ${transferredBytes} of ${this.contentLength} (${percentage}%), ${uploadSpeed} MBs/sec`); if (this.isDone()) { this.displayedComplete = true; } @@ -91906,14 +91906,14 @@ var require_uploadUtils = __commonJS({ }; try { uploadProgress.startDisplayTimer(); - core15.debug(`BlobClient: ${blobClient.name}:${blobClient.accountName}:${blobClient.containerName}`); + core16.debug(`BlobClient: ${blobClient.name}:${blobClient.accountName}:${blobClient.containerName}`); const response = yield blockBlobClient.uploadFile(archivePath, uploadOptions); if (response._response.status >= 400) { throw new errors_1.InvalidResponseError(`uploadCacheArchiveSDK: upload failed with status code ${response._response.status}`); } return response; } catch (error3) { - core15.warning(`uploadCacheArchiveSDK: internal error uploading cache archive: ${error3.message}`); + core16.warning(`uploadCacheArchiveSDK: internal error uploading cache archive: ${error3.message}`); throw error3; } finally { uploadProgress.stopDisplayTimer(); @@ -91998,7 +91998,7 @@ var require_requestUtils = __commonJS({ exports2.retry = retry2; exports2.retryTypedResponse = retryTypedResponse; exports2.retryHttpClientResponse = 
retryHttpClientResponse; - var core15 = __importStar2(require_core()); + var core16 = __importStar2(require_core()); var http_client_1 = require_lib(); var constants_1 = require_constants12(); function isSuccessStatusCode(statusCode) { @@ -92056,9 +92056,9 @@ var require_requestUtils = __commonJS({ isRetryable = isRetryableStatusCode(statusCode); errorMessage = `Cache service responded with ${statusCode}`; } - core15.debug(`${name} - Attempt ${attempt} of ${maxAttempts} failed with error: ${errorMessage}`); + core16.debug(`${name} - Attempt ${attempt} of ${maxAttempts} failed with error: ${errorMessage}`); if (!isRetryable) { - core15.debug(`${name} - Error is not retryable`); + core16.debug(`${name} - Error is not retryable`); break; } yield sleep(delay2); @@ -92317,7 +92317,7 @@ var require_downloadUtils = __commonJS({ exports2.downloadCacheHttpClient = downloadCacheHttpClient; exports2.downloadCacheHttpClientConcurrent = downloadCacheHttpClientConcurrent; exports2.downloadCacheStorageSDK = downloadCacheStorageSDK; - var core15 = __importStar2(require_core()); + var core16 = __importStar2(require_core()); var http_client_1 = require_lib(); var storage_blob_1 = require_commonjs15(); var buffer = __importStar2(require("buffer")); @@ -92355,7 +92355,7 @@ var require_downloadUtils = __commonJS({ this.segmentIndex = this.segmentIndex + 1; this.segmentSize = segmentSize; this.receivedBytes = 0; - core15.debug(`Downloading segment at offset ${this.segmentOffset} with length ${this.segmentSize}...`); + core16.debug(`Downloading segment at offset ${this.segmentOffset} with length ${this.segmentSize}...`); } /** * Sets the number of bytes received for the current segment. 
@@ -92389,7 +92389,7 @@ var require_downloadUtils = __commonJS({ const percentage = (100 * (transferredBytes / this.contentLength)).toFixed(1); const elapsedTime = Date.now() - this.startTime; const downloadSpeed = (transferredBytes / (1024 * 1024) / (elapsedTime / 1e3)).toFixed(1); - core15.info(`Received ${transferredBytes} of ${this.contentLength} (${percentage}%), ${downloadSpeed} MBs/sec`); + core16.info(`Received ${transferredBytes} of ${this.contentLength} (${percentage}%), ${downloadSpeed} MBs/sec`); if (this.isDone()) { this.displayedComplete = true; } @@ -92439,7 +92439,7 @@ var require_downloadUtils = __commonJS({ })); downloadResponse.message.socket.setTimeout(constants_1.SocketTimeout, () => { downloadResponse.message.destroy(); - core15.debug(`Aborting download, socket timed out after ${constants_1.SocketTimeout} ms`); + core16.debug(`Aborting download, socket timed out after ${constants_1.SocketTimeout} ms`); }); yield pipeResponseToStream(downloadResponse, writeStream); const contentLengthHeader = downloadResponse.message.headers["content-length"]; @@ -92450,7 +92450,7 @@ var require_downloadUtils = __commonJS({ throw new Error(`Incomplete download. Expected file size: ${expectedLength}, actual file size: ${actualLength}`); } } else { - core15.debug("Unable to validate download, no Content-Length header"); + core16.debug("Unable to validate download, no Content-Length header"); } }); } @@ -92568,7 +92568,7 @@ var require_downloadUtils = __commonJS({ const properties = yield client.getProperties(); const contentLength = (_a = properties.contentLength) !== null && _a !== void 0 ? 
_a : -1; if (contentLength < 0) { - core15.debug("Unable to determine content length, downloading file with http-client..."); + core16.debug("Unable to determine content length, downloading file with http-client..."); yield downloadCacheHttpClient(archiveLocation, archivePath); } else { const maxSegmentSize = Math.min(134217728, buffer.constants.MAX_LENGTH); @@ -92658,7 +92658,7 @@ var require_options = __commonJS({ Object.defineProperty(exports2, "__esModule", { value: true }); exports2.getUploadOptions = getUploadOptions; exports2.getDownloadOptions = getDownloadOptions; - var core15 = __importStar2(require_core()); + var core16 = __importStar2(require_core()); function getUploadOptions(copy) { const result = { useAzureSdk: false, @@ -92678,9 +92678,9 @@ var require_options = __commonJS({ } result.uploadConcurrency = !isNaN(Number(process.env["CACHE_UPLOAD_CONCURRENCY"])) ? Math.min(32, Number(process.env["CACHE_UPLOAD_CONCURRENCY"])) : result.uploadConcurrency; result.uploadChunkSize = !isNaN(Number(process.env["CACHE_UPLOAD_CHUNK_SIZE"])) ? 
Math.min(128 * 1024 * 1024, Number(process.env["CACHE_UPLOAD_CHUNK_SIZE"]) * 1024 * 1024) : result.uploadChunkSize; - core15.debug(`Use Azure SDK: ${result.useAzureSdk}`); - core15.debug(`Upload concurrency: ${result.uploadConcurrency}`); - core15.debug(`Upload chunk size: ${result.uploadChunkSize}`); + core16.debug(`Use Azure SDK: ${result.useAzureSdk}`); + core16.debug(`Upload concurrency: ${result.uploadConcurrency}`); + core16.debug(`Upload chunk size: ${result.uploadChunkSize}`); return result; } function getDownloadOptions(copy) { @@ -92716,12 +92716,12 @@ var require_options = __commonJS({ if (segmentDownloadTimeoutMins && !isNaN(Number(segmentDownloadTimeoutMins)) && isFinite(Number(segmentDownloadTimeoutMins))) { result.segmentTimeoutInMs = Number(segmentDownloadTimeoutMins) * 60 * 1e3; } - core15.debug(`Use Azure SDK: ${result.useAzureSdk}`); - core15.debug(`Download concurrency: ${result.downloadConcurrency}`); - core15.debug(`Request timeout (ms): ${result.timeoutInMs}`); - core15.debug(`Cache segment download timeout mins env var: ${process.env["SEGMENT_DOWNLOAD_TIMEOUT_MINS"]}`); - core15.debug(`Segment download timeout (ms): ${result.segmentTimeoutInMs}`); - core15.debug(`Lookup only: ${result.lookupOnly}`); + core16.debug(`Use Azure SDK: ${result.useAzureSdk}`); + core16.debug(`Download concurrency: ${result.downloadConcurrency}`); + core16.debug(`Request timeout (ms): ${result.timeoutInMs}`); + core16.debug(`Cache segment download timeout mins env var: ${process.env["SEGMENT_DOWNLOAD_TIMEOUT_MINS"]}`); + core16.debug(`Segment download timeout (ms): ${result.segmentTimeoutInMs}`); + core16.debug(`Lookup only: ${result.lookupOnly}`); return result; } } @@ -92915,7 +92915,7 @@ var require_cacheHttpClient = __commonJS({ exports2.downloadCache = downloadCache; exports2.reserveCache = reserveCache; exports2.saveCache = saveCache5; - var core15 = __importStar2(require_core()); + var core16 = __importStar2(require_core()); var http_client_1 = 
require_lib(); var auth_1 = require_auth(); var fs18 = __importStar2(require("fs")); @@ -92933,7 +92933,7 @@ var require_cacheHttpClient = __commonJS({ throw new Error("Cache Service Url not found, unable to restore cache."); } const url2 = `${baseUrl}_apis/artifactcache/${resource}`; - core15.debug(`Resource Url: ${url2}`); + core16.debug(`Resource Url: ${url2}`); return url2; } function createAcceptHeader(type2, apiVersion) { @@ -92961,7 +92961,7 @@ var require_cacheHttpClient = __commonJS({ return httpClient.getJson(getCacheApiUrl(resource)); })); if (response.statusCode === 204) { - if (core15.isDebug()) { + if (core16.isDebug()) { yield printCachesListForDiagnostics(keys[0], httpClient, version); } return null; @@ -92974,9 +92974,9 @@ var require_cacheHttpClient = __commonJS({ if (!cacheDownloadUrl) { throw new Error("Cache not found."); } - core15.setSecret(cacheDownloadUrl); - core15.debug(`Cache Result:`); - core15.debug(JSON.stringify(cacheResult)); + core16.setSecret(cacheDownloadUrl); + core16.debug(`Cache Result:`); + core16.debug(JSON.stringify(cacheResult)); return cacheResult; }); } @@ -92990,10 +92990,10 @@ var require_cacheHttpClient = __commonJS({ const cacheListResult = response.result; const totalCount = cacheListResult === null || cacheListResult === void 0 ? void 0 : cacheListResult.totalCount; if (totalCount && totalCount > 0) { - core15.debug(`No matching cache found for cache key '${key}', version '${version} and scope ${process.env["GITHUB_REF"]}. There exist one or more cache(s) with similar key but they have different version or scope. See more info on cache matching here: https://docs.github.com/en/actions/using-workflows/caching-dependencies-to-speed-up-workflows#matching-a-cache-key + core16.debug(`No matching cache found for cache key '${key}', version '${version} and scope ${process.env["GITHUB_REF"]}. There exist one or more cache(s) with similar key but they have different version or scope. 
See more info on cache matching here: https://docs.github.com/en/actions/using-workflows/caching-dependencies-to-speed-up-workflows#matching-a-cache-key Other caches with similar key:`); for (const cacheEntry of (cacheListResult === null || cacheListResult === void 0 ? void 0 : cacheListResult.artifactCaches) || []) { - core15.debug(`Cache Key: ${cacheEntry === null || cacheEntry === void 0 ? void 0 : cacheEntry.cacheKey}, Cache Version: ${cacheEntry === null || cacheEntry === void 0 ? void 0 : cacheEntry.cacheVersion}, Cache Scope: ${cacheEntry === null || cacheEntry === void 0 ? void 0 : cacheEntry.scope}, Cache Created: ${cacheEntry === null || cacheEntry === void 0 ? void 0 : cacheEntry.creationTime}`); + core16.debug(`Cache Key: ${cacheEntry === null || cacheEntry === void 0 ? void 0 : cacheEntry.cacheKey}, Cache Version: ${cacheEntry === null || cacheEntry === void 0 ? void 0 : cacheEntry.cacheVersion}, Cache Scope: ${cacheEntry === null || cacheEntry === void 0 ? void 0 : cacheEntry.scope}, Cache Created: ${cacheEntry === null || cacheEntry === void 0 ? 
void 0 : cacheEntry.creationTime}`); } } } @@ -93036,7 +93036,7 @@ Other caches with similar key:`); } function uploadChunk(httpClient, resourceUrl, openStream, start, end) { return __awaiter2(this, void 0, void 0, function* () { - core15.debug(`Uploading chunk of size ${end - start + 1} bytes at offset ${start} with content range: ${getContentRange(start, end)}`); + core16.debug(`Uploading chunk of size ${end - start + 1} bytes at offset ${start} with content range: ${getContentRange(start, end)}`); const additionalHeaders = { "Content-Type": "application/octet-stream", "Content-Range": getContentRange(start, end) @@ -93058,7 +93058,7 @@ Other caches with similar key:`); const concurrency = utils.assertDefined("uploadConcurrency", uploadOptions.uploadConcurrency); const maxChunkSize = utils.assertDefined("uploadChunkSize", uploadOptions.uploadChunkSize); const parallelUploads = [...new Array(concurrency).keys()]; - core15.debug("Awaiting all uploads"); + core16.debug("Awaiting all uploads"); let offset = 0; try { yield Promise.all(parallelUploads.map(() => __awaiter2(this, void 0, void 0, function* () { @@ -93101,16 +93101,16 @@ Other caches with similar key:`); yield (0, uploadUtils_1.uploadCacheArchiveSDK)(signedUploadURL, archivePath, options); } else { const httpClient = createHttpClient(); - core15.debug("Upload cache"); + core16.debug("Upload cache"); yield uploadFile(httpClient, cacheId, archivePath, options); - core15.debug("Commiting cache"); + core16.debug("Commiting cache"); const cacheSize = utils.getArchiveFileSizeInBytes(archivePath); - core15.info(`Cache Size: ~${Math.round(cacheSize / (1024 * 1024))} MB (${cacheSize} B)`); + core16.info(`Cache Size: ~${Math.round(cacheSize / (1024 * 1024))} MB (${cacheSize} B)`); const commitCacheResponse = yield commitCache(httpClient, cacheId, cacheSize); if (!(0, requestUtils_1.isSuccessStatusCode)(commitCacheResponse.statusCode)) { throw new Error(`Cache service responded with ${commitCacheResponse.statusCode} 
during commit cache.`); } - core15.info("Cache saved successfully"); + core16.info("Cache saved successfully"); } }); } @@ -98593,7 +98593,7 @@ var require_cache5 = __commonJS({ exports2.isFeatureAvailable = isFeatureAvailable; exports2.restoreCache = restoreCache5; exports2.saveCache = saveCache5; - var core15 = __importStar2(require_core()); + var core16 = __importStar2(require_core()); var path16 = __importStar2(require("path")); var utils = __importStar2(require_cacheUtils()); var cacheHttpClient = __importStar2(require_cacheHttpClient()); @@ -98652,7 +98652,7 @@ var require_cache5 = __commonJS({ function restoreCache5(paths_1, primaryKey_1, restoreKeys_1, options_1) { return __awaiter2(this, arguments, void 0, function* (paths, primaryKey, restoreKeys, options, enableCrossOsArchive = false) { const cacheServiceVersion = (0, config_1.getCacheServiceVersion)(); - core15.debug(`Cache service version: ${cacheServiceVersion}`); + core16.debug(`Cache service version: ${cacheServiceVersion}`); checkPaths(paths); switch (cacheServiceVersion) { case "v2": @@ -98667,8 +98667,8 @@ var require_cache5 = __commonJS({ return __awaiter2(this, arguments, void 0, function* (paths, primaryKey, restoreKeys, options, enableCrossOsArchive = false) { restoreKeys = restoreKeys || []; const keys = [primaryKey, ...restoreKeys]; - core15.debug("Resolved Keys:"); - core15.debug(JSON.stringify(keys)); + core16.debug("Resolved Keys:"); + core16.debug(JSON.stringify(keys)); if (keys.length > 10) { throw new ValidationError(`Key Validation Error: Keys are limited to a maximum of 10.`); } @@ -98686,19 +98686,19 @@ var require_cache5 = __commonJS({ return void 0; } if (options === null || options === void 0 ? 
void 0 : options.lookupOnly) { - core15.info("Lookup only - skipping download"); + core16.info("Lookup only - skipping download"); return cacheEntry.cacheKey; } archivePath = path16.join(yield utils.createTempDirectory(), utils.getCacheFileName(compressionMethod)); - core15.debug(`Archive Path: ${archivePath}`); + core16.debug(`Archive Path: ${archivePath}`); yield cacheHttpClient.downloadCache(cacheEntry.archiveLocation, archivePath, options); - if (core15.isDebug()) { + if (core16.isDebug()) { yield (0, tar_1.listTar)(archivePath, compressionMethod); } const archiveFileSize = utils.getArchiveFileSizeInBytes(archivePath); - core15.info(`Cache Size: ~${Math.round(archiveFileSize / (1024 * 1024))} MB (${archiveFileSize} B)`); + core16.info(`Cache Size: ~${Math.round(archiveFileSize / (1024 * 1024))} MB (${archiveFileSize} B)`); yield (0, tar_1.extractTar)(archivePath, compressionMethod); - core15.info("Cache restored successfully"); + core16.info("Cache restored successfully"); return cacheEntry.cacheKey; } catch (error3) { const typedError = error3; @@ -98706,16 +98706,16 @@ var require_cache5 = __commonJS({ throw error3; } else { if (typedError instanceof http_client_1.HttpClientError && typeof typedError.statusCode === "number" && typedError.statusCode >= 500) { - core15.error(`Failed to restore: ${error3.message}`); + core16.error(`Failed to restore: ${error3.message}`); } else { - core15.warning(`Failed to restore: ${error3.message}`); + core16.warning(`Failed to restore: ${error3.message}`); } } } finally { try { yield utils.unlinkFile(archivePath); } catch (error3) { - core15.debug(`Failed to delete archive: ${error3}`); + core16.debug(`Failed to delete archive: ${error3}`); } } return void 0; @@ -98726,8 +98726,8 @@ var require_cache5 = __commonJS({ options = Object.assign(Object.assign({}, options), { useAzureSdk: true }); restoreKeys = restoreKeys || []; const keys = [primaryKey, ...restoreKeys]; - core15.debug("Resolved Keys:"); - 
core15.debug(JSON.stringify(keys)); + core16.debug("Resolved Keys:"); + core16.debug(JSON.stringify(keys)); if (keys.length > 10) { throw new ValidationError(`Key Validation Error: Keys are limited to a maximum of 10.`); } @@ -98745,30 +98745,30 @@ var require_cache5 = __commonJS({ }; const response = yield twirpClient.GetCacheEntryDownloadURL(request2); if (!response.ok) { - core15.debug(`Cache not found for version ${request2.version} of keys: ${keys.join(", ")}`); + core16.debug(`Cache not found for version ${request2.version} of keys: ${keys.join(", ")}`); return void 0; } const isRestoreKeyMatch = request2.key !== response.matchedKey; if (isRestoreKeyMatch) { - core15.info(`Cache hit for restore-key: ${response.matchedKey}`); + core16.info(`Cache hit for restore-key: ${response.matchedKey}`); } else { - core15.info(`Cache hit for: ${response.matchedKey}`); + core16.info(`Cache hit for: ${response.matchedKey}`); } if (options === null || options === void 0 ? void 0 : options.lookupOnly) { - core15.info("Lookup only - skipping download"); + core16.info("Lookup only - skipping download"); return response.matchedKey; } archivePath = path16.join(yield utils.createTempDirectory(), utils.getCacheFileName(compressionMethod)); - core15.debug(`Archive path: ${archivePath}`); - core15.debug(`Starting download of archive to: ${archivePath}`); + core16.debug(`Archive path: ${archivePath}`); + core16.debug(`Starting download of archive to: ${archivePath}`); yield cacheHttpClient.downloadCache(response.signedDownloadUrl, archivePath, options); const archiveFileSize = utils.getArchiveFileSizeInBytes(archivePath); - core15.info(`Cache Size: ~${Math.round(archiveFileSize / (1024 * 1024))} MB (${archiveFileSize} B)`); - if (core15.isDebug()) { + core16.info(`Cache Size: ~${Math.round(archiveFileSize / (1024 * 1024))} MB (${archiveFileSize} B)`); + if (core16.isDebug()) { yield (0, tar_1.listTar)(archivePath, compressionMethod); } yield (0, tar_1.extractTar)(archivePath, 
compressionMethod); - core15.info("Cache restored successfully"); + core16.info("Cache restored successfully"); return response.matchedKey; } catch (error3) { const typedError = error3; @@ -98776,9 +98776,9 @@ var require_cache5 = __commonJS({ throw error3; } else { if (typedError instanceof http_client_1.HttpClientError && typeof typedError.statusCode === "number" && typedError.statusCode >= 500) { - core15.error(`Failed to restore: ${error3.message}`); + core16.error(`Failed to restore: ${error3.message}`); } else { - core15.warning(`Failed to restore: ${error3.message}`); + core16.warning(`Failed to restore: ${error3.message}`); } } } finally { @@ -98787,7 +98787,7 @@ var require_cache5 = __commonJS({ yield utils.unlinkFile(archivePath); } } catch (error3) { - core15.debug(`Failed to delete archive: ${error3}`); + core16.debug(`Failed to delete archive: ${error3}`); } } return void 0; @@ -98796,7 +98796,7 @@ var require_cache5 = __commonJS({ function saveCache5(paths_1, key_1, options_1) { return __awaiter2(this, arguments, void 0, function* (paths, key, options, enableCrossOsArchive = false) { const cacheServiceVersion = (0, config_1.getCacheServiceVersion)(); - core15.debug(`Cache service version: ${cacheServiceVersion}`); + core16.debug(`Cache service version: ${cacheServiceVersion}`); checkPaths(paths); checkKey(key); switch (cacheServiceVersion) { @@ -98814,26 +98814,26 @@ var require_cache5 = __commonJS({ const compressionMethod = yield utils.getCompressionMethod(); let cacheId = -1; const cachePaths = yield utils.resolvePaths(paths); - core15.debug("Cache Paths:"); - core15.debug(`${JSON.stringify(cachePaths)}`); + core16.debug("Cache Paths:"); + core16.debug(`${JSON.stringify(cachePaths)}`); if (cachePaths.length === 0) { throw new Error(`Path Validation Error: Path(s) specified in the action for caching do(es) not exist, hence no cache is being saved.`); } const archiveFolder = yield utils.createTempDirectory(); const archivePath = 
path16.join(archiveFolder, utils.getCacheFileName(compressionMethod)); - core15.debug(`Archive Path: ${archivePath}`); + core16.debug(`Archive Path: ${archivePath}`); try { yield (0, tar_1.createTar)(archiveFolder, cachePaths, compressionMethod); - if (core15.isDebug()) { + if (core16.isDebug()) { yield (0, tar_1.listTar)(archivePath, compressionMethod); } const fileSizeLimit = 10 * 1024 * 1024 * 1024; const archiveFileSize = utils.getArchiveFileSizeInBytes(archivePath); - core15.debug(`File Size: ${archiveFileSize}`); + core16.debug(`File Size: ${archiveFileSize}`); if (archiveFileSize > fileSizeLimit && !(0, config_1.isGhes)()) { throw new Error(`Cache size of ~${Math.round(archiveFileSize / (1024 * 1024))} MB (${archiveFileSize} B) is over the 10GB limit, not saving cache.`); } - core15.debug("Reserving Cache"); + core16.debug("Reserving Cache"); const reserveCacheResponse = yield cacheHttpClient.reserveCache(key, paths, { compressionMethod, enableCrossOsArchive, @@ -98846,26 +98846,26 @@ var require_cache5 = __commonJS({ } else { throw new ReserveCacheError2(`Unable to reserve cache with key ${key}, another job may be creating this cache. More details: ${(_e = reserveCacheResponse === null || reserveCacheResponse === void 0 ? void 0 : reserveCacheResponse.error) === null || _e === void 0 ? 
void 0 : _e.message}`); } - core15.debug(`Saving Cache (ID: ${cacheId})`); + core16.debug(`Saving Cache (ID: ${cacheId})`); yield cacheHttpClient.saveCache(cacheId, archivePath, "", options); } catch (error3) { const typedError = error3; if (typedError.name === ValidationError.name) { throw error3; } else if (typedError.name === ReserveCacheError2.name) { - core15.info(`Failed to save: ${typedError.message}`); + core16.info(`Failed to save: ${typedError.message}`); } else { if (typedError instanceof http_client_1.HttpClientError && typeof typedError.statusCode === "number" && typedError.statusCode >= 500) { - core15.error(`Failed to save: ${typedError.message}`); + core16.error(`Failed to save: ${typedError.message}`); } else { - core15.warning(`Failed to save: ${typedError.message}`); + core16.warning(`Failed to save: ${typedError.message}`); } } } finally { try { yield utils.unlinkFile(archivePath); } catch (error3) { - core15.debug(`Failed to delete archive: ${error3}`); + core16.debug(`Failed to delete archive: ${error3}`); } } return cacheId; @@ -98878,23 +98878,23 @@ var require_cache5 = __commonJS({ const twirpClient = cacheTwirpClient.internalCacheTwirpClient(); let cacheId = -1; const cachePaths = yield utils.resolvePaths(paths); - core15.debug("Cache Paths:"); - core15.debug(`${JSON.stringify(cachePaths)}`); + core16.debug("Cache Paths:"); + core16.debug(`${JSON.stringify(cachePaths)}`); if (cachePaths.length === 0) { throw new Error(`Path Validation Error: Path(s) specified in the action for caching do(es) not exist, hence no cache is being saved.`); } const archiveFolder = yield utils.createTempDirectory(); const archivePath = path16.join(archiveFolder, utils.getCacheFileName(compressionMethod)); - core15.debug(`Archive Path: ${archivePath}`); + core16.debug(`Archive Path: ${archivePath}`); try { yield (0, tar_1.createTar)(archiveFolder, cachePaths, compressionMethod); - if (core15.isDebug()) { + if (core16.isDebug()) { yield (0, 
tar_1.listTar)(archivePath, compressionMethod); } const archiveFileSize = utils.getArchiveFileSizeInBytes(archivePath); - core15.debug(`File Size: ${archiveFileSize}`); + core16.debug(`File Size: ${archiveFileSize}`); options.archiveSizeBytes = archiveFileSize; - core15.debug("Reserving Cache"); + core16.debug("Reserving Cache"); const version = utils.getCacheVersion(paths, compressionMethod, enableCrossOsArchive); const request2 = { key, @@ -98905,16 +98905,16 @@ var require_cache5 = __commonJS({ const response = yield twirpClient.CreateCacheEntry(request2); if (!response.ok) { if (response.message) { - core15.warning(`Cache reservation failed: ${response.message}`); + core16.warning(`Cache reservation failed: ${response.message}`); } throw new Error(response.message || "Response was not ok"); } signedUploadUrl = response.signedUploadUrl; } catch (error3) { - core15.debug(`Failed to reserve cache: ${error3}`); + core16.debug(`Failed to reserve cache: ${error3}`); throw new ReserveCacheError2(`Unable to reserve cache with key ${key}, another job may be creating this cache.`); } - core15.debug(`Attempting to upload cache located at: ${archivePath}`); + core16.debug(`Attempting to upload cache located at: ${archivePath}`); yield cacheHttpClient.saveCache(cacheId, archivePath, signedUploadUrl, options); const finalizeRequest = { key, @@ -98922,7 +98922,7 @@ var require_cache5 = __commonJS({ sizeBytes: `${archiveFileSize}` }; const finalizeResponse = yield twirpClient.FinalizeCacheEntryUpload(finalizeRequest); - core15.debug(`FinalizeCacheEntryUploadResponse: ${finalizeResponse.ok}`); + core16.debug(`FinalizeCacheEntryUploadResponse: ${finalizeResponse.ok}`); if (!finalizeResponse.ok) { if (finalizeResponse.message) { throw new FinalizeCacheError(finalizeResponse.message); @@ -98935,21 +98935,21 @@ var require_cache5 = __commonJS({ if (typedError.name === ValidationError.name) { throw error3; } else if (typedError.name === ReserveCacheError2.name) { - 
core15.info(`Failed to save: ${typedError.message}`); + core16.info(`Failed to save: ${typedError.message}`); } else if (typedError.name === FinalizeCacheError.name) { - core15.warning(typedError.message); + core16.warning(typedError.message); } else { if (typedError instanceof http_client_1.HttpClientError && typeof typedError.statusCode === "number" && typedError.statusCode >= 500) { - core15.error(`Failed to save: ${typedError.message}`); + core16.error(`Failed to save: ${typedError.message}`); } else { - core15.warning(`Failed to save: ${typedError.message}`); + core16.warning(`Failed to save: ${typedError.message}`); } } } finally { try { yield utils.unlinkFile(archivePath); } catch (error3) { - core15.debug(`Failed to delete archive: ${error3}`); + core16.debug(`Failed to delete archive: ${error3}`); } } return cacheId; @@ -99176,7 +99176,7 @@ var require_retry_helper = __commonJS({ }; Object.defineProperty(exports2, "__esModule", { value: true }); exports2.RetryHelper = void 0; - var core15 = __importStar2(require_core()); + var core16 = __importStar2(require_core()); var RetryHelper = class { constructor(maxAttempts, minSeconds, maxSeconds) { if (maxAttempts < 1) { @@ -99199,10 +99199,10 @@ var require_retry_helper = __commonJS({ if (isRetryable && !isRetryable(err)) { throw err; } - core15.info(err.message); + core16.info(err.message); } const seconds = this.getSleepAmount(); - core15.info(`Waiting ${seconds} seconds before trying again`); + core16.info(`Waiting ${seconds} seconds before trying again`); yield this.sleep(seconds); attempt++; } @@ -99305,7 +99305,7 @@ var require_tool_cache = __commonJS({ exports2.findFromManifest = findFromManifest; exports2.isExplicitVersion = isExplicitVersion; exports2.evaluateVersions = evaluateVersions; - var core15 = __importStar2(require_core()); + var core16 = __importStar2(require_core()); var io7 = __importStar2(require_io()); var crypto3 = __importStar2(require("crypto")); var fs18 = __importStar2(require("fs")); 
@@ -99334,8 +99334,8 @@ var require_tool_cache = __commonJS({ return __awaiter2(this, void 0, void 0, function* () { dest = dest || path16.join(_getTempDirectory(), crypto3.randomUUID()); yield io7.mkdirP(path16.dirname(dest)); - core15.debug(`Downloading ${url2}`); - core15.debug(`Destination ${dest}`); + core16.debug(`Downloading ${url2}`); + core16.debug(`Destination ${dest}`); const maxAttempts = 3; const minSeconds = _getGlobal("TEST_DOWNLOAD_TOOL_RETRY_MIN_SECONDS", 10); const maxSeconds = _getGlobal("TEST_DOWNLOAD_TOOL_RETRY_MAX_SECONDS", 20); @@ -99361,7 +99361,7 @@ var require_tool_cache = __commonJS({ allowRetries: false }); if (auth2) { - core15.debug("set auth"); + core16.debug("set auth"); if (headers === void 0) { headers = {}; } @@ -99370,7 +99370,7 @@ var require_tool_cache = __commonJS({ const response = yield http.get(url2, headers); if (response.message.statusCode !== 200) { const err = new HTTPError2(response.message.statusCode); - core15.debug(`Failed to download from "${url2}". Code(${response.message.statusCode}) Message(${response.message.statusMessage})`); + core16.debug(`Failed to download from "${url2}". Code(${response.message.statusCode}) Message(${response.message.statusMessage})`); throw err; } const pipeline = util.promisify(stream2.pipeline); @@ -99379,16 +99379,16 @@ var require_tool_cache = __commonJS({ let succeeded = false; try { yield pipeline(readStream, fs18.createWriteStream(dest)); - core15.debug("download complete"); + core16.debug("download complete"); succeeded = true; return dest; } finally { if (!succeeded) { - core15.debug("download failed"); + core16.debug("download failed"); try { yield io7.rmRF(dest); } catch (err) { - core15.debug(`Failed to delete '${dest}'. ${err.message}`); + core16.debug(`Failed to delete '${dest}'. ${err.message}`); } } } @@ -99403,7 +99403,7 @@ var require_tool_cache = __commonJS({ process.chdir(dest); if (_7zPath) { try { - const logLevel = core15.isDebug() ? 
"-bb1" : "-bb0"; + const logLevel = core16.isDebug() ? "-bb1" : "-bb0"; const args = [ "x", // eXtract files with full paths @@ -99456,7 +99456,7 @@ var require_tool_cache = __commonJS({ throw new Error("parameter 'file' is required"); } dest = yield _createExtractFolder(dest); - core15.debug("Checking tar --version"); + core16.debug("Checking tar --version"); let versionOutput = ""; yield (0, exec_1.exec)("tar --version", [], { ignoreReturnCode: true, @@ -99466,7 +99466,7 @@ var require_tool_cache = __commonJS({ stderr: (data) => versionOutput += data.toString() } }); - core15.debug(versionOutput.trim()); + core16.debug(versionOutput.trim()); const isGnuTar = versionOutput.toUpperCase().includes("GNU TAR"); let args; if (flags instanceof Array) { @@ -99474,7 +99474,7 @@ var require_tool_cache = __commonJS({ } else { args = [flags]; } - if (core15.isDebug() && !flags.includes("v")) { + if (core16.isDebug() && !flags.includes("v")) { args.push("-v"); } let destArg = dest; @@ -99505,7 +99505,7 @@ var require_tool_cache = __commonJS({ args = [flags]; } args.push("-x", "-C", dest, "-f", file); - if (core15.isDebug()) { + if (core16.isDebug()) { args.push("-v"); } const xarPath = yield io7.which("xar", true); @@ -99548,7 +99548,7 @@ var require_tool_cache = __commonJS({ "-Command", pwshCommand ]; - core15.debug(`Using pwsh at path: ${pwshPath}`); + core16.debug(`Using pwsh at path: ${pwshPath}`); yield (0, exec_1.exec)(`"${pwshPath}"`, args); } else { const powershellCommand = [ @@ -99568,7 +99568,7 @@ var require_tool_cache = __commonJS({ powershellCommand ]; const powershellPath = yield io7.which("powershell", true); - core15.debug(`Using powershell at path: ${powershellPath}`); + core16.debug(`Using powershell at path: ${powershellPath}`); yield (0, exec_1.exec)(`"${powershellPath}"`, args); } }); @@ -99577,7 +99577,7 @@ var require_tool_cache = __commonJS({ return __awaiter2(this, void 0, void 0, function* () { const unzipPath = yield io7.which("unzip", true); const 
args = [file]; - if (!core15.isDebug()) { + if (!core16.isDebug()) { args.unshift("-q"); } args.unshift("-o"); @@ -99588,8 +99588,8 @@ var require_tool_cache = __commonJS({ return __awaiter2(this, void 0, void 0, function* () { version = semver9.clean(version) || version; arch2 = arch2 || os5.arch(); - core15.debug(`Caching tool ${tool} ${version} ${arch2}`); - core15.debug(`source dir: ${sourceDir}`); + core16.debug(`Caching tool ${tool} ${version} ${arch2}`); + core16.debug(`source dir: ${sourceDir}`); if (!fs18.statSync(sourceDir).isDirectory()) { throw new Error("sourceDir is not a directory"); } @@ -99606,14 +99606,14 @@ var require_tool_cache = __commonJS({ return __awaiter2(this, void 0, void 0, function* () { version = semver9.clean(version) || version; arch2 = arch2 || os5.arch(); - core15.debug(`Caching tool ${tool} ${version} ${arch2}`); - core15.debug(`source file: ${sourceFile}`); + core16.debug(`Caching tool ${tool} ${version} ${arch2}`); + core16.debug(`source file: ${sourceFile}`); if (!fs18.statSync(sourceFile).isFile()) { throw new Error("sourceFile is not a file"); } const destFolder = yield _createToolPath(tool, version, arch2); const destPath = path16.join(destFolder, targetFile); - core15.debug(`destination file ${destPath}`); + core16.debug(`destination file ${destPath}`); yield io7.cp(sourceFile, destPath); _completeToolPath(tool, version, arch2); return destFolder; @@ -99636,12 +99636,12 @@ var require_tool_cache = __commonJS({ if (versionSpec) { versionSpec = semver9.clean(versionSpec) || ""; const cachePath = path16.join(_getCacheDirectory(), toolName, versionSpec, arch2); - core15.debug(`checking cache: ${cachePath}`); + core16.debug(`checking cache: ${cachePath}`); if (fs18.existsSync(cachePath) && fs18.existsSync(`${cachePath}.complete`)) { - core15.debug(`Found tool in cache ${toolName} ${versionSpec} ${arch2}`); + core16.debug(`Found tool in cache ${toolName} ${versionSpec} ${arch2}`); toolPath = cachePath; } else { - 
core15.debug("not found"); + core16.debug("not found"); } } return toolPath; @@ -99670,7 +99670,7 @@ var require_tool_cache = __commonJS({ const http = new httpm.HttpClient("tool-cache"); const headers = {}; if (auth2) { - core15.debug("set auth"); + core16.debug("set auth"); headers.authorization = auth2; } const response = yield http.getJson(treeUrl, headers); @@ -99691,7 +99691,7 @@ var require_tool_cache = __commonJS({ try { releases = JSON.parse(versionsRaw); } catch (_a) { - core15.debug("Invalid json"); + core16.debug("Invalid json"); } } return releases; @@ -99715,7 +99715,7 @@ var require_tool_cache = __commonJS({ function _createToolPath(tool, version, arch2) { return __awaiter2(this, void 0, void 0, function* () { const folderPath = path16.join(_getCacheDirectory(), tool, semver9.clean(version) || version, arch2 || ""); - core15.debug(`destination ${folderPath}`); + core16.debug(`destination ${folderPath}`); const markerPath = `${folderPath}.complete`; yield io7.rmRF(folderPath); yield io7.rmRF(markerPath); @@ -99727,18 +99727,18 @@ var require_tool_cache = __commonJS({ const folderPath = path16.join(_getCacheDirectory(), tool, semver9.clean(version) || version, arch2 || ""); const markerPath = `${folderPath}.complete`; fs18.writeFileSync(markerPath, ""); - core15.debug("finished caching tool"); + core16.debug("finished caching tool"); } function isExplicitVersion(versionSpec) { const c = semver9.clean(versionSpec) || ""; - core15.debug(`isExplicit: ${c}`); + core16.debug(`isExplicit: ${c}`); const valid3 = semver9.valid(c) != null; - core15.debug(`explicit? ${valid3}`); + core16.debug(`explicit? 
${valid3}`); return valid3; } function evaluateVersions(versions, versionSpec) { let version = ""; - core15.debug(`evaluating ${versions.length} versions`); + core16.debug(`evaluating ${versions.length} versions`); versions = versions.sort((a, b) => { if (semver9.gt(a, b)) { return 1; @@ -99754,9 +99754,9 @@ var require_tool_cache = __commonJS({ } } if (version) { - core15.debug(`matched: ${version}`); + core16.debug(`matched: ${version}`); } else { - core15.debug("match not found"); + core16.debug("match not found"); } return version; } @@ -103236,7 +103236,7 @@ module.exports = __toCommonJS(analyze_action_exports); var fs17 = __toESM(require("fs")); var import_path4 = __toESM(require("path")); var import_perf_hooks3 = require("perf_hooks"); -var core14 = __toESM(require_core()); +var core15 = __toESM(require_core()); // src/actions-util.ts var fs2 = __toESM(require("fs")); @@ -107232,6 +107232,7 @@ var semver2 = __toESM(require_semver2()); var RepositoryPropertyName = /* @__PURE__ */ ((RepositoryPropertyName2) => { RepositoryPropertyName2["DISABLE_OVERLAY"] = "github-codeql-disable-overlay"; RepositoryPropertyName2["EXTRA_QUERIES"] = "github-codeql-extra-queries"; + RepositoryPropertyName2["FILE_COVERAGE_ON_PRS"] = "github-codeql-file-coverage-on-prs"; return RepositoryPropertyName2; })(RepositoryPropertyName || {}); var KNOWN_REPOSITORY_PROPERTY_NAMES = new Set( @@ -107970,11 +107971,8 @@ var featureConfig = { ["skip_file_coverage_on_prs" /* SkipFileCoverageOnPrs */]: { defaultValue: false, envVar: "CODEQL_ACTION_SKIP_FILE_COVERAGE_ON_PRS", - // For testing, this is not behind a CLI version check yet. However - // before rolling this out externally, we should set a minimum version here - // since current versions of the CodeQL CLI will log if baseline information - // cannot be found when interpreting results. 
- minimumVersion: void 0 + minimumVersion: void 0, + toolsFeature: "suppressesMissingFileBaselineWarning" /* SuppressesMissingFileBaselineWarning */ }, ["start_proxy_remove_unused_registries" /* StartProxyRemoveUnusedRegistries */]: { defaultValue: false, @@ -111229,7 +111227,7 @@ var fs16 = __toESM(require("fs")); var path14 = __toESM(require("path")); var url = __toESM(require("url")); var import_zlib = __toESM(require("zlib")); -var core13 = __toESM(require_core()); +var core14 = __toESM(require_core()); var jsonschema2 = __toESM(require_lib2()); // src/fingerprints.ts @@ -112355,7 +112353,9 @@ async function addFingerprints(sarifLog, sourceRoot, logger) { } // src/init.ts +var core13 = __toESM(require_core()); var toolrunner4 = __toESM(require_toolrunner()); +var github2 = __toESM(require_github()); var io6 = __toESM(require_io()); async function initCodeQL(toolsInput, apiDetails, tempDir, variant, defaultCliVersion, features, logger) { logger.startGroup("Setup CodeQL tools"); @@ -112497,7 +112497,7 @@ async function combineSarifFilesUsingCLI(sarifFiles, gitHubVersion, features, lo logger.warning( `Uploading multiple SARIF runs with the same category is deprecated ${deprecationWarningMessage}. Please update your workflow to upload a single run per category. 
${deprecationMoreInformationMessage}` ); - core13.exportVariable("CODEQL_MERGE_SARIF_DEPRECATION_WARNING", "true"); + core14.exportVariable("CODEQL_MERGE_SARIF_DEPRECATION_WARNING", "true"); } return combineSarifFiles(sarifFiles, logger); } @@ -112596,13 +112596,13 @@ async function uploadPayload(payload, repositoryNwo, logger, analysis) { if (httpError !== void 0) { switch (httpError.status) { case 403: - core13.warning(httpError.message || GENERIC_403_MSG); + core14.warning(httpError.message || GENERIC_403_MSG); break; case 404: - core13.warning(httpError.message || GENERIC_404_MSG); + core14.warning(httpError.message || GENERIC_404_MSG); break; default: - core13.warning(httpError.message); + core14.warning(httpError.message); break; } } @@ -112989,7 +112989,7 @@ function validateUniqueCategory(sarifLog, sentinelPrefix) { `Aborting upload: only one run of the codeql/analyze or codeql/upload-sarif actions is allowed per job per tool/category. The easiest fix is to specify a unique value for the \`category\` input. If .runs[].automationDetails.id is specified in the sarif file, that will take precedence over your configured \`category\`. Category: (${id ? id : "none"}) Tool: (${tool ? 
tool : "none"})` ); } - core13.exportVariable(sentinelEnvVar, sentinelEnvVar); + core14.exportVariable(sentinelEnvVar, sentinelEnvVar); } } function sanitize(str2) { @@ -113206,7 +113206,7 @@ async function run(startedAt2) { } const apiDetails = getApiDetails(); const outputDir = getRequiredInput("output"); - core14.exportVariable("CODEQL_ACTION_SARIF_RESULTS_OUTPUT_DIR" /* SARIF_RESULTS_OUTPUT_DIR */, outputDir); + core15.exportVariable("CODEQL_ACTION_SARIF_RESULTS_OUTPUT_DIR" /* SARIF_RESULTS_OUTPUT_DIR */, outputDir); const threads = getThreadsFlag( getOptionalInput("threads") || process.env["CODEQL_THREADS"], logger @@ -113263,8 +113263,8 @@ async function run(startedAt2) { for (const language of config.languages) { dbLocations[language] = getCodeQLDatabasePath(config, language); } - core14.setOutput("db-locations", dbLocations); - core14.setOutput("sarif-output", import_path4.default.resolve(outputDir)); + core15.setOutput("db-locations", dbLocations); + core15.setOutput("sarif-output", import_path4.default.resolve(outputDir)); const uploadKind = getUploadValue( getOptionalInput("upload") ); @@ -113281,13 +113281,13 @@ async function run(startedAt2) { getOptionalInput("post-processed-sarif-path") ); if (uploadResults["code-scanning" /* CodeScanning */] !== void 0) { - core14.setOutput( + core15.setOutput( "sarif-id", uploadResults["code-scanning" /* CodeScanning */].sarifID ); } if (uploadResults["code-quality" /* CodeQuality */] !== void 0) { - core14.setOutput( + core15.setOutput( "quality-sarif-id", uploadResults["code-quality" /* CodeQuality */].sarifID ); @@ -113330,15 +113330,15 @@ async function run(startedAt2) { ); } if (getOptionalInput("expect-error") === "true") { - core14.setFailed( + core15.setFailed( `expect-error input was set to true but no error was thrown.` ); } - core14.exportVariable("CODEQL_ACTION_ANALYZE_DID_COMPLETE_SUCCESSFULLY" /* ANALYZE_DID_COMPLETE_SUCCESSFULLY */, "true"); + 
core15.exportVariable("CODEQL_ACTION_ANALYZE_DID_COMPLETE_SUCCESSFULLY" /* ANALYZE_DID_COMPLETE_SUCCESSFULLY */, "true"); } catch (unwrappedError) { const error3 = wrapError(unwrappedError); if (getOptionalInput("expect-error") !== "true" || hasBadExpectErrorInput()) { - core14.setFailed(error3.message); + core15.setFailed(error3.message); } await sendStatusReport2( startedAt2, @@ -113409,7 +113409,7 @@ async function runWrapper() { try { await runPromise; } catch (error3) { - core14.setFailed(`analyze action failed: ${getErrorMessage(error3)}`); + core15.setFailed(`analyze action failed: ${getErrorMessage(error3)}`); await sendUnhandledErrorStatusReport( "finish" /* Analyze */, startedAt, diff --git a/lib/autobuild-action.js b/lib/autobuild-action.js index acd1b250e7..9724643100 100644 --- a/lib/autobuild-action.js +++ b/lib/autobuild-action.js @@ -103786,6 +103786,7 @@ var semver2 = __toESM(require_semver2()); var RepositoryPropertyName = /* @__PURE__ */ ((RepositoryPropertyName2) => { RepositoryPropertyName2["DISABLE_OVERLAY"] = "github-codeql-disable-overlay"; RepositoryPropertyName2["EXTRA_QUERIES"] = "github-codeql-extra-queries"; + RepositoryPropertyName2["FILE_COVERAGE_ON_PRS"] = "github-codeql-file-coverage-on-prs"; return RepositoryPropertyName2; })(RepositoryPropertyName || {}); var KNOWN_REPOSITORY_PROPERTY_NAMES = new Set( @@ -104270,11 +104271,8 @@ var featureConfig = { ["skip_file_coverage_on_prs" /* SkipFileCoverageOnPrs */]: { defaultValue: false, envVar: "CODEQL_ACTION_SKIP_FILE_COVERAGE_ON_PRS", - // For testing, this is not behind a CLI version check yet. However - // before rolling this out externally, we should set a minimum version here - // since current versions of the CodeQL CLI will log if baseline information - // cannot be found when interpreting results. 
- minimumVersion: void 0 + minimumVersion: void 0, + toolsFeature: "suppressesMissingFileBaselineWarning" /* SuppressesMissingFileBaselineWarning */ }, ["start_proxy_remove_unused_registries" /* StartProxyRemoveUnusedRegistries */]: { defaultValue: false, diff --git a/lib/init-action-post.js b/lib/init-action-post.js index d457853ad0..8d1b4b710f 100644 --- a/lib/init-action-post.js +++ b/lib/init-action-post.js @@ -3842,18 +3842,18 @@ var require_webidl = __commonJS({ webidl.errors.exception = function(message) { return new TypeError(`${message.header}: ${message.message}`); }; - webidl.errors.conversionFailed = function(context3) { - const plural = context3.types.length === 1 ? "" : " one of"; - const message = `${context3.argument} could not be converted to${plural}: ${context3.types.join(", ")}.`; + webidl.errors.conversionFailed = function(context4) { + const plural = context4.types.length === 1 ? "" : " one of"; + const message = `${context4.argument} could not be converted to${plural}: ${context4.types.join(", ")}.`; return webidl.errors.exception({ - header: context3.prefix, + header: context4.prefix, message }); }; - webidl.errors.invalidArgument = function(context3) { + webidl.errors.invalidArgument = function(context4) { return webidl.errors.exception({ - header: context3.prefix, - message: `"${context3.value}" is an invalid ${context3.type}.` + header: context4.prefix, + message: `"${context4.value}" is an invalid ${context4.type}.` }); }; webidl.brandCheck = function(V, I, opts) { @@ -9849,17 +9849,17 @@ var require_api_request = __commonJS({ } } } - onConnect(abort, context3) { + onConnect(abort, context4) { if (this.reason) { abort(this.reason); return; } assert(this.callback); this.abort = abort; - this.context = context3; + this.context = context4; } onHeaders(statusCode, rawHeaders, resume, statusMessage) { - const { callback, opaque, abort, context: context3, responseHeaders, highWaterMark } = this; + const { callback, opaque, abort, context: 
context4, responseHeaders, highWaterMark } = this; const headers = responseHeaders === "raw" ? util.parseRawHeaders(rawHeaders) : util.parseHeaders(rawHeaders); if (statusCode < 200) { if (this.onInfo) { @@ -9896,7 +9896,7 @@ var require_api_request = __commonJS({ trailers: this.trailers, opaque, body: res, - context: context3 + context: context4 }); } } @@ -10065,17 +10065,17 @@ var require_api_stream = __commonJS({ } addSignal(this, signal); } - onConnect(abort, context3) { + onConnect(abort, context4) { if (this.reason) { abort(this.reason); return; } assert(this.callback); this.abort = abort; - this.context = context3; + this.context = context4; } onHeaders(statusCode, rawHeaders, resume, statusMessage) { - const { factory, opaque, context: context3, callback, responseHeaders } = this; + const { factory, opaque, context: context4, callback, responseHeaders } = this; const headers = responseHeaders === "raw" ? util.parseRawHeaders(rawHeaders) : util.parseHeaders(rawHeaders); if (statusCode < 200) { if (this.onInfo) { @@ -10103,7 +10103,7 @@ var require_api_stream = __commonJS({ statusCode, headers, opaque, - context: context3 + context: context4 }); if (!res || typeof res.write !== "function" || typeof res.end !== "function" || typeof res.on !== "function") { throw new InvalidReturnValueError("expected Writable"); @@ -10295,7 +10295,7 @@ var require_api_pipeline = __commonJS({ this.res = null; addSignal(this, signal); } - onConnect(abort, context3) { + onConnect(abort, context4) { const { ret, res } = this; if (this.reason) { abort(this.reason); @@ -10304,10 +10304,10 @@ var require_api_pipeline = __commonJS({ assert(!res, "pipeline cannot be retried"); assert(!ret.destroyed); this.abort = abort; - this.context = context3; + this.context = context4; } onHeaders(statusCode, rawHeaders, resume) { - const { opaque, handler: handler2, context: context3 } = this; + const { opaque, handler: handler2, context: context4 } = this; if (statusCode < 200) { if (this.onInfo) 
{ const headers = this.responseHeaders === "raw" ? util.parseRawHeaders(rawHeaders) : util.parseHeaders(rawHeaders); @@ -10325,7 +10325,7 @@ var require_api_pipeline = __commonJS({ headers, opaque, body: this.res, - context: context3 + context: context4 }); } catch (err) { this.res.on("error", util.nop); @@ -10409,7 +10409,7 @@ var require_api_upgrade = __commonJS({ this.context = null; addSignal(this, signal); } - onConnect(abort, context3) { + onConnect(abort, context4) { if (this.reason) { abort(this.reason); return; @@ -10423,7 +10423,7 @@ var require_api_upgrade = __commonJS({ } onUpgrade(statusCode, rawHeaders, socket) { assert(statusCode === 101); - const { callback, opaque, context: context3 } = this; + const { callback, opaque, context: context4 } = this; removeSignal(this); this.callback = null; const headers = this.responseHeaders === "raw" ? util.parseRawHeaders(rawHeaders) : util.parseHeaders(rawHeaders); @@ -10431,7 +10431,7 @@ var require_api_upgrade = __commonJS({ headers, socket, opaque, - context: context3 + context: context4 }); } onError(err) { @@ -10500,20 +10500,20 @@ var require_api_connect = __commonJS({ this.abort = null; addSignal(this, signal); } - onConnect(abort, context3) { + onConnect(abort, context4) { if (this.reason) { abort(this.reason); return; } assert(this.callback); this.abort = abort; - this.context = context3; + this.context = context4; } onHeaders() { throw new SocketError("bad connect", null); } onUpgrade(statusCode, rawHeaders, socket) { - const { callback, opaque, context: context3 } = this; + const { callback, opaque, context: context4 } = this; removeSignal(this); this.callback = null; let headers = rawHeaders; @@ -10525,7 +10525,7 @@ var require_api_connect = __commonJS({ headers, socket, opaque, - context: context3 + context: context4 }); } onError(err) { @@ -21321,7 +21321,7 @@ var require_core = __commonJS({ }; Object.defineProperty(exports2, "__esModule", { value: true }); exports2.platform = 
exports2.toPlatformPath = exports2.toWin32Path = exports2.toPosixPath = exports2.markdownSummary = exports2.summary = exports2.ExitCode = void 0; - exports2.exportVariable = exportVariable9; + exports2.exportVariable = exportVariable10; exports2.setSecret = setSecret; exports2.addPath = addPath; exports2.getInput = getInput2; @@ -21353,7 +21353,7 @@ var require_core = __commonJS({ ExitCode2[ExitCode2["Success"] = 0] = "Success"; ExitCode2[ExitCode2["Failure"] = 1] = "Failure"; })(ExitCode || (exports2.ExitCode = ExitCode = {})); - function exportVariable9(name, val) { + function exportVariable10(name, val) { const convertedVal = (0, utils_1.toCommandValue)(val); process.env[name] = convertedVal; const filePath = process.env["GITHUB_ENV"] || ""; @@ -24840,18 +24840,18 @@ var require_webidl2 = __commonJS({ webidl.errors.exception = function(message) { return new TypeError(`${message.header}: ${message.message}`); }; - webidl.errors.conversionFailed = function(context3) { - const plural = context3.types.length === 1 ? "" : " one of"; - const message = `${context3.argument} could not be converted to${plural}: ${context3.types.join(", ")}.`; + webidl.errors.conversionFailed = function(context4) { + const plural = context4.types.length === 1 ? 
"" : " one of"; + const message = `${context4.argument} could not be converted to${plural}: ${context4.types.join(", ")}.`; return webidl.errors.exception({ - header: context3.prefix, + header: context4.prefix, message }); }; - webidl.errors.invalidArgument = function(context3) { + webidl.errors.invalidArgument = function(context4) { return webidl.errors.exception({ - header: context3.prefix, - message: `"${context3.value}" is an invalid ${context3.type}.` + header: context4.prefix, + message: `"${context4.value}" is an invalid ${context4.type}.` }); }; webidl.brandCheck = function(V, I, opts) { @@ -30847,17 +30847,17 @@ var require_api_request2 = __commonJS({ } } } - onConnect(abort, context3) { + onConnect(abort, context4) { if (this.reason) { abort(this.reason); return; } assert(this.callback); this.abort = abort; - this.context = context3; + this.context = context4; } onHeaders(statusCode, rawHeaders, resume, statusMessage) { - const { callback, opaque, abort, context: context3, responseHeaders, highWaterMark } = this; + const { callback, opaque, abort, context: context4, responseHeaders, highWaterMark } = this; const headers = responseHeaders === "raw" ? 
util.parseRawHeaders(rawHeaders) : util.parseHeaders(rawHeaders); if (statusCode < 200) { if (this.onInfo) { @@ -30894,7 +30894,7 @@ var require_api_request2 = __commonJS({ trailers: this.trailers, opaque, body: res, - context: context3 + context: context4 }); } } @@ -31063,17 +31063,17 @@ var require_api_stream2 = __commonJS({ } addSignal(this, signal); } - onConnect(abort, context3) { + onConnect(abort, context4) { if (this.reason) { abort(this.reason); return; } assert(this.callback); this.abort = abort; - this.context = context3; + this.context = context4; } onHeaders(statusCode, rawHeaders, resume, statusMessage) { - const { factory, opaque, context: context3, callback, responseHeaders } = this; + const { factory, opaque, context: context4, callback, responseHeaders } = this; const headers = responseHeaders === "raw" ? util.parseRawHeaders(rawHeaders) : util.parseHeaders(rawHeaders); if (statusCode < 200) { if (this.onInfo) { @@ -31101,7 +31101,7 @@ var require_api_stream2 = __commonJS({ statusCode, headers, opaque, - context: context3 + context: context4 }); if (!res || typeof res.write !== "function" || typeof res.end !== "function" || typeof res.on !== "function") { throw new InvalidReturnValueError("expected Writable"); @@ -31293,7 +31293,7 @@ var require_api_pipeline2 = __commonJS({ this.res = null; addSignal(this, signal); } - onConnect(abort, context3) { + onConnect(abort, context4) { const { ret, res } = this; if (this.reason) { abort(this.reason); @@ -31302,10 +31302,10 @@ var require_api_pipeline2 = __commonJS({ assert(!res, "pipeline cannot be retried"); assert(!ret.destroyed); this.abort = abort; - this.context = context3; + this.context = context4; } onHeaders(statusCode, rawHeaders, resume) { - const { opaque, handler: handler2, context: context3 } = this; + const { opaque, handler: handler2, context: context4 } = this; if (statusCode < 200) { if (this.onInfo) { const headers = this.responseHeaders === "raw" ? 
util.parseRawHeaders(rawHeaders) : util.parseHeaders(rawHeaders); @@ -31323,7 +31323,7 @@ var require_api_pipeline2 = __commonJS({ headers, opaque, body: this.res, - context: context3 + context: context4 }); } catch (err) { this.res.on("error", util.nop); @@ -31407,7 +31407,7 @@ var require_api_upgrade2 = __commonJS({ this.context = null; addSignal(this, signal); } - onConnect(abort, context3) { + onConnect(abort, context4) { if (this.reason) { abort(this.reason); return; @@ -31421,7 +31421,7 @@ var require_api_upgrade2 = __commonJS({ } onUpgrade(statusCode, rawHeaders, socket) { assert(statusCode === 101); - const { callback, opaque, context: context3 } = this; + const { callback, opaque, context: context4 } = this; removeSignal(this); this.callback = null; const headers = this.responseHeaders === "raw" ? util.parseRawHeaders(rawHeaders) : util.parseHeaders(rawHeaders); @@ -31429,7 +31429,7 @@ var require_api_upgrade2 = __commonJS({ headers, socket, opaque, - context: context3 + context: context4 }); } onError(err) { @@ -31498,20 +31498,20 @@ var require_api_connect2 = __commonJS({ this.abort = null; addSignal(this, signal); } - onConnect(abort, context3) { + onConnect(abort, context4) { if (this.reason) { abort(this.reason); return; } assert(this.callback); this.abort = abort; - this.context = context3; + this.context = context4; } onHeaders() { throw new SocketError("bad connect", null); } onUpgrade(statusCode, rawHeaders, socket) { - const { callback, opaque, context: context3 } = this; + const { callback, opaque, context: context4 } = this; removeSignal(this); this.callback = null; let headers = rawHeaders; @@ -31523,7 +31523,7 @@ var require_api_connect2 = __commonJS({ headers, socket, opaque, - context: context3 + context: context4 }); } onError(err) { @@ -40178,8 +40178,8 @@ function isDefined(value) { function isKeyOperator(operator) { return operator === ";" || operator === "&" || operator === "?"; } -function getValues(context3, operator, key, modifier) 
{ - var value = context3[key], result = []; +function getValues(context4, operator, key, modifier) { + var value = context4[key], result = []; if (isDefined(value) && value !== "") { if (typeof value === "string" || typeof value === "number" || typeof value === "boolean") { value = value.toString(); @@ -40243,7 +40243,7 @@ function parseUrl(template) { expand: expand.bind(null, template) }; } -function expand(template, context3) { +function expand(template, context4) { var operators = ["+", "#", ".", "/", ";", "?", "&"]; template = template.replace( /\{([^\{\}]+)\}|([^\{\}]+)/g, @@ -40257,7 +40257,7 @@ function expand(template, context3) { } expression.split(/,/g).forEach(function(variable) { var tmp = /([^:\*]*)(?::(\d+)|(\*))?/.exec(variable); - values.push(getValues(context3, operator, tmp[1], tmp[2] || tmp[3])); + values.push(getValues(context4, operator, tmp[1], tmp[2] || tmp[3])); }); if (operator && operator !== "+") { var separator = ","; @@ -48747,7 +48747,7 @@ var require_internal_glob_options_helper = __commonJS({ })(); Object.defineProperty(exports2, "__esModule", { value: true }); exports2.getOptions = getOptions; - var core17 = __importStar2(require_core()); + var core18 = __importStar2(require_core()); function getOptions(copy) { const result = { followSymbolicLinks: true, @@ -48759,23 +48759,23 @@ var require_internal_glob_options_helper = __commonJS({ if (copy) { if (typeof copy.followSymbolicLinks === "boolean") { result.followSymbolicLinks = copy.followSymbolicLinks; - core17.debug(`followSymbolicLinks '${result.followSymbolicLinks}'`); + core18.debug(`followSymbolicLinks '${result.followSymbolicLinks}'`); } if (typeof copy.implicitDescendants === "boolean") { result.implicitDescendants = copy.implicitDescendants; - core17.debug(`implicitDescendants '${result.implicitDescendants}'`); + core18.debug(`implicitDescendants '${result.implicitDescendants}'`); } if (typeof copy.matchDirectories === "boolean") { result.matchDirectories = 
copy.matchDirectories; - core17.debug(`matchDirectories '${result.matchDirectories}'`); + core18.debug(`matchDirectories '${result.matchDirectories}'`); } if (typeof copy.omitBrokenSymbolicLinks === "boolean") { result.omitBrokenSymbolicLinks = copy.omitBrokenSymbolicLinks; - core17.debug(`omitBrokenSymbolicLinks '${result.omitBrokenSymbolicLinks}'`); + core18.debug(`omitBrokenSymbolicLinks '${result.omitBrokenSymbolicLinks}'`); } if (typeof copy.excludeHiddenFiles === "boolean") { result.excludeHiddenFiles = copy.excludeHiddenFiles; - core17.debug(`excludeHiddenFiles '${result.excludeHiddenFiles}'`); + core18.debug(`excludeHiddenFiles '${result.excludeHiddenFiles}'`); } } return result; @@ -50403,7 +50403,7 @@ var require_internal_globber = __commonJS({ }; Object.defineProperty(exports2, "__esModule", { value: true }); exports2.DefaultGlobber = void 0; - var core17 = __importStar2(require_core()); + var core18 = __importStar2(require_core()); var fs20 = __importStar2(require("fs")); var globOptionsHelper = __importStar2(require_internal_glob_options_helper()); var path19 = __importStar2(require("path")); @@ -50456,7 +50456,7 @@ var require_internal_globber = __commonJS({ } const stack = []; for (const searchPath of patternHelper.getSearchPaths(patterns)) { - core17.debug(`Search path '${searchPath}'`); + core18.debug(`Search path '${searchPath}'`); try { yield __await2(fs20.promises.lstat(searchPath)); } catch (err) { @@ -50531,7 +50531,7 @@ var require_internal_globber = __commonJS({ } catch (err) { if (err.code === "ENOENT") { if (options.omitBrokenSymbolicLinks) { - core17.debug(`Broken symlink '${item.path}'`); + core18.debug(`Broken symlink '${item.path}'`); return void 0; } throw new Error(`No information found for the path '${item.path}'. 
This may indicate a broken symbolic link.`); @@ -50547,7 +50547,7 @@ var require_internal_globber = __commonJS({ traversalChain.pop(); } if (traversalChain.some((x) => x === realPath)) { - core17.debug(`Symlink cycle detected for path '${item.path}' and realpath '${realPath}'`); + core18.debug(`Symlink cycle detected for path '${item.path}' and realpath '${realPath}'`); return void 0; } traversalChain.push(realPath); @@ -50650,7 +50650,7 @@ var require_internal_hash_files = __commonJS({ Object.defineProperty(exports2, "__esModule", { value: true }); exports2.hashFiles = hashFiles2; var crypto2 = __importStar2(require("crypto")); - var core17 = __importStar2(require_core()); + var core18 = __importStar2(require_core()); var fs20 = __importStar2(require("fs")); var stream2 = __importStar2(require("stream")); var util = __importStar2(require("util")); @@ -50659,7 +50659,7 @@ var require_internal_hash_files = __commonJS({ return __awaiter2(this, arguments, void 0, function* (globber, currentWorkspace, verbose = false) { var _a, e_1, _b, _c; var _d; - const writeDelegate = verbose ? core17.info : core17.debug; + const writeDelegate = verbose ? core18.info : core18.debug; let hasMatch = false; const githubWorkspace = currentWorkspace ? currentWorkspace : (_d = process.env["GITHUB_WORKSPACE"]) !== null && _d !== void 0 ? 
_d : process.cwd(); const result = crypto2.createHash("sha256"); @@ -52050,7 +52050,7 @@ var require_cacheUtils = __commonJS({ exports2.assertDefined = assertDefined; exports2.getCacheVersion = getCacheVersion; exports2.getRuntimeToken = getRuntimeToken; - var core17 = __importStar2(require_core()); + var core18 = __importStar2(require_core()); var exec3 = __importStar2(require_exec()); var glob2 = __importStar2(require_glob()); var io7 = __importStar2(require_io()); @@ -52101,7 +52101,7 @@ var require_cacheUtils = __commonJS({ _e = false; const file = _c; const relativeFile = path19.relative(workspace, file).replace(new RegExp(`\\${path19.sep}`, "g"), "/"); - core17.debug(`Matched: ${relativeFile}`); + core18.debug(`Matched: ${relativeFile}`); if (relativeFile === "") { paths.push("."); } else { @@ -52129,7 +52129,7 @@ var require_cacheUtils = __commonJS({ return __awaiter2(this, arguments, void 0, function* (app, additionalArgs = []) { let versionOutput = ""; additionalArgs.push("--version"); - core17.debug(`Checking ${app} ${additionalArgs.join(" ")}`); + core18.debug(`Checking ${app} ${additionalArgs.join(" ")}`); try { yield exec3.exec(`${app}`, additionalArgs, { ignoreReturnCode: true, @@ -52140,10 +52140,10 @@ var require_cacheUtils = __commonJS({ } }); } catch (err) { - core17.debug(err.message); + core18.debug(err.message); } versionOutput = versionOutput.trim(); - core17.debug(versionOutput); + core18.debug(versionOutput); return versionOutput; }); } @@ -52151,7 +52151,7 @@ var require_cacheUtils = __commonJS({ return __awaiter2(this, void 0, void 0, function* () { const versionOutput = yield getVersion("zstd", ["--quiet"]); const version = semver9.clean(versionOutput); - core17.debug(`zstd version: ${version}`); + core18.debug(`zstd version: ${version}`); if (versionOutput === "") { return constants_1.CompressionMethod.Gzip; } else { @@ -52276,14 +52276,14 @@ function __esDecorate(ctor, descriptorIn, decorators, contextIn, initializers, e var descriptor 
= descriptorIn || (target ? Object.getOwnPropertyDescriptor(target, contextIn.name) : {}); var _2, done = false; for (var i = decorators.length - 1; i >= 0; i--) { - var context3 = {}; - for (var p in contextIn) context3[p] = p === "access" ? {} : contextIn[p]; - for (var p in contextIn.access) context3.access[p] = contextIn.access[p]; - context3.addInitializer = function(f) { + var context4 = {}; + for (var p in contextIn) context4[p] = p === "access" ? {} : contextIn[p]; + for (var p in contextIn.access) context4.access[p] = contextIn.access[p]; + context4.addInitializer = function(f) { if (done) throw new TypeError("Cannot add initializers after decoration has completed"); extraInitializers.push(accept(f || null)); }; - var result = (0, decorators[i])(kind === "accessor" ? { get: descriptor.get, set: descriptor.set } : descriptor[key], context3); + var result = (0, decorators[i])(kind === "accessor" ? { get: descriptor.get, set: descriptor.set } : descriptor[key], context4); if (kind === "accessor") { if (result === void 0) continue; if (result === null || typeof result !== "object") throw new TypeError("Object expected"); @@ -53010,19 +53010,19 @@ var require_logger = __commonJS({ logger: clientLogger }; } - var context3 = createLoggerContext({ + var context4 = createLoggerContext({ logLevelEnvVarName: "TYPESPEC_RUNTIME_LOG_LEVEL", namespace: "typeSpecRuntime" }); - exports2.TypeSpecRuntimeLogger = context3.logger; + exports2.TypeSpecRuntimeLogger = context4.logger; function setLogLevel(logLevel) { - context3.setLogLevel(logLevel); + context4.setLogLevel(logLevel); } function getLogLevel() { - return context3.getLogLevel(); + return context4.getLogLevel(); } function createClientLogger(namespace) { - return context3.createClientLogger(namespace); + return context4.createClientLogger(namespace); } } }); @@ -57268,19 +57268,19 @@ var require_commonjs2 = __commonJS({ exports2.getLogLevel = getLogLevel; exports2.createClientLogger = createClientLogger; var logger_1 
= require_internal(); - var context3 = (0, logger_1.createLoggerContext)({ + var context4 = (0, logger_1.createLoggerContext)({ logLevelEnvVarName: "AZURE_LOG_LEVEL", namespace: "azure" }); - exports2.AzureLogger = context3.logger; + exports2.AzureLogger = context4.logger; function setLogLevel(level) { - context3.setLogLevel(level); + context4.setLogLevel(level); } function getLogLevel() { - return context3.getLogLevel(); + return context4.getLogLevel(); } function createClientLogger(namespace) { - return context3.createClientLogger(namespace); + return context4.createClientLogger(namespace); } } }); @@ -58190,14 +58190,14 @@ var require_tracingContext = __commonJS({ namespace: /* @__PURE__ */ Symbol.for("@azure/core-tracing namespace") }; function createTracingContext(options = {}) { - let context3 = new TracingContextImpl(options.parentContext); + let context4 = new TracingContextImpl(options.parentContext); if (options.span) { - context3 = context3.setValue(exports2.knownContextKeys.span, options.span); + context4 = context4.setValue(exports2.knownContextKeys.span, options.span); } if (options.namespace) { - context3 = context3.setValue(exports2.knownContextKeys.namespace, options.namespace); + context4 = context4.setValue(exports2.knownContextKeys.namespace, options.namespace); } - return context3; + return context4; } var TracingContextImpl = class _TracingContextImpl { _contextMap; @@ -58335,8 +58335,8 @@ var require_tracingClient = __commonJS({ span.end(); } } - function withContext(context3, callback, ...callbackArgs) { - return (0, instrumenter_js_1.getInstrumenter)().withContext(context3, callback, ...callbackArgs); + function withContext(context4, callback, ...callbackArgs) { + return (0, instrumenter_js_1.getInstrumenter)().withContext(context4, callback, ...callbackArgs); } function parseTraceparentHeader(traceparentHeader) { return (0, instrumenter_js_1.getInstrumenter)().parseTraceparentHeader(traceparentHeader); @@ -91807,7 +91807,7 @@ var 
require_uploadUtils = __commonJS({ Object.defineProperty(exports2, "__esModule", { value: true }); exports2.UploadProgress = void 0; exports2.uploadCacheArchiveSDK = uploadCacheArchiveSDK; - var core17 = __importStar2(require_core()); + var core18 = __importStar2(require_core()); var storage_blob_1 = require_commonjs15(); var errors_1 = require_errors3(); var UploadProgress = class { @@ -91849,7 +91849,7 @@ var require_uploadUtils = __commonJS({ const percentage = (100 * (transferredBytes / this.contentLength)).toFixed(1); const elapsedTime = Date.now() - this.startTime; const uploadSpeed = (transferredBytes / (1024 * 1024) / (elapsedTime / 1e3)).toFixed(1); - core17.info(`Sent ${transferredBytes} of ${this.contentLength} (${percentage}%), ${uploadSpeed} MBs/sec`); + core18.info(`Sent ${transferredBytes} of ${this.contentLength} (${percentage}%), ${uploadSpeed} MBs/sec`); if (this.isDone()) { this.displayedComplete = true; } @@ -91906,14 +91906,14 @@ var require_uploadUtils = __commonJS({ }; try { uploadProgress.startDisplayTimer(); - core17.debug(`BlobClient: ${blobClient.name}:${blobClient.accountName}:${blobClient.containerName}`); + core18.debug(`BlobClient: ${blobClient.name}:${blobClient.accountName}:${blobClient.containerName}`); const response = yield blockBlobClient.uploadFile(archivePath, uploadOptions); if (response._response.status >= 400) { throw new errors_1.InvalidResponseError(`uploadCacheArchiveSDK: upload failed with status code ${response._response.status}`); } return response; } catch (error3) { - core17.warning(`uploadCacheArchiveSDK: internal error uploading cache archive: ${error3.message}`); + core18.warning(`uploadCacheArchiveSDK: internal error uploading cache archive: ${error3.message}`); throw error3; } finally { uploadProgress.stopDisplayTimer(); @@ -91998,7 +91998,7 @@ var require_requestUtils = __commonJS({ exports2.retry = retry2; exports2.retryTypedResponse = retryTypedResponse; exports2.retryHttpClientResponse = 
retryHttpClientResponse; - var core17 = __importStar2(require_core()); + var core18 = __importStar2(require_core()); var http_client_1 = require_lib(); var constants_1 = require_constants12(); function isSuccessStatusCode(statusCode) { @@ -92056,9 +92056,9 @@ var require_requestUtils = __commonJS({ isRetryable = isRetryableStatusCode(statusCode); errorMessage = `Cache service responded with ${statusCode}`; } - core17.debug(`${name} - Attempt ${attempt} of ${maxAttempts} failed with error: ${errorMessage}`); + core18.debug(`${name} - Attempt ${attempt} of ${maxAttempts} failed with error: ${errorMessage}`); if (!isRetryable) { - core17.debug(`${name} - Error is not retryable`); + core18.debug(`${name} - Error is not retryable`); break; } yield sleep(delay2); @@ -92317,7 +92317,7 @@ var require_downloadUtils = __commonJS({ exports2.downloadCacheHttpClient = downloadCacheHttpClient; exports2.downloadCacheHttpClientConcurrent = downloadCacheHttpClientConcurrent; exports2.downloadCacheStorageSDK = downloadCacheStorageSDK; - var core17 = __importStar2(require_core()); + var core18 = __importStar2(require_core()); var http_client_1 = require_lib(); var storage_blob_1 = require_commonjs15(); var buffer = __importStar2(require("buffer")); @@ -92355,7 +92355,7 @@ var require_downloadUtils = __commonJS({ this.segmentIndex = this.segmentIndex + 1; this.segmentSize = segmentSize; this.receivedBytes = 0; - core17.debug(`Downloading segment at offset ${this.segmentOffset} with length ${this.segmentSize}...`); + core18.debug(`Downloading segment at offset ${this.segmentOffset} with length ${this.segmentSize}...`); } /** * Sets the number of bytes received for the current segment. 
@@ -92389,7 +92389,7 @@ var require_downloadUtils = __commonJS({ const percentage = (100 * (transferredBytes / this.contentLength)).toFixed(1); const elapsedTime = Date.now() - this.startTime; const downloadSpeed = (transferredBytes / (1024 * 1024) / (elapsedTime / 1e3)).toFixed(1); - core17.info(`Received ${transferredBytes} of ${this.contentLength} (${percentage}%), ${downloadSpeed} MBs/sec`); + core18.info(`Received ${transferredBytes} of ${this.contentLength} (${percentage}%), ${downloadSpeed} MBs/sec`); if (this.isDone()) { this.displayedComplete = true; } @@ -92439,7 +92439,7 @@ var require_downloadUtils = __commonJS({ })); downloadResponse.message.socket.setTimeout(constants_1.SocketTimeout, () => { downloadResponse.message.destroy(); - core17.debug(`Aborting download, socket timed out after ${constants_1.SocketTimeout} ms`); + core18.debug(`Aborting download, socket timed out after ${constants_1.SocketTimeout} ms`); }); yield pipeResponseToStream(downloadResponse, writeStream); const contentLengthHeader = downloadResponse.message.headers["content-length"]; @@ -92450,7 +92450,7 @@ var require_downloadUtils = __commonJS({ throw new Error(`Incomplete download. Expected file size: ${expectedLength}, actual file size: ${actualLength}`); } } else { - core17.debug("Unable to validate download, no Content-Length header"); + core18.debug("Unable to validate download, no Content-Length header"); } }); } @@ -92568,7 +92568,7 @@ var require_downloadUtils = __commonJS({ const properties = yield client.getProperties(); const contentLength = (_a = properties.contentLength) !== null && _a !== void 0 ? 
_a : -1; if (contentLength < 0) { - core17.debug("Unable to determine content length, downloading file with http-client..."); + core18.debug("Unable to determine content length, downloading file with http-client..."); yield downloadCacheHttpClient(archiveLocation, archivePath); } else { const maxSegmentSize = Math.min(134217728, buffer.constants.MAX_LENGTH); @@ -92658,7 +92658,7 @@ var require_options = __commonJS({ Object.defineProperty(exports2, "__esModule", { value: true }); exports2.getUploadOptions = getUploadOptions; exports2.getDownloadOptions = getDownloadOptions; - var core17 = __importStar2(require_core()); + var core18 = __importStar2(require_core()); function getUploadOptions(copy) { const result = { useAzureSdk: false, @@ -92678,9 +92678,9 @@ var require_options = __commonJS({ } result.uploadConcurrency = !isNaN(Number(process.env["CACHE_UPLOAD_CONCURRENCY"])) ? Math.min(32, Number(process.env["CACHE_UPLOAD_CONCURRENCY"])) : result.uploadConcurrency; result.uploadChunkSize = !isNaN(Number(process.env["CACHE_UPLOAD_CHUNK_SIZE"])) ? 
Math.min(128 * 1024 * 1024, Number(process.env["CACHE_UPLOAD_CHUNK_SIZE"]) * 1024 * 1024) : result.uploadChunkSize; - core17.debug(`Use Azure SDK: ${result.useAzureSdk}`); - core17.debug(`Upload concurrency: ${result.uploadConcurrency}`); - core17.debug(`Upload chunk size: ${result.uploadChunkSize}`); + core18.debug(`Use Azure SDK: ${result.useAzureSdk}`); + core18.debug(`Upload concurrency: ${result.uploadConcurrency}`); + core18.debug(`Upload chunk size: ${result.uploadChunkSize}`); return result; } function getDownloadOptions(copy) { @@ -92716,12 +92716,12 @@ var require_options = __commonJS({ if (segmentDownloadTimeoutMins && !isNaN(Number(segmentDownloadTimeoutMins)) && isFinite(Number(segmentDownloadTimeoutMins))) { result.segmentTimeoutInMs = Number(segmentDownloadTimeoutMins) * 60 * 1e3; } - core17.debug(`Use Azure SDK: ${result.useAzureSdk}`); - core17.debug(`Download concurrency: ${result.downloadConcurrency}`); - core17.debug(`Request timeout (ms): ${result.timeoutInMs}`); - core17.debug(`Cache segment download timeout mins env var: ${process.env["SEGMENT_DOWNLOAD_TIMEOUT_MINS"]}`); - core17.debug(`Segment download timeout (ms): ${result.segmentTimeoutInMs}`); - core17.debug(`Lookup only: ${result.lookupOnly}`); + core18.debug(`Use Azure SDK: ${result.useAzureSdk}`); + core18.debug(`Download concurrency: ${result.downloadConcurrency}`); + core18.debug(`Request timeout (ms): ${result.timeoutInMs}`); + core18.debug(`Cache segment download timeout mins env var: ${process.env["SEGMENT_DOWNLOAD_TIMEOUT_MINS"]}`); + core18.debug(`Segment download timeout (ms): ${result.segmentTimeoutInMs}`); + core18.debug(`Lookup only: ${result.lookupOnly}`); return result; } } @@ -92915,7 +92915,7 @@ var require_cacheHttpClient = __commonJS({ exports2.downloadCache = downloadCache; exports2.reserveCache = reserveCache; exports2.saveCache = saveCache5; - var core17 = __importStar2(require_core()); + var core18 = __importStar2(require_core()); var http_client_1 = 
require_lib(); var auth_1 = require_auth(); var fs20 = __importStar2(require("fs")); @@ -92933,7 +92933,7 @@ var require_cacheHttpClient = __commonJS({ throw new Error("Cache Service Url not found, unable to restore cache."); } const url2 = `${baseUrl}_apis/artifactcache/${resource}`; - core17.debug(`Resource Url: ${url2}`); + core18.debug(`Resource Url: ${url2}`); return url2; } function createAcceptHeader(type2, apiVersion) { @@ -92961,7 +92961,7 @@ var require_cacheHttpClient = __commonJS({ return httpClient.getJson(getCacheApiUrl(resource)); })); if (response.statusCode === 204) { - if (core17.isDebug()) { + if (core18.isDebug()) { yield printCachesListForDiagnostics(keys[0], httpClient, version); } return null; @@ -92974,9 +92974,9 @@ var require_cacheHttpClient = __commonJS({ if (!cacheDownloadUrl) { throw new Error("Cache not found."); } - core17.setSecret(cacheDownloadUrl); - core17.debug(`Cache Result:`); - core17.debug(JSON.stringify(cacheResult)); + core18.setSecret(cacheDownloadUrl); + core18.debug(`Cache Result:`); + core18.debug(JSON.stringify(cacheResult)); return cacheResult; }); } @@ -92990,10 +92990,10 @@ var require_cacheHttpClient = __commonJS({ const cacheListResult = response.result; const totalCount = cacheListResult === null || cacheListResult === void 0 ? void 0 : cacheListResult.totalCount; if (totalCount && totalCount > 0) { - core17.debug(`No matching cache found for cache key '${key}', version '${version} and scope ${process.env["GITHUB_REF"]}. There exist one or more cache(s) with similar key but they have different version or scope. See more info on cache matching here: https://docs.github.com/en/actions/using-workflows/caching-dependencies-to-speed-up-workflows#matching-a-cache-key + core18.debug(`No matching cache found for cache key '${key}', version '${version} and scope ${process.env["GITHUB_REF"]}. There exist one or more cache(s) with similar key but they have different version or scope. 
See more info on cache matching here: https://docs.github.com/en/actions/using-workflows/caching-dependencies-to-speed-up-workflows#matching-a-cache-key Other caches with similar key:`); for (const cacheEntry of (cacheListResult === null || cacheListResult === void 0 ? void 0 : cacheListResult.artifactCaches) || []) { - core17.debug(`Cache Key: ${cacheEntry === null || cacheEntry === void 0 ? void 0 : cacheEntry.cacheKey}, Cache Version: ${cacheEntry === null || cacheEntry === void 0 ? void 0 : cacheEntry.cacheVersion}, Cache Scope: ${cacheEntry === null || cacheEntry === void 0 ? void 0 : cacheEntry.scope}, Cache Created: ${cacheEntry === null || cacheEntry === void 0 ? void 0 : cacheEntry.creationTime}`); + core18.debug(`Cache Key: ${cacheEntry === null || cacheEntry === void 0 ? void 0 : cacheEntry.cacheKey}, Cache Version: ${cacheEntry === null || cacheEntry === void 0 ? void 0 : cacheEntry.cacheVersion}, Cache Scope: ${cacheEntry === null || cacheEntry === void 0 ? void 0 : cacheEntry.scope}, Cache Created: ${cacheEntry === null || cacheEntry === void 0 ? 
void 0 : cacheEntry.creationTime}`); } } } @@ -93036,7 +93036,7 @@ Other caches with similar key:`); } function uploadChunk(httpClient, resourceUrl, openStream, start, end) { return __awaiter2(this, void 0, void 0, function* () { - core17.debug(`Uploading chunk of size ${end - start + 1} bytes at offset ${start} with content range: ${getContentRange(start, end)}`); + core18.debug(`Uploading chunk of size ${end - start + 1} bytes at offset ${start} with content range: ${getContentRange(start, end)}`); const additionalHeaders = { "Content-Type": "application/octet-stream", "Content-Range": getContentRange(start, end) @@ -93058,7 +93058,7 @@ Other caches with similar key:`); const concurrency = utils.assertDefined("uploadConcurrency", uploadOptions.uploadConcurrency); const maxChunkSize = utils.assertDefined("uploadChunkSize", uploadOptions.uploadChunkSize); const parallelUploads = [...new Array(concurrency).keys()]; - core17.debug("Awaiting all uploads"); + core18.debug("Awaiting all uploads"); let offset = 0; try { yield Promise.all(parallelUploads.map(() => __awaiter2(this, void 0, void 0, function* () { @@ -93101,16 +93101,16 @@ Other caches with similar key:`); yield (0, uploadUtils_1.uploadCacheArchiveSDK)(signedUploadURL, archivePath, options); } else { const httpClient = createHttpClient(); - core17.debug("Upload cache"); + core18.debug("Upload cache"); yield uploadFile(httpClient, cacheId, archivePath, options); - core17.debug("Commiting cache"); + core18.debug("Commiting cache"); const cacheSize = utils.getArchiveFileSizeInBytes(archivePath); - core17.info(`Cache Size: ~${Math.round(cacheSize / (1024 * 1024))} MB (${cacheSize} B)`); + core18.info(`Cache Size: ~${Math.round(cacheSize / (1024 * 1024))} MB (${cacheSize} B)`); const commitCacheResponse = yield commitCache(httpClient, cacheId, cacheSize); if (!(0, requestUtils_1.isSuccessStatusCode)(commitCacheResponse.statusCode)) { throw new Error(`Cache service responded with ${commitCacheResponse.statusCode} 
during commit cache.`); } - core17.info("Cache saved successfully"); + core18.info("Cache saved successfully"); } }); } @@ -98593,7 +98593,7 @@ var require_cache5 = __commonJS({ exports2.isFeatureAvailable = isFeatureAvailable; exports2.restoreCache = restoreCache5; exports2.saveCache = saveCache5; - var core17 = __importStar2(require_core()); + var core18 = __importStar2(require_core()); var path19 = __importStar2(require("path")); var utils = __importStar2(require_cacheUtils()); var cacheHttpClient = __importStar2(require_cacheHttpClient()); @@ -98652,7 +98652,7 @@ var require_cache5 = __commonJS({ function restoreCache5(paths_1, primaryKey_1, restoreKeys_1, options_1) { return __awaiter2(this, arguments, void 0, function* (paths, primaryKey, restoreKeys, options, enableCrossOsArchive = false) { const cacheServiceVersion = (0, config_1.getCacheServiceVersion)(); - core17.debug(`Cache service version: ${cacheServiceVersion}`); + core18.debug(`Cache service version: ${cacheServiceVersion}`); checkPaths(paths); switch (cacheServiceVersion) { case "v2": @@ -98667,8 +98667,8 @@ var require_cache5 = __commonJS({ return __awaiter2(this, arguments, void 0, function* (paths, primaryKey, restoreKeys, options, enableCrossOsArchive = false) { restoreKeys = restoreKeys || []; const keys = [primaryKey, ...restoreKeys]; - core17.debug("Resolved Keys:"); - core17.debug(JSON.stringify(keys)); + core18.debug("Resolved Keys:"); + core18.debug(JSON.stringify(keys)); if (keys.length > 10) { throw new ValidationError(`Key Validation Error: Keys are limited to a maximum of 10.`); } @@ -98686,19 +98686,19 @@ var require_cache5 = __commonJS({ return void 0; } if (options === null || options === void 0 ? 
void 0 : options.lookupOnly) { - core17.info("Lookup only - skipping download"); + core18.info("Lookup only - skipping download"); return cacheEntry.cacheKey; } archivePath = path19.join(yield utils.createTempDirectory(), utils.getCacheFileName(compressionMethod)); - core17.debug(`Archive Path: ${archivePath}`); + core18.debug(`Archive Path: ${archivePath}`); yield cacheHttpClient.downloadCache(cacheEntry.archiveLocation, archivePath, options); - if (core17.isDebug()) { + if (core18.isDebug()) { yield (0, tar_1.listTar)(archivePath, compressionMethod); } const archiveFileSize = utils.getArchiveFileSizeInBytes(archivePath); - core17.info(`Cache Size: ~${Math.round(archiveFileSize / (1024 * 1024))} MB (${archiveFileSize} B)`); + core18.info(`Cache Size: ~${Math.round(archiveFileSize / (1024 * 1024))} MB (${archiveFileSize} B)`); yield (0, tar_1.extractTar)(archivePath, compressionMethod); - core17.info("Cache restored successfully"); + core18.info("Cache restored successfully"); return cacheEntry.cacheKey; } catch (error3) { const typedError = error3; @@ -98706,16 +98706,16 @@ var require_cache5 = __commonJS({ throw error3; } else { if (typedError instanceof http_client_1.HttpClientError && typeof typedError.statusCode === "number" && typedError.statusCode >= 500) { - core17.error(`Failed to restore: ${error3.message}`); + core18.error(`Failed to restore: ${error3.message}`); } else { - core17.warning(`Failed to restore: ${error3.message}`); + core18.warning(`Failed to restore: ${error3.message}`); } } } finally { try { yield utils.unlinkFile(archivePath); } catch (error3) { - core17.debug(`Failed to delete archive: ${error3}`); + core18.debug(`Failed to delete archive: ${error3}`); } } return void 0; @@ -98726,8 +98726,8 @@ var require_cache5 = __commonJS({ options = Object.assign(Object.assign({}, options), { useAzureSdk: true }); restoreKeys = restoreKeys || []; const keys = [primaryKey, ...restoreKeys]; - core17.debug("Resolved Keys:"); - 
core17.debug(JSON.stringify(keys)); + core18.debug("Resolved Keys:"); + core18.debug(JSON.stringify(keys)); if (keys.length > 10) { throw new ValidationError(`Key Validation Error: Keys are limited to a maximum of 10.`); } @@ -98745,30 +98745,30 @@ var require_cache5 = __commonJS({ }; const response = yield twirpClient.GetCacheEntryDownloadURL(request2); if (!response.ok) { - core17.debug(`Cache not found for version ${request2.version} of keys: ${keys.join(", ")}`); + core18.debug(`Cache not found for version ${request2.version} of keys: ${keys.join(", ")}`); return void 0; } const isRestoreKeyMatch = request2.key !== response.matchedKey; if (isRestoreKeyMatch) { - core17.info(`Cache hit for restore-key: ${response.matchedKey}`); + core18.info(`Cache hit for restore-key: ${response.matchedKey}`); } else { - core17.info(`Cache hit for: ${response.matchedKey}`); + core18.info(`Cache hit for: ${response.matchedKey}`); } if (options === null || options === void 0 ? void 0 : options.lookupOnly) { - core17.info("Lookup only - skipping download"); + core18.info("Lookup only - skipping download"); return response.matchedKey; } archivePath = path19.join(yield utils.createTempDirectory(), utils.getCacheFileName(compressionMethod)); - core17.debug(`Archive path: ${archivePath}`); - core17.debug(`Starting download of archive to: ${archivePath}`); + core18.debug(`Archive path: ${archivePath}`); + core18.debug(`Starting download of archive to: ${archivePath}`); yield cacheHttpClient.downloadCache(response.signedDownloadUrl, archivePath, options); const archiveFileSize = utils.getArchiveFileSizeInBytes(archivePath); - core17.info(`Cache Size: ~${Math.round(archiveFileSize / (1024 * 1024))} MB (${archiveFileSize} B)`); - if (core17.isDebug()) { + core18.info(`Cache Size: ~${Math.round(archiveFileSize / (1024 * 1024))} MB (${archiveFileSize} B)`); + if (core18.isDebug()) { yield (0, tar_1.listTar)(archivePath, compressionMethod); } yield (0, tar_1.extractTar)(archivePath, 
compressionMethod); - core17.info("Cache restored successfully"); + core18.info("Cache restored successfully"); return response.matchedKey; } catch (error3) { const typedError = error3; @@ -98776,9 +98776,9 @@ var require_cache5 = __commonJS({ throw error3; } else { if (typedError instanceof http_client_1.HttpClientError && typeof typedError.statusCode === "number" && typedError.statusCode >= 500) { - core17.error(`Failed to restore: ${error3.message}`); + core18.error(`Failed to restore: ${error3.message}`); } else { - core17.warning(`Failed to restore: ${error3.message}`); + core18.warning(`Failed to restore: ${error3.message}`); } } } finally { @@ -98787,7 +98787,7 @@ var require_cache5 = __commonJS({ yield utils.unlinkFile(archivePath); } } catch (error3) { - core17.debug(`Failed to delete archive: ${error3}`); + core18.debug(`Failed to delete archive: ${error3}`); } } return void 0; @@ -98796,7 +98796,7 @@ var require_cache5 = __commonJS({ function saveCache5(paths_1, key_1, options_1) { return __awaiter2(this, arguments, void 0, function* (paths, key, options, enableCrossOsArchive = false) { const cacheServiceVersion = (0, config_1.getCacheServiceVersion)(); - core17.debug(`Cache service version: ${cacheServiceVersion}`); + core18.debug(`Cache service version: ${cacheServiceVersion}`); checkPaths(paths); checkKey(key); switch (cacheServiceVersion) { @@ -98814,26 +98814,26 @@ var require_cache5 = __commonJS({ const compressionMethod = yield utils.getCompressionMethod(); let cacheId = -1; const cachePaths = yield utils.resolvePaths(paths); - core17.debug("Cache Paths:"); - core17.debug(`${JSON.stringify(cachePaths)}`); + core18.debug("Cache Paths:"); + core18.debug(`${JSON.stringify(cachePaths)}`); if (cachePaths.length === 0) { throw new Error(`Path Validation Error: Path(s) specified in the action for caching do(es) not exist, hence no cache is being saved.`); } const archiveFolder = yield utils.createTempDirectory(); const archivePath = 
path19.join(archiveFolder, utils.getCacheFileName(compressionMethod)); - core17.debug(`Archive Path: ${archivePath}`); + core18.debug(`Archive Path: ${archivePath}`); try { yield (0, tar_1.createTar)(archiveFolder, cachePaths, compressionMethod); - if (core17.isDebug()) { + if (core18.isDebug()) { yield (0, tar_1.listTar)(archivePath, compressionMethod); } const fileSizeLimit = 10 * 1024 * 1024 * 1024; const archiveFileSize = utils.getArchiveFileSizeInBytes(archivePath); - core17.debug(`File Size: ${archiveFileSize}`); + core18.debug(`File Size: ${archiveFileSize}`); if (archiveFileSize > fileSizeLimit && !(0, config_1.isGhes)()) { throw new Error(`Cache size of ~${Math.round(archiveFileSize / (1024 * 1024))} MB (${archiveFileSize} B) is over the 10GB limit, not saving cache.`); } - core17.debug("Reserving Cache"); + core18.debug("Reserving Cache"); const reserveCacheResponse = yield cacheHttpClient.reserveCache(key, paths, { compressionMethod, enableCrossOsArchive, @@ -98846,26 +98846,26 @@ var require_cache5 = __commonJS({ } else { throw new ReserveCacheError2(`Unable to reserve cache with key ${key}, another job may be creating this cache. More details: ${(_e = reserveCacheResponse === null || reserveCacheResponse === void 0 ? void 0 : reserveCacheResponse.error) === null || _e === void 0 ? 
void 0 : _e.message}`); } - core17.debug(`Saving Cache (ID: ${cacheId})`); + core18.debug(`Saving Cache (ID: ${cacheId})`); yield cacheHttpClient.saveCache(cacheId, archivePath, "", options); } catch (error3) { const typedError = error3; if (typedError.name === ValidationError.name) { throw error3; } else if (typedError.name === ReserveCacheError2.name) { - core17.info(`Failed to save: ${typedError.message}`); + core18.info(`Failed to save: ${typedError.message}`); } else { if (typedError instanceof http_client_1.HttpClientError && typeof typedError.statusCode === "number" && typedError.statusCode >= 500) { - core17.error(`Failed to save: ${typedError.message}`); + core18.error(`Failed to save: ${typedError.message}`); } else { - core17.warning(`Failed to save: ${typedError.message}`); + core18.warning(`Failed to save: ${typedError.message}`); } } } finally { try { yield utils.unlinkFile(archivePath); } catch (error3) { - core17.debug(`Failed to delete archive: ${error3}`); + core18.debug(`Failed to delete archive: ${error3}`); } } return cacheId; @@ -98878,23 +98878,23 @@ var require_cache5 = __commonJS({ const twirpClient = cacheTwirpClient.internalCacheTwirpClient(); let cacheId = -1; const cachePaths = yield utils.resolvePaths(paths); - core17.debug("Cache Paths:"); - core17.debug(`${JSON.stringify(cachePaths)}`); + core18.debug("Cache Paths:"); + core18.debug(`${JSON.stringify(cachePaths)}`); if (cachePaths.length === 0) { throw new Error(`Path Validation Error: Path(s) specified in the action for caching do(es) not exist, hence no cache is being saved.`); } const archiveFolder = yield utils.createTempDirectory(); const archivePath = path19.join(archiveFolder, utils.getCacheFileName(compressionMethod)); - core17.debug(`Archive Path: ${archivePath}`); + core18.debug(`Archive Path: ${archivePath}`); try { yield (0, tar_1.createTar)(archiveFolder, cachePaths, compressionMethod); - if (core17.isDebug()) { + if (core18.isDebug()) { yield (0, 
tar_1.listTar)(archivePath, compressionMethod); } const archiveFileSize = utils.getArchiveFileSizeInBytes(archivePath); - core17.debug(`File Size: ${archiveFileSize}`); + core18.debug(`File Size: ${archiveFileSize}`); options.archiveSizeBytes = archiveFileSize; - core17.debug("Reserving Cache"); + core18.debug("Reserving Cache"); const version = utils.getCacheVersion(paths, compressionMethod, enableCrossOsArchive); const request2 = { key, @@ -98905,16 +98905,16 @@ var require_cache5 = __commonJS({ const response = yield twirpClient.CreateCacheEntry(request2); if (!response.ok) { if (response.message) { - core17.warning(`Cache reservation failed: ${response.message}`); + core18.warning(`Cache reservation failed: ${response.message}`); } throw new Error(response.message || "Response was not ok"); } signedUploadUrl = response.signedUploadUrl; } catch (error3) { - core17.debug(`Failed to reserve cache: ${error3}`); + core18.debug(`Failed to reserve cache: ${error3}`); throw new ReserveCacheError2(`Unable to reserve cache with key ${key}, another job may be creating this cache.`); } - core17.debug(`Attempting to upload cache located at: ${archivePath}`); + core18.debug(`Attempting to upload cache located at: ${archivePath}`); yield cacheHttpClient.saveCache(cacheId, archivePath, signedUploadUrl, options); const finalizeRequest = { key, @@ -98922,7 +98922,7 @@ var require_cache5 = __commonJS({ sizeBytes: `${archiveFileSize}` }; const finalizeResponse = yield twirpClient.FinalizeCacheEntryUpload(finalizeRequest); - core17.debug(`FinalizeCacheEntryUploadResponse: ${finalizeResponse.ok}`); + core18.debug(`FinalizeCacheEntryUploadResponse: ${finalizeResponse.ok}`); if (!finalizeResponse.ok) { if (finalizeResponse.message) { throw new FinalizeCacheError(finalizeResponse.message); @@ -98935,21 +98935,21 @@ var require_cache5 = __commonJS({ if (typedError.name === ValidationError.name) { throw error3; } else if (typedError.name === ReserveCacheError2.name) { - 
core17.info(`Failed to save: ${typedError.message}`); + core18.info(`Failed to save: ${typedError.message}`); } else if (typedError.name === FinalizeCacheError.name) { - core17.warning(typedError.message); + core18.warning(typedError.message); } else { if (typedError instanceof http_client_1.HttpClientError && typeof typedError.statusCode === "number" && typedError.statusCode >= 500) { - core17.error(`Failed to save: ${typedError.message}`); + core18.error(`Failed to save: ${typedError.message}`); } else { - core17.warning(`Failed to save: ${typedError.message}`); + core18.warning(`Failed to save: ${typedError.message}`); } } } finally { try { yield utils.unlinkFile(archivePath); } catch (error3) { - core17.debug(`Failed to delete archive: ${error3}`); + core18.debug(`Failed to delete archive: ${error3}`); } } return cacheId; @@ -99176,7 +99176,7 @@ var require_retry_helper = __commonJS({ }; Object.defineProperty(exports2, "__esModule", { value: true }); exports2.RetryHelper = void 0; - var core17 = __importStar2(require_core()); + var core18 = __importStar2(require_core()); var RetryHelper = class { constructor(maxAttempts, minSeconds, maxSeconds) { if (maxAttempts < 1) { @@ -99199,10 +99199,10 @@ var require_retry_helper = __commonJS({ if (isRetryable && !isRetryable(err)) { throw err; } - core17.info(err.message); + core18.info(err.message); } const seconds = this.getSleepAmount(); - core17.info(`Waiting ${seconds} seconds before trying again`); + core18.info(`Waiting ${seconds} seconds before trying again`); yield this.sleep(seconds); attempt++; } @@ -99305,7 +99305,7 @@ var require_tool_cache = __commonJS({ exports2.findFromManifest = findFromManifest; exports2.isExplicitVersion = isExplicitVersion; exports2.evaluateVersions = evaluateVersions; - var core17 = __importStar2(require_core()); + var core18 = __importStar2(require_core()); var io7 = __importStar2(require_io()); var crypto2 = __importStar2(require("crypto")); var fs20 = __importStar2(require("fs")); 
@@ -99334,8 +99334,8 @@ var require_tool_cache = __commonJS({ return __awaiter2(this, void 0, void 0, function* () { dest = dest || path19.join(_getTempDirectory(), crypto2.randomUUID()); yield io7.mkdirP(path19.dirname(dest)); - core17.debug(`Downloading ${url2}`); - core17.debug(`Destination ${dest}`); + core18.debug(`Downloading ${url2}`); + core18.debug(`Destination ${dest}`); const maxAttempts = 3; const minSeconds = _getGlobal("TEST_DOWNLOAD_TOOL_RETRY_MIN_SECONDS", 10); const maxSeconds = _getGlobal("TEST_DOWNLOAD_TOOL_RETRY_MAX_SECONDS", 20); @@ -99361,7 +99361,7 @@ var require_tool_cache = __commonJS({ allowRetries: false }); if (auth2) { - core17.debug("set auth"); + core18.debug("set auth"); if (headers === void 0) { headers = {}; } @@ -99370,7 +99370,7 @@ var require_tool_cache = __commonJS({ const response = yield http.get(url2, headers); if (response.message.statusCode !== 200) { const err = new HTTPError2(response.message.statusCode); - core17.debug(`Failed to download from "${url2}". Code(${response.message.statusCode}) Message(${response.message.statusMessage})`); + core18.debug(`Failed to download from "${url2}". Code(${response.message.statusCode}) Message(${response.message.statusMessage})`); throw err; } const pipeline = util.promisify(stream2.pipeline); @@ -99379,16 +99379,16 @@ var require_tool_cache = __commonJS({ let succeeded = false; try { yield pipeline(readStream, fs20.createWriteStream(dest)); - core17.debug("download complete"); + core18.debug("download complete"); succeeded = true; return dest; } finally { if (!succeeded) { - core17.debug("download failed"); + core18.debug("download failed"); try { yield io7.rmRF(dest); } catch (err) { - core17.debug(`Failed to delete '${dest}'. ${err.message}`); + core18.debug(`Failed to delete '${dest}'. ${err.message}`); } } } @@ -99403,7 +99403,7 @@ var require_tool_cache = __commonJS({ process.chdir(dest); if (_7zPath) { try { - const logLevel = core17.isDebug() ? 
"-bb1" : "-bb0"; + const logLevel = core18.isDebug() ? "-bb1" : "-bb0"; const args = [ "x", // eXtract files with full paths @@ -99456,7 +99456,7 @@ var require_tool_cache = __commonJS({ throw new Error("parameter 'file' is required"); } dest = yield _createExtractFolder(dest); - core17.debug("Checking tar --version"); + core18.debug("Checking tar --version"); let versionOutput = ""; yield (0, exec_1.exec)("tar --version", [], { ignoreReturnCode: true, @@ -99466,7 +99466,7 @@ var require_tool_cache = __commonJS({ stderr: (data) => versionOutput += data.toString() } }); - core17.debug(versionOutput.trim()); + core18.debug(versionOutput.trim()); const isGnuTar = versionOutput.toUpperCase().includes("GNU TAR"); let args; if (flags instanceof Array) { @@ -99474,7 +99474,7 @@ var require_tool_cache = __commonJS({ } else { args = [flags]; } - if (core17.isDebug() && !flags.includes("v")) { + if (core18.isDebug() && !flags.includes("v")) { args.push("-v"); } let destArg = dest; @@ -99505,7 +99505,7 @@ var require_tool_cache = __commonJS({ args = [flags]; } args.push("-x", "-C", dest, "-f", file); - if (core17.isDebug()) { + if (core18.isDebug()) { args.push("-v"); } const xarPath = yield io7.which("xar", true); @@ -99548,7 +99548,7 @@ var require_tool_cache = __commonJS({ "-Command", pwshCommand ]; - core17.debug(`Using pwsh at path: ${pwshPath}`); + core18.debug(`Using pwsh at path: ${pwshPath}`); yield (0, exec_1.exec)(`"${pwshPath}"`, args); } else { const powershellCommand = [ @@ -99568,7 +99568,7 @@ var require_tool_cache = __commonJS({ powershellCommand ]; const powershellPath = yield io7.which("powershell", true); - core17.debug(`Using powershell at path: ${powershellPath}`); + core18.debug(`Using powershell at path: ${powershellPath}`); yield (0, exec_1.exec)(`"${powershellPath}"`, args); } }); @@ -99577,7 +99577,7 @@ var require_tool_cache = __commonJS({ return __awaiter2(this, void 0, void 0, function* () { const unzipPath = yield io7.which("unzip", true); const 
args = [file]; - if (!core17.isDebug()) { + if (!core18.isDebug()) { args.unshift("-q"); } args.unshift("-o"); @@ -99588,8 +99588,8 @@ var require_tool_cache = __commonJS({ return __awaiter2(this, void 0, void 0, function* () { version = semver9.clean(version) || version; arch2 = arch2 || os4.arch(); - core17.debug(`Caching tool ${tool} ${version} ${arch2}`); - core17.debug(`source dir: ${sourceDir}`); + core18.debug(`Caching tool ${tool} ${version} ${arch2}`); + core18.debug(`source dir: ${sourceDir}`); if (!fs20.statSync(sourceDir).isDirectory()) { throw new Error("sourceDir is not a directory"); } @@ -99606,14 +99606,14 @@ var require_tool_cache = __commonJS({ return __awaiter2(this, void 0, void 0, function* () { version = semver9.clean(version) || version; arch2 = arch2 || os4.arch(); - core17.debug(`Caching tool ${tool} ${version} ${arch2}`); - core17.debug(`source file: ${sourceFile}`); + core18.debug(`Caching tool ${tool} ${version} ${arch2}`); + core18.debug(`source file: ${sourceFile}`); if (!fs20.statSync(sourceFile).isFile()) { throw new Error("sourceFile is not a file"); } const destFolder = yield _createToolPath(tool, version, arch2); const destPath = path19.join(destFolder, targetFile); - core17.debug(`destination file ${destPath}`); + core18.debug(`destination file ${destPath}`); yield io7.cp(sourceFile, destPath); _completeToolPath(tool, version, arch2); return destFolder; @@ -99636,12 +99636,12 @@ var require_tool_cache = __commonJS({ if (versionSpec) { versionSpec = semver9.clean(versionSpec) || ""; const cachePath = path19.join(_getCacheDirectory(), toolName, versionSpec, arch2); - core17.debug(`checking cache: ${cachePath}`); + core18.debug(`checking cache: ${cachePath}`); if (fs20.existsSync(cachePath) && fs20.existsSync(`${cachePath}.complete`)) { - core17.debug(`Found tool in cache ${toolName} ${versionSpec} ${arch2}`); + core18.debug(`Found tool in cache ${toolName} ${versionSpec} ${arch2}`); toolPath = cachePath; } else { - 
core17.debug("not found"); + core18.debug("not found"); } } return toolPath; @@ -99670,7 +99670,7 @@ var require_tool_cache = __commonJS({ const http = new httpm.HttpClient("tool-cache"); const headers = {}; if (auth2) { - core17.debug("set auth"); + core18.debug("set auth"); headers.authorization = auth2; } const response = yield http.getJson(treeUrl, headers); @@ -99691,7 +99691,7 @@ var require_tool_cache = __commonJS({ try { releases = JSON.parse(versionsRaw); } catch (_a) { - core17.debug("Invalid json"); + core18.debug("Invalid json"); } } return releases; @@ -99715,7 +99715,7 @@ var require_tool_cache = __commonJS({ function _createToolPath(tool, version, arch2) { return __awaiter2(this, void 0, void 0, function* () { const folderPath = path19.join(_getCacheDirectory(), tool, semver9.clean(version) || version, arch2 || ""); - core17.debug(`destination ${folderPath}`); + core18.debug(`destination ${folderPath}`); const markerPath = `${folderPath}.complete`; yield io7.rmRF(folderPath); yield io7.rmRF(markerPath); @@ -99727,18 +99727,18 @@ var require_tool_cache = __commonJS({ const folderPath = path19.join(_getCacheDirectory(), tool, semver9.clean(version) || version, arch2 || ""); const markerPath = `${folderPath}.complete`; fs20.writeFileSync(markerPath, ""); - core17.debug("finished caching tool"); + core18.debug("finished caching tool"); } function isExplicitVersion(versionSpec) { const c = semver9.clean(versionSpec) || ""; - core17.debug(`isExplicit: ${c}`); + core18.debug(`isExplicit: ${c}`); const valid3 = semver9.valid(c) != null; - core17.debug(`explicit? ${valid3}`); + core18.debug(`explicit? 
${valid3}`); return valid3; } function evaluateVersions(versions, versionSpec) { let version = ""; - core17.debug(`evaluating ${versions.length} versions`); + core18.debug(`evaluating ${versions.length} versions`); versions = versions.sort((a, b) => { if (semver9.gt(a, b)) { return 1; @@ -99754,9 +99754,9 @@ var require_tool_cache = __commonJS({ } } if (version) { - core17.debug(`matched: ${version}`); + core18.debug(`matched: ${version}`); } else { - core17.debug("match not found"); + core18.debug("match not found"); } return version; } @@ -102438,14 +102438,14 @@ var require_retention = __commonJS({ Object.defineProperty(exports2, "__esModule", { value: true }); exports2.getExpiration = void 0; var generated_1 = require_generated(); - var core17 = __importStar2(require_core()); + var core18 = __importStar2(require_core()); function getExpiration(retentionDays) { if (!retentionDays) { return void 0; } const maxRetentionDays = getRetentionDays(); if (maxRetentionDays && maxRetentionDays < retentionDays) { - core17.warning(`Retention days cannot be greater than the maximum allowed retention set within the repository. Using ${maxRetentionDays} instead.`); + core18.warning(`Retention days cannot be greater than the maximum allowed retention set within the repository. 
Using ${maxRetentionDays} instead.`); retentionDays = maxRetentionDays; } const expirationDate = /* @__PURE__ */ new Date(); @@ -102783,7 +102783,7 @@ var require_util19 = __commonJS({ }; Object.defineProperty(exports2, "__esModule", { value: true }); exports2.maskSecretUrls = exports2.maskSigUrl = exports2.getBackendIdsFromToken = void 0; - var core17 = __importStar2(require_core()); + var core18 = __importStar2(require_core()); var config_1 = require_config2(); var jwt_decode_1 = __importDefault2(require_jwt_decode_cjs()); var core_1 = require_core(); @@ -102810,8 +102810,8 @@ var require_util19 = __commonJS({ workflowRunBackendId: scopeParts[1], workflowJobRunBackendId: scopeParts[2] }; - core17.debug(`Workflow Run Backend ID: ${ids.workflowRunBackendId}`); - core17.debug(`Workflow Job Run Backend ID: ${ids.workflowJobRunBackendId}`); + core18.debug(`Workflow Run Backend ID: ${ids.workflowRunBackendId}`); + core18.debug(`Workflow Job Run Backend ID: ${ids.workflowJobRunBackendId}`); return ids; } throw InvalidJwtError; @@ -103171,7 +103171,7 @@ var require_blob_upload = __commonJS({ exports2.uploadZipToBlobStorage = void 0; var storage_blob_1 = require_commonjs15(); var config_1 = require_config2(); - var core17 = __importStar2(require_core()); + var core18 = __importStar2(require_core()); var crypto2 = __importStar2(require("crypto")); var stream2 = __importStar2(require("stream")); var errors_1 = require_errors4(); @@ -103197,9 +103197,9 @@ var require_blob_upload = __commonJS({ const bufferSize = (0, config_1.getUploadChunkSize)(); const blobClient = new storage_blob_1.BlobClient(authenticatedUploadURL); const blockBlobClient = blobClient.getBlockBlobClient(); - core17.debug(`Uploading artifact zip to blob storage with maxConcurrency: ${maxConcurrency}, bufferSize: ${bufferSize}`); + core18.debug(`Uploading artifact zip to blob storage with maxConcurrency: ${maxConcurrency}, bufferSize: ${bufferSize}`); const uploadCallback = (progress) => { - 
core17.info(`Uploaded bytes ${progress.loadedBytes}`); + core18.info(`Uploaded bytes ${progress.loadedBytes}`); uploadByteCount = progress.loadedBytes; lastProgressTime = Date.now(); }; @@ -103213,7 +103213,7 @@ var require_blob_upload = __commonJS({ const hashStream = crypto2.createHash("sha256"); zipUploadStream.pipe(uploadStream); zipUploadStream.pipe(hashStream).setEncoding("hex"); - core17.info("Beginning upload of artifact content to blob storage"); + core18.info("Beginning upload of artifact content to blob storage"); try { yield Promise.race([ blockBlobClient.uploadStream(uploadStream, bufferSize, maxConcurrency, options), @@ -103227,12 +103227,12 @@ var require_blob_upload = __commonJS({ } finally { abortController.abort(); } - core17.info("Finished uploading artifact content to blob storage!"); + core18.info("Finished uploading artifact content to blob storage!"); hashStream.end(); sha256Hash = hashStream.read(); - core17.info(`SHA256 digest of uploaded artifact zip is ${sha256Hash}`); + core18.info(`SHA256 digest of uploaded artifact zip is ${sha256Hash}`); if (uploadByteCount === 0) { - core17.warning(`No data was uploaded to blob storage. Reported upload byte count is 0.`); + core18.warning(`No data was uploaded to blob storage. 
Reported upload byte count is 0.`); } return { uploadSize: uploadByteCount, @@ -106563,8 +106563,8 @@ var require_graceful_fs = __commonJS({ } function noop3() { } - function publishQueue(context3, queue2) { - Object.defineProperty(context3, gracefulQueue, { + function publishQueue(context4, queue2) { + Object.defineProperty(context4, gracefulQueue, { get: function() { return queue2; } @@ -118730,7 +118730,7 @@ var require_commonjs21 = __commonJS({ free: c.#free, // methods isBackgroundFetch: (p) => c.#isBackgroundFetch(p), - backgroundFetch: (k, index, options, context3) => c.#backgroundFetch(k, index, options, context3), + backgroundFetch: (k, index, options, context4) => c.#backgroundFetch(k, index, options, context4), moveToTail: (index) => c.#moveToTail(index), indexes: (options) => c.#indexes(options), rindexes: (options) => c.#rindexes(options), @@ -119533,7 +119533,7 @@ var require_commonjs21 = __commonJS({ const v = this.#valList[index]; return this.#isBackgroundFetch(v) ? v.__staleWhileFetching : v; } - #backgroundFetch(k, index, options, context3) { + #backgroundFetch(k, index, options, context4) { const v = index === void 0 ? 
void 0 : this.#valList[index]; if (this.#isBackgroundFetch(v)) { return v; @@ -119546,7 +119546,7 @@ var require_commonjs21 = __commonJS({ const fetchOpts = { signal: ac.signal, options, - context: context3 + context: context4 }; const cb = (v2, updateCache = false) => { const { aborted } = ac.signal; @@ -119663,7 +119663,7 @@ var require_commonjs21 = __commonJS({ allowStaleOnFetchRejection = this.allowStaleOnFetchRejection, ignoreFetchAbort = this.ignoreFetchAbort, allowStaleOnFetchAbort = this.allowStaleOnFetchAbort, - context: context3, + context: context4, forceRefresh = false, status, signal @@ -119698,7 +119698,7 @@ var require_commonjs21 = __commonJS({ if (index === void 0) { if (status) status.fetch = "miss"; - const p = this.#backgroundFetch(k, index, options, context3); + const p = this.#backgroundFetch(k, index, options, context4); return p.__returned = p; } else { const v = this.#valList[index]; @@ -119723,7 +119723,7 @@ var require_commonjs21 = __commonJS({ this.#statusTTL(status, index); return v; } - const p = this.#backgroundFetch(k, index, options, context3); + const p = this.#backgroundFetch(k, index, options, context4); const hasStale = p.__staleWhileFetching !== void 0; const staleVal = hasStale && allowStale; if (status) { @@ -119745,13 +119745,13 @@ var require_commonjs21 = __commonJS({ if (!memoMethod) { throw new Error("no memoMethod provided to constructor"); } - const { context: context3, forceRefresh, ...options } = memoOptions; + const { context: context4, forceRefresh, ...options } = memoOptions; const v = this.get(k, options); if (!forceRefresh && v !== void 0) return v; const vv = memoMethod(k, v, { options, - context: context3 + context: context4 }); this.set(k, vv, options); return vv; @@ -128573,7 +128573,7 @@ var require_zip2 = __commonJS({ var stream2 = __importStar2(require("stream")); var promises_1 = require("fs/promises"); var archiver2 = __importStar2(require_archiver()); - var core17 = __importStar2(require_core()); + var 
core18 = __importStar2(require_core()); var config_1 = require_config2(); exports2.DEFAULT_COMPRESSION_LEVEL = 6; var ZipUploadStream = class extends stream2.Transform { @@ -128590,7 +128590,7 @@ var require_zip2 = __commonJS({ exports2.ZipUploadStream = ZipUploadStream; function createZipUploadStream(uploadSpecification_1) { return __awaiter2(this, arguments, void 0, function* (uploadSpecification, compressionLevel = exports2.DEFAULT_COMPRESSION_LEVEL) { - core17.debug(`Creating Artifact archive with compressionLevel: ${compressionLevel}`); + core18.debug(`Creating Artifact archive with compressionLevel: ${compressionLevel}`); const zip = archiver2.create("zip", { highWaterMark: (0, config_1.getUploadChunkSize)(), zlib: { level: compressionLevel } @@ -128614,8 +128614,8 @@ var require_zip2 = __commonJS({ } const bufferSize = (0, config_1.getUploadChunkSize)(); const zipUploadStream = new ZipUploadStream(bufferSize); - core17.debug(`Zip write high watermark value ${zipUploadStream.writableHighWaterMark}`); - core17.debug(`Zip read high watermark value ${zipUploadStream.readableHighWaterMark}`); + core18.debug(`Zip write high watermark value ${zipUploadStream.writableHighWaterMark}`); + core18.debug(`Zip read high watermark value ${zipUploadStream.readableHighWaterMark}`); zip.pipe(zipUploadStream); zip.finalize(); return zipUploadStream; @@ -128623,24 +128623,24 @@ var require_zip2 = __commonJS({ } exports2.createZipUploadStream = createZipUploadStream; var zipErrorCallback = (error3) => { - core17.error("An error has occurred while creating the zip file for upload"); - core17.info(error3); + core18.error("An error has occurred while creating the zip file for upload"); + core18.info(error3); throw new Error("An error has occurred during zip creation for the artifact"); }; var zipWarningCallback = (error3) => { if (error3.code === "ENOENT") { - core17.warning("ENOENT warning during artifact zip creation. 
No such file or directory"); - core17.info(error3); + core18.warning("ENOENT warning during artifact zip creation. No such file or directory"); + core18.info(error3); } else { - core17.warning(`A non-blocking warning has occurred during artifact zip creation: ${error3.code}`); - core17.info(error3); + core18.warning(`A non-blocking warning has occurred during artifact zip creation: ${error3.code}`); + core18.info(error3); } }; var zipFinishCallback = () => { - core17.debug("Zip stream for upload has finished."); + core18.debug("Zip stream for upload has finished."); }; var zipEndCallback = () => { - core17.debug("Zip stream for upload has ended."); + core18.debug("Zip stream for upload has ended."); }; } }); @@ -128705,7 +128705,7 @@ var require_upload_artifact = __commonJS({ }; Object.defineProperty(exports2, "__esModule", { value: true }); exports2.uploadArtifact = void 0; - var core17 = __importStar2(require_core()); + var core18 = __importStar2(require_core()); var retention_1 = require_retention(); var path_and_artifact_name_validation_1 = require_path_and_artifact_name_validation(); var artifact_twirp_client_1 = require_artifact_twirp_client2(); @@ -128752,13 +128752,13 @@ var require_upload_artifact = __commonJS({ value: `sha256:${uploadResult.sha256Hash}` }); } - core17.info(`Finalizing artifact upload`); + core18.info(`Finalizing artifact upload`); const finalizeArtifactResp = yield artifactClient.FinalizeArtifact(finalizeArtifactReq); if (!finalizeArtifactResp.ok) { throw new errors_1.InvalidResponseError("FinalizeArtifact: response from backend was not ok"); } const artifactId = BigInt(finalizeArtifactResp.artifactId); - core17.info(`Artifact ${name}.zip successfully finalized. Artifact ID ${artifactId}`); + core18.info(`Artifact ${name}.zip successfully finalized. 
Artifact ID ${artifactId}`); return { size: uploadResult.uploadSize, digest: uploadResult.sha256Hash, @@ -132645,18 +132645,18 @@ var require_webidl3 = __commonJS({ webidl.errors.exception = function(message) { return new TypeError(`${message.header}: ${message.message}`); }; - webidl.errors.conversionFailed = function(context3) { - const plural = context3.types.length === 1 ? "" : " one of"; - const message = `${context3.argument} could not be converted to${plural}: ${context3.types.join(", ")}.`; + webidl.errors.conversionFailed = function(context4) { + const plural = context4.types.length === 1 ? "" : " one of"; + const message = `${context4.argument} could not be converted to${plural}: ${context4.types.join(", ")}.`; return webidl.errors.exception({ - header: context3.prefix, + header: context4.prefix, message }); }; - webidl.errors.invalidArgument = function(context3) { + webidl.errors.invalidArgument = function(context4) { return webidl.errors.exception({ - header: context3.prefix, - message: `"${context3.value}" is an invalid ${context3.type}.` + header: context4.prefix, + message: `"${context4.value}" is an invalid ${context4.type}.` }); }; webidl.brandCheck = function(V, I, opts = void 0) { @@ -137982,15 +137982,15 @@ var require_api_request3 = __commonJS({ } addSignal(this, signal); } - onConnect(abort, context3) { + onConnect(abort, context4) { if (!this.callback) { throw new RequestAbortedError(); } this.abort = abort; - this.context = context3; + this.context = context4; } onHeaders(statusCode, rawHeaders, resume, statusMessage) { - const { callback, opaque, abort, context: context3, responseHeaders, highWaterMark } = this; + const { callback, opaque, abort, context: context4, responseHeaders, highWaterMark } = this; const headers = responseHeaders === "raw" ? 
util.parseRawHeaders(rawHeaders) : util.parseHeaders(rawHeaders); if (statusCode < 200) { if (this.onInfo) { @@ -138017,7 +138017,7 @@ var require_api_request3 = __commonJS({ trailers: this.trailers, opaque, body, - context: context3 + context: context4 }); } } @@ -138137,15 +138137,15 @@ var require_api_stream3 = __commonJS({ } addSignal(this, signal); } - onConnect(abort, context3) { + onConnect(abort, context4) { if (!this.callback) { throw new RequestAbortedError(); } this.abort = abort; - this.context = context3; + this.context = context4; } onHeaders(statusCode, rawHeaders, resume, statusMessage) { - const { factory, opaque, context: context3, callback, responseHeaders } = this; + const { factory, opaque, context: context4, callback, responseHeaders } = this; const headers = responseHeaders === "raw" ? util.parseRawHeaders(rawHeaders) : util.parseHeaders(rawHeaders); if (statusCode < 200) { if (this.onInfo) { @@ -138173,7 +138173,7 @@ var require_api_stream3 = __commonJS({ statusCode, headers, opaque, - context: context3 + context: context4 }); if (!res || typeof res.write !== "function" || typeof res.end !== "function" || typeof res.on !== "function") { throw new InvalidReturnValueError("expected Writable"); @@ -138365,17 +138365,17 @@ var require_api_pipeline3 = __commonJS({ this.res = null; addSignal(this, signal); } - onConnect(abort, context3) { + onConnect(abort, context4) { const { ret, res } = this; assert(!res, "pipeline cannot be retried"); if (ret.destroyed) { throw new RequestAbortedError(); } this.abort = abort; - this.context = context3; + this.context = context4; } onHeaders(statusCode, rawHeaders, resume) { - const { opaque, handler: handler2, context: context3 } = this; + const { opaque, handler: handler2, context: context4 } = this; if (statusCode < 200) { if (this.onInfo) { const headers = this.responseHeaders === "raw" ? 
util.parseRawHeaders(rawHeaders) : util.parseHeaders(rawHeaders); @@ -138393,7 +138393,7 @@ var require_api_pipeline3 = __commonJS({ headers, opaque, body: this.res, - context: context3 + context: context4 }); } catch (err) { this.res.on("error", util.nop); @@ -138477,7 +138477,7 @@ var require_api_upgrade3 = __commonJS({ this.context = null; addSignal(this, signal); } - onConnect(abort, context3) { + onConnect(abort, context4) { if (!this.callback) { throw new RequestAbortedError(); } @@ -138488,7 +138488,7 @@ var require_api_upgrade3 = __commonJS({ throw new SocketError("bad upgrade", null); } onUpgrade(statusCode, rawHeaders, socket) { - const { callback, opaque, context: context3 } = this; + const { callback, opaque, context: context4 } = this; assert.strictEqual(statusCode, 101); removeSignal(this); this.callback = null; @@ -138497,7 +138497,7 @@ var require_api_upgrade3 = __commonJS({ headers, socket, opaque, - context: context3 + context: context4 }); } onError(err) { @@ -138565,18 +138565,18 @@ var require_api_connect3 = __commonJS({ this.abort = null; addSignal(this, signal); } - onConnect(abort, context3) { + onConnect(abort, context4) { if (!this.callback) { throw new RequestAbortedError(); } this.abort = abort; - this.context = context3; + this.context = context4; } onHeaders() { throw new SocketError("bad connect", null); } onUpgrade(statusCode, rawHeaders, socket) { - const { callback, opaque, context: context3 } = this; + const { callback, opaque, context: context4 } = this; removeSignal(this); this.callback = null; let headers = rawHeaders; @@ -138588,7 +138588,7 @@ var require_api_connect3 = __commonJS({ headers, socket, opaque, - context: context3 + context: context4 }); } onError(err) { @@ -146707,8 +146707,8 @@ var require_dist_node2 = __commonJS({ function isKeyOperator2(operator) { return operator === ";" || operator === "&" || operator === "?"; } - function getValues2(context3, operator, key, modifier) { - var value = context3[key], result = 
[]; + function getValues2(context4, operator, key, modifier) { + var value = context4[key], result = []; if (isDefined3(value) && value !== "") { if (typeof value === "string" || typeof value === "number" || typeof value === "boolean") { value = value.toString(); @@ -146772,7 +146772,7 @@ var require_dist_node2 = __commonJS({ expand: expand2.bind(null, template) }; } - function expand2(template, context3) { + function expand2(template, context4) { var operators = ["+", "#", ".", "/", ";", "?", "&"]; template = template.replace( /\{([^\{\}]+)\}|([^\{\}]+)/g, @@ -146786,7 +146786,7 @@ var require_dist_node2 = __commonJS({ } expression.split(/,/g).forEach(function(variable) { var tmp = /([^:\*]*)(?::(\d+)|(\*))?/.exec(variable); - values.push(getValues2(context3, operator, tmp[1], tmp[2] || tmp[3])); + values.push(getValues2(context4, operator, tmp[1], tmp[2] || tmp[3])); }); if (operator && operator !== "+") { var separator = ","; @@ -152238,8 +152238,8 @@ var require_download_artifact = __commonJS({ var promises_1 = __importDefault2(require("fs/promises")); var crypto2 = __importStar2(require("crypto")); var stream2 = __importStar2(require("stream")); - var github3 = __importStar2(require_github2()); - var core17 = __importStar2(require_core()); + var github4 = __importStar2(require_github2()); + var core18 = __importStar2(require_core()); var httpClient = __importStar2(require_lib()); var unzip_stream_1 = __importDefault2(require_unzip()); var user_agent_1 = require_user_agent2(); @@ -152275,7 +152275,7 @@ var require_download_artifact = __commonJS({ return yield streamExtractExternal(url2, directory); } catch (error3) { retryCount++; - core17.debug(`Failed to download artifact after ${retryCount} retries due to ${error3.message}. Retrying in 5 seconds...`); + core18.debug(`Failed to download artifact after ${retryCount} retries due to ${error3.message}. 
Retrying in 5 seconds...`); yield new Promise((resolve8) => setTimeout(resolve8, 5e3)); } } @@ -152305,7 +152305,7 @@ var require_download_artifact = __commonJS({ extractStream.on("data", () => { timer.refresh(); }).on("error", (error3) => { - core17.debug(`response.message: Artifact download failed: ${error3.message}`); + core18.debug(`response.message: Artifact download failed: ${error3.message}`); clearTimeout(timer); reject(error3); }).pipe(unzip_stream_1.default.Extract({ path: directory })).on("close", () => { @@ -152313,7 +152313,7 @@ var require_download_artifact = __commonJS({ if (hashStream) { hashStream.end(); sha256Digest = hashStream.read(); - core17.info(`SHA256 digest of downloaded artifact is ${sha256Digest}`); + core18.info(`SHA256 digest of downloaded artifact is ${sha256Digest}`); } resolve8({ sha256Digest: `sha256:${sha256Digest}` }); }).on("error", (error3) => { @@ -152326,9 +152326,9 @@ var require_download_artifact = __commonJS({ function downloadArtifactPublic(artifactId, repositoryOwner, repositoryName, token, options) { return __awaiter2(this, void 0, void 0, function* () { const downloadPath = yield resolveOrCreateDirectory(options === null || options === void 0 ? 
void 0 : options.path); - const api = github3.getOctokit(token); + const api = github4.getOctokit(token); let digestMismatch = false; - core17.info(`Downloading artifact '${artifactId}' from '${repositoryOwner}/${repositoryName}'`); + core18.info(`Downloading artifact '${artifactId}' from '${repositoryOwner}/${repositoryName}'`); const { headers, status } = yield api.rest.actions.downloadArtifact({ owner: repositoryOwner, repo: repositoryName, @@ -152345,16 +152345,16 @@ var require_download_artifact = __commonJS({ if (!location) { throw new Error(`Unable to redirect to artifact download url`); } - core17.info(`Redirecting to blob download url: ${scrubQueryParameters(location)}`); + core18.info(`Redirecting to blob download url: ${scrubQueryParameters(location)}`); try { - core17.info(`Starting download of artifact to: ${downloadPath}`); + core18.info(`Starting download of artifact to: ${downloadPath}`); const extractResponse = yield streamExtract(location, downloadPath); - core17.info(`Artifact download completed successfully.`); + core18.info(`Artifact download completed successfully.`); if (options === null || options === void 0 ? void 0 : options.expectedHash) { if ((options === null || options === void 0 ? void 0 : options.expectedHash) !== extractResponse.sha256Digest) { digestMismatch = true; - core17.debug(`Computed digest: ${extractResponse.sha256Digest}`); - core17.debug(`Expected digest: ${options.expectedHash}`); + core18.debug(`Computed digest: ${extractResponse.sha256Digest}`); + core18.debug(`Expected digest: ${options.expectedHash}`); } } } catch (error3) { @@ -152381,7 +152381,7 @@ var require_download_artifact = __commonJS({ Are you trying to download from a different run? 
Try specifying a github-token with \`actions:read\` scope.`); } if (artifacts.length > 1) { - core17.warning("Multiple artifacts found, defaulting to first."); + core18.warning("Multiple artifacts found, defaulting to first."); } const signedReq = { workflowRunBackendId: artifacts[0].workflowRunBackendId, @@ -152389,16 +152389,16 @@ Are you trying to download from a different run? Try specifying a github-token w name: artifacts[0].name }; const { signedUrl } = yield artifactClient.GetSignedArtifactURL(signedReq); - core17.info(`Redirecting to blob download url: ${scrubQueryParameters(signedUrl)}`); + core18.info(`Redirecting to blob download url: ${scrubQueryParameters(signedUrl)}`); try { - core17.info(`Starting download of artifact to: ${downloadPath}`); + core18.info(`Starting download of artifact to: ${downloadPath}`); const extractResponse = yield streamExtract(signedUrl, downloadPath); - core17.info(`Artifact download completed successfully.`); + core18.info(`Artifact download completed successfully.`); if (options === null || options === void 0 ? void 0 : options.expectedHash) { if ((options === null || options === void 0 ? void 0 : options.expectedHash) !== extractResponse.sha256Digest) { digestMismatch = true; - core17.debug(`Computed digest: ${extractResponse.sha256Digest}`); - core17.debug(`Expected digest: ${options.expectedHash}`); + core18.debug(`Computed digest: ${extractResponse.sha256Digest}`); + core18.debug(`Expected digest: ${options.expectedHash}`); } } } catch (error3) { @@ -152411,10 +152411,10 @@ Are you trying to download from a different run? 
Try specifying a github-token w function resolveOrCreateDirectory() { return __awaiter2(this, arguments, void 0, function* (downloadPath = (0, config_1.getGitHubWorkspaceDir)()) { if (!(yield exists(downloadPath))) { - core17.debug(`Artifact destination folder does not exist, creating: ${downloadPath}`); + core18.debug(`Artifact destination folder does not exist, creating: ${downloadPath}`); yield promises_1.default.mkdir(downloadPath, { recursive: true }); } else { - core17.debug(`Artifact destination folder already exists: ${downloadPath}`); + core18.debug(`Artifact destination folder already exists: ${downloadPath}`); } return downloadPath; }); @@ -152455,7 +152455,7 @@ var require_retry_options = __commonJS({ }; Object.defineProperty(exports2, "__esModule", { value: true }); exports2.getRetryOptions = void 0; - var core17 = __importStar2(require_core()); + var core18 = __importStar2(require_core()); var defaultMaxRetryNumber = 5; var defaultExemptStatusCodes = [400, 401, 403, 404, 422]; function getRetryOptions(defaultOptions, retries = defaultMaxRetryNumber, exemptStatusCodes = defaultExemptStatusCodes) { @@ -152470,7 +152470,7 @@ var require_retry_options = __commonJS({ retryOptions.doNotRetry = exemptStatusCodes; } const requestOptions = Object.assign(Object.assign({}, defaultOptions.request), { retries }); - core17.debug(`GitHub client configured with: (retries: ${requestOptions.retries}, retry-exempt-status-code: ${(_a = retryOptions.doNotRetry) !== null && _a !== void 0 ? _a : "octokit default: [400, 401, 403, 404, 422]"})`); + core18.debug(`GitHub client configured with: (retries: ${requestOptions.retries}, retry-exempt-status-code: ${(_a = retryOptions.doNotRetry) !== null && _a !== void 0 ? 
_a : "octokit default: [400, 401, 403, 404, 422]"})`); return [retryOptions, requestOptions]; } exports2.getRetryOptions = getRetryOptions; @@ -152627,7 +152627,7 @@ var require_get_artifact = __commonJS({ exports2.getArtifactInternal = exports2.getArtifactPublic = void 0; var github_1 = require_github2(); var plugin_retry_1 = require_dist_node12(); - var core17 = __importStar2(require_core()); + var core18 = __importStar2(require_core()); var utils_1 = require_utils11(); var retry_options_1 = require_retry_options(); var plugin_request_log_1 = require_dist_node11(); @@ -152647,8 +152647,8 @@ var require_get_artifact = __commonJS({ retry: retryOpts, request: requestOpts }; - const github3 = (0, github_1.getOctokit)(token, opts, plugin_retry_1.retry, plugin_request_log_1.requestLog); - const getArtifactResp = yield github3.request("GET /repos/{owner}/{repo}/actions/runs/{run_id}/artifacts{?name}", { + const github4 = (0, github_1.getOctokit)(token, opts, plugin_retry_1.retry, plugin_request_log_1.requestLog); + const getArtifactResp = yield github4.request("GET /repos/{owner}/{repo}/actions/runs/{run_id}/artifacts{?name}", { owner: repositoryOwner, repo: repositoryName, run_id: workflowRunId, @@ -152665,7 +152665,7 @@ var require_get_artifact = __commonJS({ let artifact2 = getArtifactResp.data.artifacts[0]; if (getArtifactResp.data.artifacts.length > 1) { artifact2 = getArtifactResp.data.artifacts.sort((a, b) => b.id - a.id)[0]; - core17.debug(`More than one artifact found for a single name, returning newest (id: ${artifact2.id})`); + core18.debug(`More than one artifact found for a single name, returning newest (id: ${artifact2.id})`); } return { artifact: { @@ -152698,7 +152698,7 @@ var require_get_artifact = __commonJS({ let artifact2 = res.artifacts[0]; if (res.artifacts.length > 1) { artifact2 = res.artifacts.sort((a, b) => Number(b.databaseId) - Number(a.databaseId))[0]; - core17.debug(`More than one artifact found for a single name, returning newest (id: 
${artifact2.databaseId})`); + core18.debug(`More than one artifact found for a single name, returning newest (id: ${artifact2.databaseId})`); } return { artifact: { @@ -152771,9 +152771,9 @@ var require_delete_artifact = __commonJS({ retry: retryOpts, request: requestOpts }; - const github3 = (0, github_1.getOctokit)(token, opts, plugin_retry_1.retry, plugin_request_log_1.requestLog); + const github4 = (0, github_1.getOctokit)(token, opts, plugin_retry_1.retry, plugin_request_log_1.requestLog); const getArtifactResp = yield (0, get_artifact_1.getArtifactPublic)(artifactName, workflowRunId, repositoryOwner, repositoryName, token); - const deleteArtifactResp = yield github3.rest.actions.deleteArtifact({ + const deleteArtifactResp = yield github4.rest.actions.deleteArtifact({ owner: repositoryOwner, repo: repositoryName, artifact_id: getArtifactResp.artifact.id @@ -152880,9 +152880,9 @@ var require_list_artifacts = __commonJS({ retry: retryOpts, request: requestOpts }; - const github3 = (0, github_1.getOctokit)(token, opts, plugin_retry_1.retry, plugin_request_log_1.requestLog); + const github4 = (0, github_1.getOctokit)(token, opts, plugin_retry_1.retry, plugin_request_log_1.requestLog); let currentPageNumber = 1; - const { data: listArtifactResponse } = yield github3.request("GET /repos/{owner}/{repo}/actions/runs/{run_id}/artifacts", { + const { data: listArtifactResponse } = yield github4.request("GET /repos/{owner}/{repo}/actions/runs/{run_id}/artifacts", { owner: repositoryOwner, repo: repositoryName, run_id: workflowRunId, @@ -152907,7 +152907,7 @@ var require_list_artifacts = __commonJS({ currentPageNumber++; for (currentPageNumber; currentPageNumber <= numberOfPages; currentPageNumber++) { (0, core_1.debug)(`Fetching page ${currentPageNumber} of artifact list`); - const { data: listArtifactResponse2 } = yield github3.request("GET /repos/{owner}/{repo}/actions/runs/{run_id}/artifacts", { + const { data: listArtifactResponse2 } = yield github4.request("GET 
/repos/{owner}/{repo}/actions/runs/{run_id}/artifacts", { owner: repositoryOwner, repo: repositoryName, run_id: workflowRunId, @@ -155806,7 +155806,7 @@ var require_core3 = __commonJS({ ExitCode2[ExitCode2["Success"] = 0] = "Success"; ExitCode2[ExitCode2["Failure"] = 1] = "Failure"; })(ExitCode || (exports2.ExitCode = ExitCode = {})); - function exportVariable9(name, val) { + function exportVariable10(name, val) { const convertedVal = (0, utils_1.toCommandValue)(val); process.env[name] = convertedVal; const filePath = process.env["GITHUB_ENV"] || ""; @@ -155815,7 +155815,7 @@ var require_core3 = __commonJS({ } (0, command_1.issueCommand)("set-env", { name }, convertedVal); } - exports2.exportVariable = exportVariable9; + exports2.exportVariable = exportVariable10; function setSecret(secret) { (0, command_1.issueCommand)("add-mask", {}, secret); } @@ -157436,7 +157436,7 @@ var require_requestUtils2 = __commonJS({ Object.defineProperty(exports2, "__esModule", { value: true }); exports2.retryHttpClientRequest = exports2.retry = void 0; var utils_1 = require_utils13(); - var core17 = __importStar2(require_core3()); + var core18 = __importStar2(require_core3()); var config_variables_1 = require_config_variables(); function retry2(name, operation, customErrorMessages, maxAttempts) { return __awaiter2(this, void 0, void 0, function* () { @@ -157463,13 +157463,13 @@ var require_requestUtils2 = __commonJS({ errorMessage = error3.message; } if (!isRetryable) { - core17.info(`${name} - Error is not retryable`); + core18.info(`${name} - Error is not retryable`); if (response) { (0, utils_1.displayHttpDiagnostics)(response); } break; } - core17.info(`${name} - Attempt ${attempt} of ${maxAttempts} failed with error: ${errorMessage}`); + core18.info(`${name} - Attempt ${attempt} of ${maxAttempts} failed with error: ${errorMessage}`); yield (0, utils_1.sleep)((0, utils_1.getExponentialRetryTimeInMilliseconds)(attempt)); attempt++; } @@ -157553,7 +157553,7 @@ var 
require_upload_http_client = __commonJS({ Object.defineProperty(exports2, "__esModule", { value: true }); exports2.UploadHttpClient = void 0; var fs20 = __importStar2(require("fs")); - var core17 = __importStar2(require_core3()); + var core18 = __importStar2(require_core3()); var tmp = __importStar2(require_tmp_promise()); var stream2 = __importStar2(require("stream")); var utils_1 = require_utils13(); @@ -157618,7 +157618,7 @@ var require_upload_http_client = __commonJS({ return __awaiter2(this, void 0, void 0, function* () { const FILE_CONCURRENCY = (0, config_variables_1.getUploadFileConcurrency)(); const MAX_CHUNK_SIZE = (0, config_variables_1.getUploadChunkSize)(); - core17.debug(`File Concurrency: ${FILE_CONCURRENCY}, and Chunk Size: ${MAX_CHUNK_SIZE}`); + core18.debug(`File Concurrency: ${FILE_CONCURRENCY}, and Chunk Size: ${MAX_CHUNK_SIZE}`); const parameters = []; let continueOnError = true; if (options) { @@ -157655,15 +157655,15 @@ var require_upload_http_client = __commonJS({ } const startTime = perf_hooks_1.performance.now(); const uploadFileResult = yield this.uploadFileAsync(index, currentFileParameters); - if (core17.isDebug()) { - core17.debug(`File: ${++completedFiles}/${filesToUpload.length}. ${currentFileParameters.file} took ${(perf_hooks_1.performance.now() - startTime).toFixed(3)} milliseconds to finish upload`); + if (core18.isDebug()) { + core18.debug(`File: ${++completedFiles}/${filesToUpload.length}. 
${currentFileParameters.file} took ${(perf_hooks_1.performance.now() - startTime).toFixed(3)} milliseconds to finish upload`); } uploadFileSize += uploadFileResult.successfulUploadSize; totalFileSize += uploadFileResult.totalSize; if (uploadFileResult.isSuccess === false) { failedItemsToReport.push(currentFileParameters.file); if (!continueOnError) { - core17.error(`aborting artifact upload`); + core18.error(`aborting artifact upload`); abortPendingFileUploads = true; } } @@ -157672,7 +157672,7 @@ var require_upload_http_client = __commonJS({ }))); this.statusReporter.stop(); this.uploadHttpManager.disposeAndReplaceAllClients(); - core17.info(`Total size of all the files uploaded is ${uploadFileSize} bytes`); + core18.info(`Total size of all the files uploaded is ${uploadFileSize} bytes`); return { uploadSize: uploadFileSize, totalSize: totalFileSize, @@ -157698,16 +157698,16 @@ var require_upload_http_client = __commonJS({ let uploadFileSize = 0; let isGzip = true; if (!isFIFO && totalFileSize < 65536) { - core17.debug(`${parameters.file} is less than 64k in size. Creating a gzip file in-memory to potentially reduce the upload size`); + core18.debug(`${parameters.file} is less than 64k in size. Creating a gzip file in-memory to potentially reduce the upload size`); const buffer = yield (0, upload_gzip_1.createGZipFileInBuffer)(parameters.file); let openUploadStream; if (totalFileSize < buffer.byteLength) { - core17.debug(`The gzip file created for ${parameters.file} did not help with reducing the size of the file. The original file will be uploaded as-is`); + core18.debug(`The gzip file created for ${parameters.file} did not help with reducing the size of the file. The original file will be uploaded as-is`); openUploadStream = () => fs20.createReadStream(parameters.file); isGzip = false; uploadFileSize = totalFileSize; } else { - core17.debug(`A gzip file created for ${parameters.file} helped with reducing the size of the original file. 
The file will be uploaded using gzip.`); + core18.debug(`A gzip file created for ${parameters.file} helped with reducing the size of the original file. The file will be uploaded using gzip.`); openUploadStream = () => { const passThrough = new stream2.PassThrough(); passThrough.end(buffer); @@ -157719,7 +157719,7 @@ var require_upload_http_client = __commonJS({ if (!result) { isUploadSuccessful = false; failedChunkSizes += uploadFileSize; - core17.warning(`Aborting upload for ${parameters.file} due to failure`); + core18.warning(`Aborting upload for ${parameters.file} due to failure`); } return { isSuccess: isUploadSuccessful, @@ -157728,16 +157728,16 @@ var require_upload_http_client = __commonJS({ }; } else { const tempFile = yield tmp.file(); - core17.debug(`${parameters.file} is greater than 64k in size. Creating a gzip file on-disk ${tempFile.path} to potentially reduce the upload size`); + core18.debug(`${parameters.file} is greater than 64k in size. Creating a gzip file on-disk ${tempFile.path} to potentially reduce the upload size`); uploadFileSize = yield (0, upload_gzip_1.createGZipFileOnDisk)(parameters.file, tempFile.path); let uploadFilePath = tempFile.path; if (!isFIFO && totalFileSize < uploadFileSize) { - core17.debug(`The gzip file created for ${parameters.file} did not help with reducing the size of the file. The original file will be uploaded as-is`); + core18.debug(`The gzip file created for ${parameters.file} did not help with reducing the size of the file. The original file will be uploaded as-is`); uploadFileSize = totalFileSize; uploadFilePath = parameters.file; isGzip = false; } else { - core17.debug(`The gzip file created for ${parameters.file} is smaller than the original file. The file will be uploaded using gzip.`); + core18.debug(`The gzip file created for ${parameters.file} is smaller than the original file. 
The file will be uploaded using gzip.`); } let abortFileUpload = false; while (offset < uploadFileSize) { @@ -157757,7 +157757,7 @@ var require_upload_http_client = __commonJS({ if (!result) { isUploadSuccessful = false; failedChunkSizes += chunkSize; - core17.warning(`Aborting upload for ${parameters.file} due to failure`); + core18.warning(`Aborting upload for ${parameters.file} due to failure`); abortFileUpload = true; } else { if (uploadFileSize > 8388608) { @@ -157765,7 +157765,7 @@ var require_upload_http_client = __commonJS({ } } } - core17.debug(`deleting temporary gzip file ${tempFile.path}`); + core18.debug(`deleting temporary gzip file ${tempFile.path}`); yield tempFile.cleanup(); return { isSuccess: isUploadSuccessful, @@ -157804,7 +157804,7 @@ var require_upload_http_client = __commonJS({ if (response) { (0, utils_1.displayHttpDiagnostics)(response); } - core17.info(`Retry limit has been reached for chunk at offset ${start} to ${resourceUrl}`); + core18.info(`Retry limit has been reached for chunk at offset ${start} to ${resourceUrl}`); return true; } return false; @@ -157812,14 +157812,14 @@ var require_upload_http_client = __commonJS({ const backOff = (retryAfterValue) => __awaiter2(this, void 0, void 0, function* () { this.uploadHttpManager.disposeAndReplaceClient(httpClientIndex); if (retryAfterValue) { - core17.info(`Backoff due to too many requests, retry #${retryCount}. Waiting for ${retryAfterValue} milliseconds before continuing the upload`); + core18.info(`Backoff due to too many requests, retry #${retryCount}. Waiting for ${retryAfterValue} milliseconds before continuing the upload`); yield (0, utils_1.sleep)(retryAfterValue); } else { const backoffTime = (0, utils_1.getExponentialRetryTimeInMilliseconds)(retryCount); - core17.info(`Exponential backoff for retry #${retryCount}. Waiting for ${backoffTime} milliseconds before continuing the upload at offset ${start}`); + core18.info(`Exponential backoff for retry #${retryCount}. 
Waiting for ${backoffTime} milliseconds before continuing the upload at offset ${start}`); yield (0, utils_1.sleep)(backoffTime); } - core17.info(`Finished backoff for retry #${retryCount}, continuing with upload`); + core18.info(`Finished backoff for retry #${retryCount}, continuing with upload`); return; }); while (retryCount <= retryLimit) { @@ -157827,7 +157827,7 @@ var require_upload_http_client = __commonJS({ try { response = yield uploadChunkRequest(); } catch (error3) { - core17.info(`An error has been caught http-client index ${httpClientIndex}, retrying the upload`); + core18.info(`An error has been caught http-client index ${httpClientIndex}, retrying the upload`); console.log(error3); if (incrementAndCheckRetryLimit()) { return false; @@ -157839,13 +157839,13 @@ var require_upload_http_client = __commonJS({ if ((0, utils_1.isSuccessStatusCode)(response.message.statusCode)) { return true; } else if ((0, utils_1.isRetryableStatusCode)(response.message.statusCode)) { - core17.info(`A ${response.message.statusCode} status code has been received, will attempt to retry the upload`); + core18.info(`A ${response.message.statusCode} status code has been received, will attempt to retry the upload`); if (incrementAndCheckRetryLimit(response)) { return false; } (0, utils_1.isThrottledStatusCode)(response.message.statusCode) ? yield backOff((0, utils_1.tryGetRetryAfterValueTimeInMilliseconds)(response.message.headers)) : yield backOff(); } else { - core17.error(`Unexpected response. Unable to upload chunk to ${resourceUrl}`); + core18.error(`Unexpected response. 
Unable to upload chunk to ${resourceUrl}`); (0, utils_1.displayHttpDiagnostics)(response); return false; } @@ -157863,7 +157863,7 @@ var require_upload_http_client = __commonJS({ resourceUrl.searchParams.append("artifactName", artifactName); const parameters = { Size: size }; const data = JSON.stringify(parameters, null, 2); - core17.debug(`URL is ${resourceUrl.toString()}`); + core18.debug(`URL is ${resourceUrl.toString()}`); const client = this.uploadHttpManager.getClient(0); const headers = (0, utils_1.getUploadHeaders)("application/json", false); const customErrorMessages = /* @__PURE__ */ new Map([ @@ -157876,7 +157876,7 @@ var require_upload_http_client = __commonJS({ return client.patch(resourceUrl.toString(), data, headers); }), customErrorMessages); yield response.readBody(); - core17.debug(`Artifact ${artifactName} has been successfully uploaded, total size in bytes: ${size}`); + core18.debug(`Artifact ${artifactName} has been successfully uploaded, total size in bytes: ${size}`); }); } }; @@ -157945,7 +157945,7 @@ var require_download_http_client = __commonJS({ Object.defineProperty(exports2, "__esModule", { value: true }); exports2.DownloadHttpClient = void 0; var fs20 = __importStar2(require("fs")); - var core17 = __importStar2(require_core3()); + var core18 = __importStar2(require_core3()); var zlib3 = __importStar2(require("zlib")); var utils_1 = require_utils13(); var url_1 = require("url"); @@ -157999,11 +157999,11 @@ var require_download_http_client = __commonJS({ downloadSingleArtifact(downloadItems) { return __awaiter2(this, void 0, void 0, function* () { const DOWNLOAD_CONCURRENCY = (0, config_variables_1.getDownloadFileConcurrency)(); - core17.debug(`Download file concurrency is set to ${DOWNLOAD_CONCURRENCY}`); + core18.debug(`Download file concurrency is set to ${DOWNLOAD_CONCURRENCY}`); const parallelDownloads = [...new Array(DOWNLOAD_CONCURRENCY).keys()]; let currentFile = 0; let downloadedFiles = 0; - core17.info(`Total number of files 
that will be downloaded: ${downloadItems.length}`); + core18.info(`Total number of files that will be downloaded: ${downloadItems.length}`); this.statusReporter.setTotalNumberOfFilesToProcess(downloadItems.length); this.statusReporter.start(); yield Promise.all(parallelDownloads.map((index) => __awaiter2(this, void 0, void 0, function* () { @@ -158012,8 +158012,8 @@ var require_download_http_client = __commonJS({ currentFile += 1; const startTime = perf_hooks_1.performance.now(); yield this.downloadIndividualFile(index, currentFileToDownload.sourceLocation, currentFileToDownload.targetPath); - if (core17.isDebug()) { - core17.debug(`File: ${++downloadedFiles}/${downloadItems.length}. ${currentFileToDownload.targetPath} took ${(perf_hooks_1.performance.now() - startTime).toFixed(3)} milliseconds to finish downloading`); + if (core18.isDebug()) { + core18.debug(`File: ${++downloadedFiles}/${downloadItems.length}. ${currentFileToDownload.targetPath} took ${(perf_hooks_1.performance.now() - startTime).toFixed(3)} milliseconds to finish downloading`); } this.statusReporter.incrementProcessedCount(); } @@ -158051,19 +158051,19 @@ var require_download_http_client = __commonJS({ } else { this.downloadHttpManager.disposeAndReplaceClient(httpClientIndex); if (retryAfterValue) { - core17.info(`Backoff due to too many requests, retry #${retryCount}. Waiting for ${retryAfterValue} milliseconds before continuing the download`); + core18.info(`Backoff due to too many requests, retry #${retryCount}. Waiting for ${retryAfterValue} milliseconds before continuing the download`); yield (0, utils_1.sleep)(retryAfterValue); } else { const backoffTime = (0, utils_1.getExponentialRetryTimeInMilliseconds)(retryCount); - core17.info(`Exponential backoff for retry #${retryCount}. Waiting for ${backoffTime} milliseconds before continuing the download`); + core18.info(`Exponential backoff for retry #${retryCount}. 
Waiting for ${backoffTime} milliseconds before continuing the download`); yield (0, utils_1.sleep)(backoffTime); } - core17.info(`Finished backoff for retry #${retryCount}, continuing with download`); + core18.info(`Finished backoff for retry #${retryCount}, continuing with download`); } }); const isAllBytesReceived = (expected, received) => { if (!expected || !received || process.env["ACTIONS_ARTIFACT_SKIP_DOWNLOAD_VALIDATION"]) { - core17.info("Skipping download validation."); + core18.info("Skipping download validation."); return true; } return parseInt(expected) === received; @@ -158084,7 +158084,7 @@ var require_download_http_client = __commonJS({ try { response = yield makeDownloadRequest(); } catch (error3) { - core17.info("An error occurred while attempting to download a file"); + core18.info("An error occurred while attempting to download a file"); console.log(error3); yield backOff(); continue; @@ -158104,7 +158104,7 @@ var require_download_http_client = __commonJS({ } } if (forceRetry || (0, utils_1.isRetryableStatusCode)(response.message.statusCode)) { - core17.info(`A ${response.message.statusCode} response code has been received while attempting to download an artifact`); + core18.info(`A ${response.message.statusCode} response code has been received while attempting to download an artifact`); resetDestinationStream(downloadPath); (0, utils_1.isThrottledStatusCode)(response.message.statusCode) ? 
yield backOff((0, utils_1.tryGetRetryAfterValueTimeInMilliseconds)(response.message.headers)) : yield backOff(); } else { @@ -158126,29 +158126,29 @@ var require_download_http_client = __commonJS({ if (isGzip) { const gunzip = zlib3.createGunzip(); response.message.on("error", (error3) => { - core17.info(`An error occurred while attempting to read the response stream`); + core18.info(`An error occurred while attempting to read the response stream`); gunzip.close(); destinationStream.close(); reject(error3); }).pipe(gunzip).on("error", (error3) => { - core17.info(`An error occurred while attempting to decompress the response stream`); + core18.info(`An error occurred while attempting to decompress the response stream`); destinationStream.close(); reject(error3); }).pipe(destinationStream).on("close", () => { resolve8(); }).on("error", (error3) => { - core17.info(`An error occurred while writing a downloaded file to ${destinationStream.path}`); + core18.info(`An error occurred while writing a downloaded file to ${destinationStream.path}`); reject(error3); }); } else { response.message.on("error", (error3) => { - core17.info(`An error occurred while attempting to read the response stream`); + core18.info(`An error occurred while attempting to read the response stream`); destinationStream.close(); reject(error3); }).pipe(destinationStream).on("close", () => { resolve8(); }).on("error", (error3) => { - core17.info(`An error occurred while writing a downloaded file to ${destinationStream.path}`); + core18.info(`An error occurred while writing a downloaded file to ${destinationStream.path}`); reject(error3); }); } @@ -158287,7 +158287,7 @@ var require_artifact_client = __commonJS({ }; Object.defineProperty(exports2, "__esModule", { value: true }); exports2.DefaultArtifactClient = void 0; - var core17 = __importStar2(require_core3()); + var core18 = __importStar2(require_core3()); var upload_specification_1 = require_upload_specification(); var upload_http_client_1 = 
require_upload_http_client(); var utils_1 = require_utils13(); @@ -158308,7 +158308,7 @@ var require_artifact_client = __commonJS({ */ uploadArtifact(name, files, rootDirectory, options) { return __awaiter2(this, void 0, void 0, function* () { - core17.info(`Starting artifact upload + core18.info(`Starting artifact upload For more detailed logs during the artifact upload process, enable step-debugging: https://docs.github.com/actions/monitoring-and-troubleshooting-workflows/enabling-debug-logging#enabling-step-debug-logging`); (0, path_and_artifact_name_validation_1.checkArtifactName)(name); const uploadSpecification = (0, upload_specification_1.getUploadSpecification)(name, rootDirectory, files); @@ -158320,24 +158320,24 @@ For more detailed logs during the artifact upload process, enable step-debugging }; const uploadHttpClient = new upload_http_client_1.UploadHttpClient(); if (uploadSpecification.length === 0) { - core17.warning(`No files found that can be uploaded`); + core18.warning(`No files found that can be uploaded`); } else { const response = yield uploadHttpClient.createArtifactInFileContainer(name, options); if (!response.fileContainerResourceUrl) { - core17.debug(response.toString()); + core18.debug(response.toString()); throw new Error("No URL provided by the Artifact Service to upload an artifact to"); } - core17.debug(`Upload Resource URL: ${response.fileContainerResourceUrl}`); - core17.info(`Container for artifact "${name}" successfully created. Starting upload of file(s)`); + core18.debug(`Upload Resource URL: ${response.fileContainerResourceUrl}`); + core18.info(`Container for artifact "${name}" successfully created. Starting upload of file(s)`); const uploadResult = yield uploadHttpClient.uploadArtifactToFileContainer(response.fileContainerResourceUrl, uploadSpecification, options); - core17.info(`File upload process has finished. Finalizing the artifact upload`); + core18.info(`File upload process has finished. 
Finalizing the artifact upload`); yield uploadHttpClient.patchArtifactSize(uploadResult.totalSize, name); if (uploadResult.failedItems.length > 0) { - core17.info(`Upload finished. There were ${uploadResult.failedItems.length} items that failed to upload`); + core18.info(`Upload finished. There were ${uploadResult.failedItems.length} items that failed to upload`); } else { - core17.info(`Artifact has been finalized. All files have been successfully uploaded!`); + core18.info(`Artifact has been finalized. All files have been successfully uploaded!`); } - core17.info(` + core18.info(` The raw size of all the files that were specified for upload is ${uploadResult.totalSize} bytes The size of all the files that were uploaded is ${uploadResult.uploadSize} bytes. This takes into account any gzip compression used to reduce the upload size, time and storage @@ -158371,10 +158371,10 @@ Note: The size of downloaded zips can differ significantly from the reported siz path19 = (0, path_1.resolve)(path19); const downloadSpecification = (0, download_specification_1.getDownloadSpecification)(name, items.value, path19, (options === null || options === void 0 ? 
void 0 : options.createArtifactFolder) || false); if (downloadSpecification.filesToDownload.length === 0) { - core17.info(`No downloadable files were found for the artifact: ${artifactToDownload.name}`); + core18.info(`No downloadable files were found for the artifact: ${artifactToDownload.name}`); } else { yield (0, utils_1.createDirectoriesForArtifact)(downloadSpecification.directoryStructure); - core17.info("Directory structure has been set up for the artifact"); + core18.info("Directory structure has been set up for the artifact"); yield (0, utils_1.createEmptyFilesForArtifact)(downloadSpecification.emptyFilesToCreate); yield downloadHttpClient.downloadSingleArtifact(downloadSpecification.filesToDownload); } @@ -158390,7 +158390,7 @@ Note: The size of downloaded zips can differ significantly from the reported siz const response = []; const artifacts = yield downloadHttpClient.listArtifacts(); if (artifacts.count === 0) { - core17.info("Unable to find any artifacts for the associated workflow"); + core18.info("Unable to find any artifacts for the associated workflow"); return response; } if (!path19) { @@ -158402,11 +158402,11 @@ Note: The size of downloaded zips can differ significantly from the reported siz while (downloadedArtifacts < artifacts.count) { const currentArtifactToDownload = artifacts.value[downloadedArtifacts]; downloadedArtifacts += 1; - core17.info(`starting download of artifact ${currentArtifactToDownload.name} : ${downloadedArtifacts}/${artifacts.count}`); + core18.info(`starting download of artifact ${currentArtifactToDownload.name} : ${downloadedArtifacts}/${artifacts.count}`); const items = yield downloadHttpClient.getContainerItems(currentArtifactToDownload.name, currentArtifactToDownload.fileContainerResourceUrl); const downloadSpecification = (0, download_specification_1.getDownloadSpecification)(currentArtifactToDownload.name, items.value, path19, true); if (downloadSpecification.filesToDownload.length === 0) { - core17.info(`No 
downloadable files were found for any artifact ${currentArtifactToDownload.name}`); + core18.info(`No downloadable files were found for any artifact ${currentArtifactToDownload.name}`); } else { yield (0, utils_1.createDirectoriesForArtifact)(downloadSpecification.directoryStructure); yield (0, utils_1.createEmptyFilesForArtifact)(downloadSpecification.emptyFilesToCreate); @@ -161336,7 +161336,7 @@ var require_sarif_schema_2_1_0 = __commonJS({ }); // src/init-action-post.ts -var core16 = __toESM(require_core()); +var core17 = __toESM(require_core()); // src/actions-util.ts var fs2 = __toESM(require("fs")); @@ -165128,6 +165128,7 @@ var semver2 = __toESM(require_semver2()); var RepositoryPropertyName = /* @__PURE__ */ ((RepositoryPropertyName2) => { RepositoryPropertyName2["DISABLE_OVERLAY"] = "github-codeql-disable-overlay"; RepositoryPropertyName2["EXTRA_QUERIES"] = "github-codeql-extra-queries"; + RepositoryPropertyName2["FILE_COVERAGE_ON_PRS"] = "github-codeql-file-coverage-on-prs"; return RepositoryPropertyName2; })(RepositoryPropertyName || {}); var KNOWN_REPOSITORY_PROPERTY_NAMES = new Set( @@ -165735,11 +165736,8 @@ var featureConfig = { ["skip_file_coverage_on_prs" /* SkipFileCoverageOnPrs */]: { defaultValue: false, envVar: "CODEQL_ACTION_SKIP_FILE_COVERAGE_ON_PRS", - // For testing, this is not behind a CLI version check yet. However - // before rolling this out externally, we should set a minimum version here - // since current versions of the CodeQL CLI will log if baseline information - // cannot be found when interpreting results. 
- minimumVersion: void 0 + minimumVersion: void 0, + toolsFeature: "suppressesMissingFileBaselineWarning" /* SuppressesMissingFileBaselineWarning */ }, ["start_proxy_remove_unused_registries" /* StartProxyRemoveUnusedRegistries */]: { defaultValue: false, @@ -168344,14 +168342,14 @@ async function createDatabaseBundleCli(codeql, config, language) { // src/init-action-post-helper.ts var fs19 = __toESM(require("fs")); var import_path3 = __toESM(require("path")); -var github2 = __toESM(require_github()); +var github3 = __toESM(require_github()); // src/upload-lib.ts var fs17 = __toESM(require("fs")); var path16 = __toESM(require("path")); var url = __toESM(require("url")); var import_zlib = __toESM(require("zlib")); -var core13 = __toESM(require_core()); +var core14 = __toESM(require_core()); var jsonschema2 = __toESM(require_lib2()); // src/fingerprints.ts @@ -169477,7 +169475,9 @@ async function addFingerprints(sarifLog, sourceRoot, logger) { } // src/init.ts +var core13 = __toESM(require_core()); var toolrunner4 = __toESM(require_toolrunner()); +var github2 = __toESM(require_github()); var io6 = __toESM(require_io()); async function initCodeQL(toolsInput, apiDetails, tempDir, variant, defaultCliVersion, features, logger) { logger.startGroup("Setup CodeQL tools"); @@ -169619,7 +169619,7 @@ async function combineSarifFilesUsingCLI(sarifFiles, gitHubVersion, features, lo logger.warning( `Uploading multiple SARIF runs with the same category is deprecated ${deprecationWarningMessage}. Please update your workflow to upload a single run per category. 
${deprecationMoreInformationMessage}` ); - core13.exportVariable("CODEQL_MERGE_SARIF_DEPRECATION_WARNING", "true"); + core14.exportVariable("CODEQL_MERGE_SARIF_DEPRECATION_WARNING", "true"); } return combineSarifFiles(sarifFiles, logger); } @@ -169718,13 +169718,13 @@ async function uploadPayload(payload, repositoryNwo, logger, analysis) { if (httpError !== void 0) { switch (httpError.status) { case 403: - core13.warning(httpError.message || GENERIC_403_MSG); + core14.warning(httpError.message || GENERIC_403_MSG); break; case 404: - core13.warning(httpError.message || GENERIC_404_MSG); + core14.warning(httpError.message || GENERIC_404_MSG); break; default: - core13.warning(httpError.message); + core14.warning(httpError.message); break; } } @@ -170082,7 +170082,7 @@ function validateUniqueCategory(sarifLog, sentinelPrefix) { `Aborting upload: only one run of the codeql/analyze or codeql/upload-sarif actions is allowed per job per tool/category. The easiest fix is to specify a unique value for the \`category\` input. If .runs[].automationDetails.id is specified in the sarif file, that will take precedence over your configured \`category\`. Category: (${id ? id : "none"}) Tool: (${tool ? 
tool : "none"})` ); } - core13.exportVariable(sentinelEnvVar, sentinelEnvVar); + core14.exportVariable(sentinelEnvVar, sentinelEnvVar); } } function sanitize(str2) { @@ -170125,7 +170125,7 @@ function filterAlertsByDiffRange(logger, sarifLog) { var fs18 = __toESM(require("fs")); var path17 = __toESM(require("path")); var import_zlib2 = __toESM(require("zlib")); -var core14 = __toESM(require_core()); +var core15 = __toESM(require_core()); function toCodedErrors(errors) { return Object.entries(errors).reduce( (acc, [code, message]) => { @@ -170416,7 +170416,7 @@ async function uploadFailureInfo(uploadAllAvailableDebugArtifacts, printDebugLog ); } if (process.env["CODEQL_ACTION_EXPECT_UPLOAD_FAILED_SARIF"] === "true") { - if (!github2.context.payload.pull_request?.head.repo.fork) { + if (!github3.context.payload.pull_request?.head.repo.fork) { await removeUploadedSarif(uploadFailedSarifResult, logger); } else { logger.info( @@ -170559,7 +170559,7 @@ async function removeUploadedSarif(uploadFailedSarifResult, logger) { // src/status-report.ts var os3 = __toESM(require("os")); -var core15 = __toESM(require_core()); +var core16 = __toESM(require_core()); function isFirstPartyAnalysis(actionName) { if (actionName !== "upload-sarif" /* UploadSarif */) { return true; @@ -170596,12 +170596,12 @@ function getJobStatusDisplayName(status) { } function setJobStatusIfUnsuccessful(actionStatus) { if (actionStatus === "user-error") { - core15.exportVariable( + core16.exportVariable( "CODEQL_ACTION_JOB_STATUS" /* JOB_STATUS */, process.env["CODEQL_ACTION_JOB_STATUS" /* JOB_STATUS */] ?? "JOB_STATUS_CONFIGURATION_ERROR" /* ConfigErrorStatus */ ); } else if (actionStatus === "failure" || actionStatus === "aborted") { - core15.exportVariable( + core16.exportVariable( "CODEQL_ACTION_JOB_STATUS" /* JOB_STATUS */, process.env["CODEQL_ACTION_JOB_STATUS" /* JOB_STATUS */] ?? 
"JOB_STATUS_FAILURE" /* FailureStatus */ ); @@ -170620,14 +170620,14 @@ async function createStatusReportBase(actionName, status, actionStartedAt, confi let workflowStartedAt = process.env["CODEQL_WORKFLOW_STARTED_AT" /* WORKFLOW_STARTED_AT */]; if (workflowStartedAt === void 0) { workflowStartedAt = actionStartedAt.toISOString(); - core15.exportVariable("CODEQL_WORKFLOW_STARTED_AT" /* WORKFLOW_STARTED_AT */, workflowStartedAt); + core16.exportVariable("CODEQL_WORKFLOW_STARTED_AT" /* WORKFLOW_STARTED_AT */, workflowStartedAt); } const runnerOs = getRequiredEnvParam("RUNNER_OS"); const codeQlCliVersion = getCachedCodeQlVersion(); const actionRef = process.env["GITHUB_ACTION_REF"] || ""; const testingEnvironment = getTestingEnvironment(); if (testingEnvironment) { - core15.exportVariable("CODEQL_ACTION_TESTING_ENVIRONMENT" /* TESTING_ENVIRONMENT */, testingEnvironment); + core16.exportVariable("CODEQL_ACTION_TESTING_ENVIRONMENT" /* TESTING_ENVIRONMENT */, testingEnvironment); } const isSteadyStateDefaultSetupRun = process.env["CODE_SCANNING_IS_STEADY_STATE_DEFAULT_SETUP"] === "true"; const statusReport = { @@ -170710,9 +170710,9 @@ var INCOMPATIBLE_MSG = "CodeQL Action version is incompatible with the API endpo async function sendStatusReport(statusReport) { setJobStatusIfUnsuccessful(statusReport.status); const statusReportJSON = JSON.stringify(statusReport); - core15.debug(`Sending status report: ${statusReportJSON}`); + core16.debug(`Sending status report: ${statusReportJSON}`); if (isInTestMode()) { - core15.debug("In test mode. Status reports are not uploaded."); + core16.debug("In test mode. 
Status reports are not uploaded."); return; } const nwo = getRepositoryNwo(); @@ -170732,28 +170732,28 @@ async function sendStatusReport(statusReport) { switch (httpError.status) { case 403: if (getWorkflowEventName() === "push" && process.env["GITHUB_ACTOR"] === "dependabot[bot]") { - core15.warning( + core16.warning( `Workflows triggered by Dependabot on the "push" event run with read-only access. Uploading CodeQL results requires write access. To use CodeQL with Dependabot, please ensure you are using the "pull_request" event for this workflow and avoid triggering on the "push" event for Dependabot branches. See ${"https://docs.github.com/en/code-security/code-scanning/creating-an-advanced-setup-for-code-scanning/customizing-your-advanced-setup-for-code-scanning#scanning-on-push" /* SCANNING_ON_PUSH */} for more information on how to configure these events.` ); } else { - core15.warning( + core16.warning( `This run of the CodeQL Action does not have permission to access the CodeQL Action API endpoints. This could be because the Action is running on a pull request from a fork. If not, please ensure the workflow has at least the 'security-events: read' permission. 
Details: ${httpError.message}` ); } return; case 404: - core15.warning(httpError.message); + core16.warning(httpError.message); return; case 422: if (getRequiredEnvParam("GITHUB_SERVER_URL") !== GITHUB_DOTCOM_URL) { - core15.debug(INCOMPATIBLE_MSG); + core16.debug(INCOMPATIBLE_MSG); } else { - core15.debug(OUT_OF_DATE_MSG); + core16.debug(OUT_OF_DATE_MSG); } return; } } - core15.warning( + core16.warning( `An unexpected error occurred when sending a status report: ${getErrorMessage( e )}` @@ -170824,7 +170824,7 @@ async function run(startedAt) { } } catch (unwrappedError) { const error3 = wrapError(unwrappedError); - core16.setFailed(error3.message); + core17.setFailed(error3.message); const statusReportBase2 = await createStatusReportBase( "init-post" /* InitPost */, getActionsStatus(error3), @@ -170869,14 +170869,14 @@ function getFinalJobStatus(config) { } let jobStatus; if (process.env["CODEQL_ACTION_ANALYZE_DID_COMPLETE_SUCCESSFULLY" /* ANALYZE_DID_COMPLETE_SUCCESSFULLY */] === "true") { - core16.exportVariable("CODEQL_ACTION_JOB_STATUS" /* JOB_STATUS */, "JOB_STATUS_SUCCESS" /* SuccessStatus */); + core17.exportVariable("CODEQL_ACTION_JOB_STATUS" /* JOB_STATUS */, "JOB_STATUS_SUCCESS" /* SuccessStatus */); jobStatus = "JOB_STATUS_SUCCESS" /* SuccessStatus */; } else if (config !== void 0) { jobStatus = "JOB_STATUS_CONFIGURATION_ERROR" /* ConfigErrorStatus */; } else { jobStatus = "JOB_STATUS_UNKNOWN" /* UnknownStatus */; } - core16.exportVariable("CODEQL_ACTION_JOB_STATUS" /* JOB_STATUS */, jobStatus); + core17.exportVariable("CODEQL_ACTION_JOB_STATUS" /* JOB_STATUS */, jobStatus); return jobStatus; } function getJobStatusFromEnvironment() { @@ -170895,7 +170895,7 @@ async function runWrapper() { try { await run(startedAt); } catch (error3) { - core16.setFailed(`init post action failed: ${wrapError(error3).message}`); + core17.setFailed(`init post action failed: ${wrapError(error3).message}`); await sendUnhandledErrorStatusReport( "init-post" /* InitPost */, 
startedAt, diff --git a/lib/init-action.js b/lib/init-action.js index 5d5a6fa598..b177651be4 100644 --- a/lib/init-action.js +++ b/lib/init-action.js @@ -3842,18 +3842,18 @@ var require_webidl = __commonJS({ webidl.errors.exception = function(message) { return new TypeError(`${message.header}: ${message.message}`); }; - webidl.errors.conversionFailed = function(context3) { - const plural = context3.types.length === 1 ? "" : " one of"; - const message = `${context3.argument} could not be converted to${plural}: ${context3.types.join(", ")}.`; + webidl.errors.conversionFailed = function(context4) { + const plural = context4.types.length === 1 ? "" : " one of"; + const message = `${context4.argument} could not be converted to${plural}: ${context4.types.join(", ")}.`; return webidl.errors.exception({ - header: context3.prefix, + header: context4.prefix, message }); }; - webidl.errors.invalidArgument = function(context3) { + webidl.errors.invalidArgument = function(context4) { return webidl.errors.exception({ - header: context3.prefix, - message: `"${context3.value}" is an invalid ${context3.type}.` + header: context4.prefix, + message: `"${context4.value}" is an invalid ${context4.type}.` }); }; webidl.brandCheck = function(V, I, opts) { @@ -9849,17 +9849,17 @@ var require_api_request = __commonJS({ } } } - onConnect(abort, context3) { + onConnect(abort, context4) { if (this.reason) { abort(this.reason); return; } assert(this.callback); this.abort = abort; - this.context = context3; + this.context = context4; } onHeaders(statusCode, rawHeaders, resume, statusMessage) { - const { callback, opaque, abort, context: context3, responseHeaders, highWaterMark } = this; + const { callback, opaque, abort, context: context4, responseHeaders, highWaterMark } = this; const headers = responseHeaders === "raw" ? 
util.parseRawHeaders(rawHeaders) : util.parseHeaders(rawHeaders); if (statusCode < 200) { if (this.onInfo) { @@ -9896,7 +9896,7 @@ var require_api_request = __commonJS({ trailers: this.trailers, opaque, body: res, - context: context3 + context: context4 }); } } @@ -10065,17 +10065,17 @@ var require_api_stream = __commonJS({ } addSignal(this, signal); } - onConnect(abort, context3) { + onConnect(abort, context4) { if (this.reason) { abort(this.reason); return; } assert(this.callback); this.abort = abort; - this.context = context3; + this.context = context4; } onHeaders(statusCode, rawHeaders, resume, statusMessage) { - const { factory, opaque, context: context3, callback, responseHeaders } = this; + const { factory, opaque, context: context4, callback, responseHeaders } = this; const headers = responseHeaders === "raw" ? util.parseRawHeaders(rawHeaders) : util.parseHeaders(rawHeaders); if (statusCode < 200) { if (this.onInfo) { @@ -10103,7 +10103,7 @@ var require_api_stream = __commonJS({ statusCode, headers, opaque, - context: context3 + context: context4 }); if (!res || typeof res.write !== "function" || typeof res.end !== "function" || typeof res.on !== "function") { throw new InvalidReturnValueError("expected Writable"); @@ -10295,7 +10295,7 @@ var require_api_pipeline = __commonJS({ this.res = null; addSignal(this, signal); } - onConnect(abort, context3) { + onConnect(abort, context4) { const { ret, res } = this; if (this.reason) { abort(this.reason); @@ -10304,10 +10304,10 @@ var require_api_pipeline = __commonJS({ assert(!res, "pipeline cannot be retried"); assert(!ret.destroyed); this.abort = abort; - this.context = context3; + this.context = context4; } onHeaders(statusCode, rawHeaders, resume) { - const { opaque, handler: handler2, context: context3 } = this; + const { opaque, handler: handler2, context: context4 } = this; if (statusCode < 200) { if (this.onInfo) { const headers = this.responseHeaders === "raw" ? 
util.parseRawHeaders(rawHeaders) : util.parseHeaders(rawHeaders); @@ -10325,7 +10325,7 @@ var require_api_pipeline = __commonJS({ headers, opaque, body: this.res, - context: context3 + context: context4 }); } catch (err) { this.res.on("error", util.nop); @@ -10409,7 +10409,7 @@ var require_api_upgrade = __commonJS({ this.context = null; addSignal(this, signal); } - onConnect(abort, context3) { + onConnect(abort, context4) { if (this.reason) { abort(this.reason); return; @@ -10423,7 +10423,7 @@ var require_api_upgrade = __commonJS({ } onUpgrade(statusCode, rawHeaders, socket) { assert(statusCode === 101); - const { callback, opaque, context: context3 } = this; + const { callback, opaque, context: context4 } = this; removeSignal(this); this.callback = null; const headers = this.responseHeaders === "raw" ? util.parseRawHeaders(rawHeaders) : util.parseHeaders(rawHeaders); @@ -10431,7 +10431,7 @@ var require_api_upgrade = __commonJS({ headers, socket, opaque, - context: context3 + context: context4 }); } onError(err) { @@ -10500,20 +10500,20 @@ var require_api_connect = __commonJS({ this.abort = null; addSignal(this, signal); } - onConnect(abort, context3) { + onConnect(abort, context4) { if (this.reason) { abort(this.reason); return; } assert(this.callback); this.abort = abort; - this.context = context3; + this.context = context4; } onHeaders() { throw new SocketError("bad connect", null); } onUpgrade(statusCode, rawHeaders, socket) { - const { callback, opaque, context: context3 } = this; + const { callback, opaque, context: context4 } = this; removeSignal(this); this.callback = null; let headers = rawHeaders; @@ -10525,7 +10525,7 @@ var require_api_connect = __commonJS({ headers, socket, opaque, - context: context3 + context: context4 }); } onError(err) { @@ -21321,7 +21321,7 @@ var require_core = __commonJS({ }; Object.defineProperty(exports2, "__esModule", { value: true }); exports2.platform = exports2.toPlatformPath = exports2.toWin32Path = exports2.toPosixPath = 
exports2.markdownSummary = exports2.summary = exports2.ExitCode = void 0; - exports2.exportVariable = exportVariable6; + exports2.exportVariable = exportVariable7; exports2.setSecret = setSecret; exports2.addPath = addPath2; exports2.getInput = getInput2; @@ -21353,7 +21353,7 @@ var require_core = __commonJS({ ExitCode2[ExitCode2["Success"] = 0] = "Success"; ExitCode2[ExitCode2["Failure"] = 1] = "Failure"; })(ExitCode || (exports2.ExitCode = ExitCode = {})); - function exportVariable6(name, val) { + function exportVariable7(name, val) { const convertedVal = (0, utils_1.toCommandValue)(val); process.env[name] = convertedVal; const filePath = process.env["GITHUB_ENV"] || ""; @@ -24840,18 +24840,18 @@ var require_webidl2 = __commonJS({ webidl.errors.exception = function(message) { return new TypeError(`${message.header}: ${message.message}`); }; - webidl.errors.conversionFailed = function(context3) { - const plural = context3.types.length === 1 ? "" : " one of"; - const message = `${context3.argument} could not be converted to${plural}: ${context3.types.join(", ")}.`; + webidl.errors.conversionFailed = function(context4) { + const plural = context4.types.length === 1 ? 
"" : " one of"; + const message = `${context4.argument} could not be converted to${plural}: ${context4.types.join(", ")}.`; return webidl.errors.exception({ - header: context3.prefix, + header: context4.prefix, message }); }; - webidl.errors.invalidArgument = function(context3) { + webidl.errors.invalidArgument = function(context4) { return webidl.errors.exception({ - header: context3.prefix, - message: `"${context3.value}" is an invalid ${context3.type}.` + header: context4.prefix, + message: `"${context4.value}" is an invalid ${context4.type}.` }); }; webidl.brandCheck = function(V, I, opts) { @@ -30847,17 +30847,17 @@ var require_api_request2 = __commonJS({ } } } - onConnect(abort, context3) { + onConnect(abort, context4) { if (this.reason) { abort(this.reason); return; } assert(this.callback); this.abort = abort; - this.context = context3; + this.context = context4; } onHeaders(statusCode, rawHeaders, resume, statusMessage) { - const { callback, opaque, abort, context: context3, responseHeaders, highWaterMark } = this; + const { callback, opaque, abort, context: context4, responseHeaders, highWaterMark } = this; const headers = responseHeaders === "raw" ? 
util.parseRawHeaders(rawHeaders) : util.parseHeaders(rawHeaders); if (statusCode < 200) { if (this.onInfo) { @@ -30894,7 +30894,7 @@ var require_api_request2 = __commonJS({ trailers: this.trailers, opaque, body: res, - context: context3 + context: context4 }); } } @@ -31063,17 +31063,17 @@ var require_api_stream2 = __commonJS({ } addSignal(this, signal); } - onConnect(abort, context3) { + onConnect(abort, context4) { if (this.reason) { abort(this.reason); return; } assert(this.callback); this.abort = abort; - this.context = context3; + this.context = context4; } onHeaders(statusCode, rawHeaders, resume, statusMessage) { - const { factory, opaque, context: context3, callback, responseHeaders } = this; + const { factory, opaque, context: context4, callback, responseHeaders } = this; const headers = responseHeaders === "raw" ? util.parseRawHeaders(rawHeaders) : util.parseHeaders(rawHeaders); if (statusCode < 200) { if (this.onInfo) { @@ -31101,7 +31101,7 @@ var require_api_stream2 = __commonJS({ statusCode, headers, opaque, - context: context3 + context: context4 }); if (!res || typeof res.write !== "function" || typeof res.end !== "function" || typeof res.on !== "function") { throw new InvalidReturnValueError("expected Writable"); @@ -31293,7 +31293,7 @@ var require_api_pipeline2 = __commonJS({ this.res = null; addSignal(this, signal); } - onConnect(abort, context3) { + onConnect(abort, context4) { const { ret, res } = this; if (this.reason) { abort(this.reason); @@ -31302,10 +31302,10 @@ var require_api_pipeline2 = __commonJS({ assert(!res, "pipeline cannot be retried"); assert(!ret.destroyed); this.abort = abort; - this.context = context3; + this.context = context4; } onHeaders(statusCode, rawHeaders, resume) { - const { opaque, handler: handler2, context: context3 } = this; + const { opaque, handler: handler2, context: context4 } = this; if (statusCode < 200) { if (this.onInfo) { const headers = this.responseHeaders === "raw" ? 
util.parseRawHeaders(rawHeaders) : util.parseHeaders(rawHeaders); @@ -31323,7 +31323,7 @@ var require_api_pipeline2 = __commonJS({ headers, opaque, body: this.res, - context: context3 + context: context4 }); } catch (err) { this.res.on("error", util.nop); @@ -31407,7 +31407,7 @@ var require_api_upgrade2 = __commonJS({ this.context = null; addSignal(this, signal); } - onConnect(abort, context3) { + onConnect(abort, context4) { if (this.reason) { abort(this.reason); return; @@ -31421,7 +31421,7 @@ var require_api_upgrade2 = __commonJS({ } onUpgrade(statusCode, rawHeaders, socket) { assert(statusCode === 101); - const { callback, opaque, context: context3 } = this; + const { callback, opaque, context: context4 } = this; removeSignal(this); this.callback = null; const headers = this.responseHeaders === "raw" ? util.parseRawHeaders(rawHeaders) : util.parseHeaders(rawHeaders); @@ -31429,7 +31429,7 @@ var require_api_upgrade2 = __commonJS({ headers, socket, opaque, - context: context3 + context: context4 }); } onError(err) { @@ -31498,20 +31498,20 @@ var require_api_connect2 = __commonJS({ this.abort = null; addSignal(this, signal); } - onConnect(abort, context3) { + onConnect(abort, context4) { if (this.reason) { abort(this.reason); return; } assert(this.callback); this.abort = abort; - this.context = context3; + this.context = context4; } onHeaders() { throw new SocketError("bad connect", null); } onUpgrade(statusCode, rawHeaders, socket) { - const { callback, opaque, context: context3 } = this; + const { callback, opaque, context: context4 } = this; removeSignal(this); this.callback = null; let headers = rawHeaders; @@ -31523,7 +31523,7 @@ var require_api_connect2 = __commonJS({ headers, socket, opaque, - context: context3 + context: context4 }); } onError(err) { @@ -40178,8 +40178,8 @@ function isDefined(value) { function isKeyOperator(operator) { return operator === ";" || operator === "&" || operator === "?"; } -function getValues(context3, operator, key, modifier) 
{ - var value = context3[key], result = []; +function getValues(context4, operator, key, modifier) { + var value = context4[key], result = []; if (isDefined(value) && value !== "") { if (typeof value === "string" || typeof value === "number" || typeof value === "boolean") { value = value.toString(); @@ -40243,7 +40243,7 @@ function parseUrl(template) { expand: expand.bind(null, template) }; } -function expand(template, context3) { +function expand(template, context4) { var operators = ["+", "#", ".", "/", ";", "?", "&"]; template = template.replace( /\{([^\{\}]+)\}|([^\{\}]+)/g, @@ -40257,7 +40257,7 @@ function expand(template, context3) { } expression.split(/,/g).forEach(function(variable) { var tmp = /([^:\*]*)(?::(\d+)|(\*))?/.exec(variable); - values.push(getValues(context3, operator, tmp[1], tmp[2] || tmp[3])); + values.push(getValues(context4, operator, tmp[1], tmp[2] || tmp[3])); }); if (operator && operator !== "+") { var separator = ","; @@ -48898,7 +48898,7 @@ var require_internal_glob_options_helper = __commonJS({ })(); Object.defineProperty(exports2, "__esModule", { value: true }); exports2.getOptions = getOptions; - var core14 = __importStar2(require_core()); + var core15 = __importStar2(require_core()); function getOptions(copy) { const result = { followSymbolicLinks: true, @@ -48910,23 +48910,23 @@ var require_internal_glob_options_helper = __commonJS({ if (copy) { if (typeof copy.followSymbolicLinks === "boolean") { result.followSymbolicLinks = copy.followSymbolicLinks; - core14.debug(`followSymbolicLinks '${result.followSymbolicLinks}'`); + core15.debug(`followSymbolicLinks '${result.followSymbolicLinks}'`); } if (typeof copy.implicitDescendants === "boolean") { result.implicitDescendants = copy.implicitDescendants; - core14.debug(`implicitDescendants '${result.implicitDescendants}'`); + core15.debug(`implicitDescendants '${result.implicitDescendants}'`); } if (typeof copy.matchDirectories === "boolean") { result.matchDirectories = 
copy.matchDirectories; - core14.debug(`matchDirectories '${result.matchDirectories}'`); + core15.debug(`matchDirectories '${result.matchDirectories}'`); } if (typeof copy.omitBrokenSymbolicLinks === "boolean") { result.omitBrokenSymbolicLinks = copy.omitBrokenSymbolicLinks; - core14.debug(`omitBrokenSymbolicLinks '${result.omitBrokenSymbolicLinks}'`); + core15.debug(`omitBrokenSymbolicLinks '${result.omitBrokenSymbolicLinks}'`); } if (typeof copy.excludeHiddenFiles === "boolean") { result.excludeHiddenFiles = copy.excludeHiddenFiles; - core14.debug(`excludeHiddenFiles '${result.excludeHiddenFiles}'`); + core15.debug(`excludeHiddenFiles '${result.excludeHiddenFiles}'`); } } return result; @@ -50554,7 +50554,7 @@ var require_internal_globber = __commonJS({ }; Object.defineProperty(exports2, "__esModule", { value: true }); exports2.DefaultGlobber = void 0; - var core14 = __importStar2(require_core()); + var core15 = __importStar2(require_core()); var fs16 = __importStar2(require("fs")); var globOptionsHelper = __importStar2(require_internal_glob_options_helper()); var path17 = __importStar2(require("path")); @@ -50607,7 +50607,7 @@ var require_internal_globber = __commonJS({ } const stack = []; for (const searchPath of patternHelper.getSearchPaths(patterns)) { - core14.debug(`Search path '${searchPath}'`); + core15.debug(`Search path '${searchPath}'`); try { yield __await2(fs16.promises.lstat(searchPath)); } catch (err) { @@ -50682,7 +50682,7 @@ var require_internal_globber = __commonJS({ } catch (err) { if (err.code === "ENOENT") { if (options.omitBrokenSymbolicLinks) { - core14.debug(`Broken symlink '${item.path}'`); + core15.debug(`Broken symlink '${item.path}'`); return void 0; } throw new Error(`No information found for the path '${item.path}'. 
This may indicate a broken symbolic link.`); @@ -50698,7 +50698,7 @@ var require_internal_globber = __commonJS({ traversalChain.pop(); } if (traversalChain.some((x) => x === realPath)) { - core14.debug(`Symlink cycle detected for path '${item.path}' and realpath '${realPath}'`); + core15.debug(`Symlink cycle detected for path '${item.path}' and realpath '${realPath}'`); return void 0; } traversalChain.push(realPath); @@ -50801,7 +50801,7 @@ var require_internal_hash_files = __commonJS({ Object.defineProperty(exports2, "__esModule", { value: true }); exports2.hashFiles = hashFiles2; var crypto3 = __importStar2(require("crypto")); - var core14 = __importStar2(require_core()); + var core15 = __importStar2(require_core()); var fs16 = __importStar2(require("fs")); var stream2 = __importStar2(require("stream")); var util = __importStar2(require("util")); @@ -50810,7 +50810,7 @@ var require_internal_hash_files = __commonJS({ return __awaiter2(this, arguments, void 0, function* (globber, currentWorkspace, verbose = false) { var _a, e_1, _b, _c; var _d; - const writeDelegate = verbose ? core14.info : core14.debug; + const writeDelegate = verbose ? core15.info : core15.debug; let hasMatch = false; const githubWorkspace = currentWorkspace ? currentWorkspace : (_d = process.env["GITHUB_WORKSPACE"]) !== null && _d !== void 0 ? 
_d : process.cwd(); const result = crypto3.createHash("sha256"); @@ -52201,7 +52201,7 @@ var require_cacheUtils = __commonJS({ exports2.assertDefined = assertDefined; exports2.getCacheVersion = getCacheVersion; exports2.getRuntimeToken = getRuntimeToken; - var core14 = __importStar2(require_core()); + var core15 = __importStar2(require_core()); var exec = __importStar2(require_exec()); var glob2 = __importStar2(require_glob()); var io7 = __importStar2(require_io()); @@ -52252,7 +52252,7 @@ var require_cacheUtils = __commonJS({ _e = false; const file = _c; const relativeFile = path17.relative(workspace, file).replace(new RegExp(`\\${path17.sep}`, "g"), "/"); - core14.debug(`Matched: ${relativeFile}`); + core15.debug(`Matched: ${relativeFile}`); if (relativeFile === "") { paths.push("."); } else { @@ -52280,7 +52280,7 @@ var require_cacheUtils = __commonJS({ return __awaiter2(this, arguments, void 0, function* (app, additionalArgs = []) { let versionOutput = ""; additionalArgs.push("--version"); - core14.debug(`Checking ${app} ${additionalArgs.join(" ")}`); + core15.debug(`Checking ${app} ${additionalArgs.join(" ")}`); try { yield exec.exec(`${app}`, additionalArgs, { ignoreReturnCode: true, @@ -52291,10 +52291,10 @@ var require_cacheUtils = __commonJS({ } }); } catch (err) { - core14.debug(err.message); + core15.debug(err.message); } versionOutput = versionOutput.trim(); - core14.debug(versionOutput); + core15.debug(versionOutput); return versionOutput; }); } @@ -52302,7 +52302,7 @@ var require_cacheUtils = __commonJS({ return __awaiter2(this, void 0, void 0, function* () { const versionOutput = yield getVersion("zstd", ["--quiet"]); const version = semver10.clean(versionOutput); - core14.debug(`zstd version: ${version}`); + core15.debug(`zstd version: ${version}`); if (versionOutput === "") { return constants_1.CompressionMethod.Gzip; } else { @@ -52427,14 +52427,14 @@ function __esDecorate(ctor, descriptorIn, decorators, contextIn, initializers, e var descriptor = 
descriptorIn || (target ? Object.getOwnPropertyDescriptor(target, contextIn.name) : {}); var _, done = false; for (var i = decorators.length - 1; i >= 0; i--) { - var context3 = {}; - for (var p in contextIn) context3[p] = p === "access" ? {} : contextIn[p]; - for (var p in contextIn.access) context3.access[p] = contextIn.access[p]; - context3.addInitializer = function(f) { + var context4 = {}; + for (var p in contextIn) context4[p] = p === "access" ? {} : contextIn[p]; + for (var p in contextIn.access) context4.access[p] = contextIn.access[p]; + context4.addInitializer = function(f) { if (done) throw new TypeError("Cannot add initializers after decoration has completed"); extraInitializers.push(accept(f || null)); }; - var result = (0, decorators[i])(kind === "accessor" ? { get: descriptor.get, set: descriptor.set } : descriptor[key], context3); + var result = (0, decorators[i])(kind === "accessor" ? { get: descriptor.get, set: descriptor.set } : descriptor[key], context4); if (kind === "accessor") { if (result === void 0) continue; if (result === null || typeof result !== "object") throw new TypeError("Object expected"); @@ -53161,19 +53161,19 @@ var require_logger = __commonJS({ logger: clientLogger }; } - var context3 = createLoggerContext({ + var context4 = createLoggerContext({ logLevelEnvVarName: "TYPESPEC_RUNTIME_LOG_LEVEL", namespace: "typeSpecRuntime" }); - exports2.TypeSpecRuntimeLogger = context3.logger; + exports2.TypeSpecRuntimeLogger = context4.logger; function setLogLevel(logLevel) { - context3.setLogLevel(logLevel); + context4.setLogLevel(logLevel); } function getLogLevel() { - return context3.getLogLevel(); + return context4.getLogLevel(); } function createClientLogger(namespace) { - return context3.createClientLogger(namespace); + return context4.createClientLogger(namespace); } } }); @@ -57419,19 +57419,19 @@ var require_commonjs2 = __commonJS({ exports2.getLogLevel = getLogLevel; exports2.createClientLogger = createClientLogger; var logger_1 = 
require_internal(); - var context3 = (0, logger_1.createLoggerContext)({ + var context4 = (0, logger_1.createLoggerContext)({ logLevelEnvVarName: "AZURE_LOG_LEVEL", namespace: "azure" }); - exports2.AzureLogger = context3.logger; + exports2.AzureLogger = context4.logger; function setLogLevel(level) { - context3.setLogLevel(level); + context4.setLogLevel(level); } function getLogLevel() { - return context3.getLogLevel(); + return context4.getLogLevel(); } function createClientLogger(namespace) { - return context3.createClientLogger(namespace); + return context4.createClientLogger(namespace); } } }); @@ -58341,14 +58341,14 @@ var require_tracingContext = __commonJS({ namespace: /* @__PURE__ */ Symbol.for("@azure/core-tracing namespace") }; function createTracingContext(options = {}) { - let context3 = new TracingContextImpl(options.parentContext); + let context4 = new TracingContextImpl(options.parentContext); if (options.span) { - context3 = context3.setValue(exports2.knownContextKeys.span, options.span); + context4 = context4.setValue(exports2.knownContextKeys.span, options.span); } if (options.namespace) { - context3 = context3.setValue(exports2.knownContextKeys.namespace, options.namespace); + context4 = context4.setValue(exports2.knownContextKeys.namespace, options.namespace); } - return context3; + return context4; } var TracingContextImpl = class _TracingContextImpl { _contextMap; @@ -58486,8 +58486,8 @@ var require_tracingClient = __commonJS({ span.end(); } } - function withContext(context3, callback, ...callbackArgs) { - return (0, instrumenter_js_1.getInstrumenter)().withContext(context3, callback, ...callbackArgs); + function withContext(context4, callback, ...callbackArgs) { + return (0, instrumenter_js_1.getInstrumenter)().withContext(context4, callback, ...callbackArgs); } function parseTraceparentHeader(traceparentHeader) { return (0, instrumenter_js_1.getInstrumenter)().parseTraceparentHeader(traceparentHeader); @@ -91958,7 +91958,7 @@ var 
require_uploadUtils = __commonJS({ Object.defineProperty(exports2, "__esModule", { value: true }); exports2.UploadProgress = void 0; exports2.uploadCacheArchiveSDK = uploadCacheArchiveSDK; - var core14 = __importStar2(require_core()); + var core15 = __importStar2(require_core()); var storage_blob_1 = require_commonjs15(); var errors_1 = require_errors3(); var UploadProgress = class { @@ -92000,7 +92000,7 @@ var require_uploadUtils = __commonJS({ const percentage = (100 * (transferredBytes / this.contentLength)).toFixed(1); const elapsedTime = Date.now() - this.startTime; const uploadSpeed = (transferredBytes / (1024 * 1024) / (elapsedTime / 1e3)).toFixed(1); - core14.info(`Sent ${transferredBytes} of ${this.contentLength} (${percentage}%), ${uploadSpeed} MBs/sec`); + core15.info(`Sent ${transferredBytes} of ${this.contentLength} (${percentage}%), ${uploadSpeed} MBs/sec`); if (this.isDone()) { this.displayedComplete = true; } @@ -92057,14 +92057,14 @@ var require_uploadUtils = __commonJS({ }; try { uploadProgress.startDisplayTimer(); - core14.debug(`BlobClient: ${blobClient.name}:${blobClient.accountName}:${blobClient.containerName}`); + core15.debug(`BlobClient: ${blobClient.name}:${blobClient.accountName}:${blobClient.containerName}`); const response = yield blockBlobClient.uploadFile(archivePath, uploadOptions); if (response._response.status >= 400) { throw new errors_1.InvalidResponseError(`uploadCacheArchiveSDK: upload failed with status code ${response._response.status}`); } return response; } catch (error3) { - core14.warning(`uploadCacheArchiveSDK: internal error uploading cache archive: ${error3.message}`); + core15.warning(`uploadCacheArchiveSDK: internal error uploading cache archive: ${error3.message}`); throw error3; } finally { uploadProgress.stopDisplayTimer(); @@ -92149,7 +92149,7 @@ var require_requestUtils = __commonJS({ exports2.retry = retry2; exports2.retryTypedResponse = retryTypedResponse; exports2.retryHttpClientResponse = 
retryHttpClientResponse; - var core14 = __importStar2(require_core()); + var core15 = __importStar2(require_core()); var http_client_1 = require_lib(); var constants_1 = require_constants12(); function isSuccessStatusCode(statusCode) { @@ -92207,9 +92207,9 @@ var require_requestUtils = __commonJS({ isRetryable = isRetryableStatusCode(statusCode); errorMessage = `Cache service responded with ${statusCode}`; } - core14.debug(`${name} - Attempt ${attempt} of ${maxAttempts} failed with error: ${errorMessage}`); + core15.debug(`${name} - Attempt ${attempt} of ${maxAttempts} failed with error: ${errorMessage}`); if (!isRetryable) { - core14.debug(`${name} - Error is not retryable`); + core15.debug(`${name} - Error is not retryable`); break; } yield sleep(delay2); @@ -92468,7 +92468,7 @@ var require_downloadUtils = __commonJS({ exports2.downloadCacheHttpClient = downloadCacheHttpClient; exports2.downloadCacheHttpClientConcurrent = downloadCacheHttpClientConcurrent; exports2.downloadCacheStorageSDK = downloadCacheStorageSDK; - var core14 = __importStar2(require_core()); + var core15 = __importStar2(require_core()); var http_client_1 = require_lib(); var storage_blob_1 = require_commonjs15(); var buffer = __importStar2(require("buffer")); @@ -92506,7 +92506,7 @@ var require_downloadUtils = __commonJS({ this.segmentIndex = this.segmentIndex + 1; this.segmentSize = segmentSize; this.receivedBytes = 0; - core14.debug(`Downloading segment at offset ${this.segmentOffset} with length ${this.segmentSize}...`); + core15.debug(`Downloading segment at offset ${this.segmentOffset} with length ${this.segmentSize}...`); } /** * Sets the number of bytes received for the current segment. 
@@ -92540,7 +92540,7 @@ var require_downloadUtils = __commonJS({ const percentage = (100 * (transferredBytes / this.contentLength)).toFixed(1); const elapsedTime = Date.now() - this.startTime; const downloadSpeed = (transferredBytes / (1024 * 1024) / (elapsedTime / 1e3)).toFixed(1); - core14.info(`Received ${transferredBytes} of ${this.contentLength} (${percentage}%), ${downloadSpeed} MBs/sec`); + core15.info(`Received ${transferredBytes} of ${this.contentLength} (${percentage}%), ${downloadSpeed} MBs/sec`); if (this.isDone()) { this.displayedComplete = true; } @@ -92590,7 +92590,7 @@ var require_downloadUtils = __commonJS({ })); downloadResponse.message.socket.setTimeout(constants_1.SocketTimeout, () => { downloadResponse.message.destroy(); - core14.debug(`Aborting download, socket timed out after ${constants_1.SocketTimeout} ms`); + core15.debug(`Aborting download, socket timed out after ${constants_1.SocketTimeout} ms`); }); yield pipeResponseToStream(downloadResponse, writeStream); const contentLengthHeader = downloadResponse.message.headers["content-length"]; @@ -92601,7 +92601,7 @@ var require_downloadUtils = __commonJS({ throw new Error(`Incomplete download. Expected file size: ${expectedLength}, actual file size: ${actualLength}`); } } else { - core14.debug("Unable to validate download, no Content-Length header"); + core15.debug("Unable to validate download, no Content-Length header"); } }); } @@ -92719,7 +92719,7 @@ var require_downloadUtils = __commonJS({ const properties = yield client.getProperties(); const contentLength = (_a = properties.contentLength) !== null && _a !== void 0 ? 
_a : -1; if (contentLength < 0) { - core14.debug("Unable to determine content length, downloading file with http-client..."); + core15.debug("Unable to determine content length, downloading file with http-client..."); yield downloadCacheHttpClient(archiveLocation, archivePath); } else { const maxSegmentSize = Math.min(134217728, buffer.constants.MAX_LENGTH); @@ -92809,7 +92809,7 @@ var require_options = __commonJS({ Object.defineProperty(exports2, "__esModule", { value: true }); exports2.getUploadOptions = getUploadOptions; exports2.getDownloadOptions = getDownloadOptions; - var core14 = __importStar2(require_core()); + var core15 = __importStar2(require_core()); function getUploadOptions(copy) { const result = { useAzureSdk: false, @@ -92829,9 +92829,9 @@ var require_options = __commonJS({ } result.uploadConcurrency = !isNaN(Number(process.env["CACHE_UPLOAD_CONCURRENCY"])) ? Math.min(32, Number(process.env["CACHE_UPLOAD_CONCURRENCY"])) : result.uploadConcurrency; result.uploadChunkSize = !isNaN(Number(process.env["CACHE_UPLOAD_CHUNK_SIZE"])) ? 
Math.min(128 * 1024 * 1024, Number(process.env["CACHE_UPLOAD_CHUNK_SIZE"]) * 1024 * 1024) : result.uploadChunkSize; - core14.debug(`Use Azure SDK: ${result.useAzureSdk}`); - core14.debug(`Upload concurrency: ${result.uploadConcurrency}`); - core14.debug(`Upload chunk size: ${result.uploadChunkSize}`); + core15.debug(`Use Azure SDK: ${result.useAzureSdk}`); + core15.debug(`Upload concurrency: ${result.uploadConcurrency}`); + core15.debug(`Upload chunk size: ${result.uploadChunkSize}`); return result; } function getDownloadOptions(copy) { @@ -92867,12 +92867,12 @@ var require_options = __commonJS({ if (segmentDownloadTimeoutMins && !isNaN(Number(segmentDownloadTimeoutMins)) && isFinite(Number(segmentDownloadTimeoutMins))) { result.segmentTimeoutInMs = Number(segmentDownloadTimeoutMins) * 60 * 1e3; } - core14.debug(`Use Azure SDK: ${result.useAzureSdk}`); - core14.debug(`Download concurrency: ${result.downloadConcurrency}`); - core14.debug(`Request timeout (ms): ${result.timeoutInMs}`); - core14.debug(`Cache segment download timeout mins env var: ${process.env["SEGMENT_DOWNLOAD_TIMEOUT_MINS"]}`); - core14.debug(`Segment download timeout (ms): ${result.segmentTimeoutInMs}`); - core14.debug(`Lookup only: ${result.lookupOnly}`); + core15.debug(`Use Azure SDK: ${result.useAzureSdk}`); + core15.debug(`Download concurrency: ${result.downloadConcurrency}`); + core15.debug(`Request timeout (ms): ${result.timeoutInMs}`); + core15.debug(`Cache segment download timeout mins env var: ${process.env["SEGMENT_DOWNLOAD_TIMEOUT_MINS"]}`); + core15.debug(`Segment download timeout (ms): ${result.segmentTimeoutInMs}`); + core15.debug(`Lookup only: ${result.lookupOnly}`); return result; } } @@ -93066,7 +93066,7 @@ var require_cacheHttpClient = __commonJS({ exports2.downloadCache = downloadCache; exports2.reserveCache = reserveCache; exports2.saveCache = saveCache5; - var core14 = __importStar2(require_core()); + var core15 = __importStar2(require_core()); var http_client_1 = 
require_lib(); var auth_1 = require_auth(); var fs16 = __importStar2(require("fs")); @@ -93084,7 +93084,7 @@ var require_cacheHttpClient = __commonJS({ throw new Error("Cache Service Url not found, unable to restore cache."); } const url = `${baseUrl}_apis/artifactcache/${resource}`; - core14.debug(`Resource Url: ${url}`); + core15.debug(`Resource Url: ${url}`); return url; } function createAcceptHeader(type2, apiVersion) { @@ -93112,7 +93112,7 @@ var require_cacheHttpClient = __commonJS({ return httpClient.getJson(getCacheApiUrl(resource)); })); if (response.statusCode === 204) { - if (core14.isDebug()) { + if (core15.isDebug()) { yield printCachesListForDiagnostics(keys[0], httpClient, version); } return null; @@ -93125,9 +93125,9 @@ var require_cacheHttpClient = __commonJS({ if (!cacheDownloadUrl) { throw new Error("Cache not found."); } - core14.setSecret(cacheDownloadUrl); - core14.debug(`Cache Result:`); - core14.debug(JSON.stringify(cacheResult)); + core15.setSecret(cacheDownloadUrl); + core15.debug(`Cache Result:`); + core15.debug(JSON.stringify(cacheResult)); return cacheResult; }); } @@ -93141,10 +93141,10 @@ var require_cacheHttpClient = __commonJS({ const cacheListResult = response.result; const totalCount = cacheListResult === null || cacheListResult === void 0 ? void 0 : cacheListResult.totalCount; if (totalCount && totalCount > 0) { - core14.debug(`No matching cache found for cache key '${key}', version '${version} and scope ${process.env["GITHUB_REF"]}. There exist one or more cache(s) with similar key but they have different version or scope. See more info on cache matching here: https://docs.github.com/en/actions/using-workflows/caching-dependencies-to-speed-up-workflows#matching-a-cache-key + core15.debug(`No matching cache found for cache key '${key}', version '${version} and scope ${process.env["GITHUB_REF"]}. There exist one or more cache(s) with similar key but they have different version or scope. 
See more info on cache matching here: https://docs.github.com/en/actions/using-workflows/caching-dependencies-to-speed-up-workflows#matching-a-cache-key Other caches with similar key:`); for (const cacheEntry of (cacheListResult === null || cacheListResult === void 0 ? void 0 : cacheListResult.artifactCaches) || []) { - core14.debug(`Cache Key: ${cacheEntry === null || cacheEntry === void 0 ? void 0 : cacheEntry.cacheKey}, Cache Version: ${cacheEntry === null || cacheEntry === void 0 ? void 0 : cacheEntry.cacheVersion}, Cache Scope: ${cacheEntry === null || cacheEntry === void 0 ? void 0 : cacheEntry.scope}, Cache Created: ${cacheEntry === null || cacheEntry === void 0 ? void 0 : cacheEntry.creationTime}`); + core15.debug(`Cache Key: ${cacheEntry === null || cacheEntry === void 0 ? void 0 : cacheEntry.cacheKey}, Cache Version: ${cacheEntry === null || cacheEntry === void 0 ? void 0 : cacheEntry.cacheVersion}, Cache Scope: ${cacheEntry === null || cacheEntry === void 0 ? void 0 : cacheEntry.scope}, Cache Created: ${cacheEntry === null || cacheEntry === void 0 ? 
void 0 : cacheEntry.creationTime}`); } } } @@ -93187,7 +93187,7 @@ Other caches with similar key:`); } function uploadChunk(httpClient, resourceUrl, openStream, start, end) { return __awaiter2(this, void 0, void 0, function* () { - core14.debug(`Uploading chunk of size ${end - start + 1} bytes at offset ${start} with content range: ${getContentRange(start, end)}`); + core15.debug(`Uploading chunk of size ${end - start + 1} bytes at offset ${start} with content range: ${getContentRange(start, end)}`); const additionalHeaders = { "Content-Type": "application/octet-stream", "Content-Range": getContentRange(start, end) @@ -93209,7 +93209,7 @@ Other caches with similar key:`); const concurrency = utils.assertDefined("uploadConcurrency", uploadOptions.uploadConcurrency); const maxChunkSize = utils.assertDefined("uploadChunkSize", uploadOptions.uploadChunkSize); const parallelUploads = [...new Array(concurrency).keys()]; - core14.debug("Awaiting all uploads"); + core15.debug("Awaiting all uploads"); let offset = 0; try { yield Promise.all(parallelUploads.map(() => __awaiter2(this, void 0, void 0, function* () { @@ -93252,16 +93252,16 @@ Other caches with similar key:`); yield (0, uploadUtils_1.uploadCacheArchiveSDK)(signedUploadURL, archivePath, options); } else { const httpClient = createHttpClient(); - core14.debug("Upload cache"); + core15.debug("Upload cache"); yield uploadFile(httpClient, cacheId, archivePath, options); - core14.debug("Commiting cache"); + core15.debug("Commiting cache"); const cacheSize = utils.getArchiveFileSizeInBytes(archivePath); - core14.info(`Cache Size: ~${Math.round(cacheSize / (1024 * 1024))} MB (${cacheSize} B)`); + core15.info(`Cache Size: ~${Math.round(cacheSize / (1024 * 1024))} MB (${cacheSize} B)`); const commitCacheResponse = yield commitCache(httpClient, cacheId, cacheSize); if (!(0, requestUtils_1.isSuccessStatusCode)(commitCacheResponse.statusCode)) { throw new Error(`Cache service responded with ${commitCacheResponse.statusCode} 
during commit cache.`); } - core14.info("Cache saved successfully"); + core15.info("Cache saved successfully"); } }); } @@ -98744,7 +98744,7 @@ var require_cache5 = __commonJS({ exports2.isFeatureAvailable = isFeatureAvailable; exports2.restoreCache = restoreCache5; exports2.saveCache = saveCache5; - var core14 = __importStar2(require_core()); + var core15 = __importStar2(require_core()); var path17 = __importStar2(require("path")); var utils = __importStar2(require_cacheUtils()); var cacheHttpClient = __importStar2(require_cacheHttpClient()); @@ -98803,7 +98803,7 @@ var require_cache5 = __commonJS({ function restoreCache5(paths_1, primaryKey_1, restoreKeys_1, options_1) { return __awaiter2(this, arguments, void 0, function* (paths, primaryKey, restoreKeys, options, enableCrossOsArchive = false) { const cacheServiceVersion = (0, config_1.getCacheServiceVersion)(); - core14.debug(`Cache service version: ${cacheServiceVersion}`); + core15.debug(`Cache service version: ${cacheServiceVersion}`); checkPaths(paths); switch (cacheServiceVersion) { case "v2": @@ -98818,8 +98818,8 @@ var require_cache5 = __commonJS({ return __awaiter2(this, arguments, void 0, function* (paths, primaryKey, restoreKeys, options, enableCrossOsArchive = false) { restoreKeys = restoreKeys || []; const keys = [primaryKey, ...restoreKeys]; - core14.debug("Resolved Keys:"); - core14.debug(JSON.stringify(keys)); + core15.debug("Resolved Keys:"); + core15.debug(JSON.stringify(keys)); if (keys.length > 10) { throw new ValidationError(`Key Validation Error: Keys are limited to a maximum of 10.`); } @@ -98837,19 +98837,19 @@ var require_cache5 = __commonJS({ return void 0; } if (options === null || options === void 0 ? 
void 0 : options.lookupOnly) { - core14.info("Lookup only - skipping download"); + core15.info("Lookup only - skipping download"); return cacheEntry.cacheKey; } archivePath = path17.join(yield utils.createTempDirectory(), utils.getCacheFileName(compressionMethod)); - core14.debug(`Archive Path: ${archivePath}`); + core15.debug(`Archive Path: ${archivePath}`); yield cacheHttpClient.downloadCache(cacheEntry.archiveLocation, archivePath, options); - if (core14.isDebug()) { + if (core15.isDebug()) { yield (0, tar_1.listTar)(archivePath, compressionMethod); } const archiveFileSize = utils.getArchiveFileSizeInBytes(archivePath); - core14.info(`Cache Size: ~${Math.round(archiveFileSize / (1024 * 1024))} MB (${archiveFileSize} B)`); + core15.info(`Cache Size: ~${Math.round(archiveFileSize / (1024 * 1024))} MB (${archiveFileSize} B)`); yield (0, tar_1.extractTar)(archivePath, compressionMethod); - core14.info("Cache restored successfully"); + core15.info("Cache restored successfully"); return cacheEntry.cacheKey; } catch (error3) { const typedError = error3; @@ -98857,16 +98857,16 @@ var require_cache5 = __commonJS({ throw error3; } else { if (typedError instanceof http_client_1.HttpClientError && typeof typedError.statusCode === "number" && typedError.statusCode >= 500) { - core14.error(`Failed to restore: ${error3.message}`); + core15.error(`Failed to restore: ${error3.message}`); } else { - core14.warning(`Failed to restore: ${error3.message}`); + core15.warning(`Failed to restore: ${error3.message}`); } } } finally { try { yield utils.unlinkFile(archivePath); } catch (error3) { - core14.debug(`Failed to delete archive: ${error3}`); + core15.debug(`Failed to delete archive: ${error3}`); } } return void 0; @@ -98877,8 +98877,8 @@ var require_cache5 = __commonJS({ options = Object.assign(Object.assign({}, options), { useAzureSdk: true }); restoreKeys = restoreKeys || []; const keys = [primaryKey, ...restoreKeys]; - core14.debug("Resolved Keys:"); - 
core14.debug(JSON.stringify(keys)); + core15.debug("Resolved Keys:"); + core15.debug(JSON.stringify(keys)); if (keys.length > 10) { throw new ValidationError(`Key Validation Error: Keys are limited to a maximum of 10.`); } @@ -98896,30 +98896,30 @@ var require_cache5 = __commonJS({ }; const response = yield twirpClient.GetCacheEntryDownloadURL(request2); if (!response.ok) { - core14.debug(`Cache not found for version ${request2.version} of keys: ${keys.join(", ")}`); + core15.debug(`Cache not found for version ${request2.version} of keys: ${keys.join(", ")}`); return void 0; } const isRestoreKeyMatch = request2.key !== response.matchedKey; if (isRestoreKeyMatch) { - core14.info(`Cache hit for restore-key: ${response.matchedKey}`); + core15.info(`Cache hit for restore-key: ${response.matchedKey}`); } else { - core14.info(`Cache hit for: ${response.matchedKey}`); + core15.info(`Cache hit for: ${response.matchedKey}`); } if (options === null || options === void 0 ? void 0 : options.lookupOnly) { - core14.info("Lookup only - skipping download"); + core15.info("Lookup only - skipping download"); return response.matchedKey; } archivePath = path17.join(yield utils.createTempDirectory(), utils.getCacheFileName(compressionMethod)); - core14.debug(`Archive path: ${archivePath}`); - core14.debug(`Starting download of archive to: ${archivePath}`); + core15.debug(`Archive path: ${archivePath}`); + core15.debug(`Starting download of archive to: ${archivePath}`); yield cacheHttpClient.downloadCache(response.signedDownloadUrl, archivePath, options); const archiveFileSize = utils.getArchiveFileSizeInBytes(archivePath); - core14.info(`Cache Size: ~${Math.round(archiveFileSize / (1024 * 1024))} MB (${archiveFileSize} B)`); - if (core14.isDebug()) { + core15.info(`Cache Size: ~${Math.round(archiveFileSize / (1024 * 1024))} MB (${archiveFileSize} B)`); + if (core15.isDebug()) { yield (0, tar_1.listTar)(archivePath, compressionMethod); } yield (0, tar_1.extractTar)(archivePath, 
compressionMethod); - core14.info("Cache restored successfully"); + core15.info("Cache restored successfully"); return response.matchedKey; } catch (error3) { const typedError = error3; @@ -98927,9 +98927,9 @@ var require_cache5 = __commonJS({ throw error3; } else { if (typedError instanceof http_client_1.HttpClientError && typeof typedError.statusCode === "number" && typedError.statusCode >= 500) { - core14.error(`Failed to restore: ${error3.message}`); + core15.error(`Failed to restore: ${error3.message}`); } else { - core14.warning(`Failed to restore: ${error3.message}`); + core15.warning(`Failed to restore: ${error3.message}`); } } } finally { @@ -98938,7 +98938,7 @@ var require_cache5 = __commonJS({ yield utils.unlinkFile(archivePath); } } catch (error3) { - core14.debug(`Failed to delete archive: ${error3}`); + core15.debug(`Failed to delete archive: ${error3}`); } } return void 0; @@ -98947,7 +98947,7 @@ var require_cache5 = __commonJS({ function saveCache5(paths_1, key_1, options_1) { return __awaiter2(this, arguments, void 0, function* (paths, key, options, enableCrossOsArchive = false) { const cacheServiceVersion = (0, config_1.getCacheServiceVersion)(); - core14.debug(`Cache service version: ${cacheServiceVersion}`); + core15.debug(`Cache service version: ${cacheServiceVersion}`); checkPaths(paths); checkKey(key); switch (cacheServiceVersion) { @@ -98965,26 +98965,26 @@ var require_cache5 = __commonJS({ const compressionMethod = yield utils.getCompressionMethod(); let cacheId = -1; const cachePaths = yield utils.resolvePaths(paths); - core14.debug("Cache Paths:"); - core14.debug(`${JSON.stringify(cachePaths)}`); + core15.debug("Cache Paths:"); + core15.debug(`${JSON.stringify(cachePaths)}`); if (cachePaths.length === 0) { throw new Error(`Path Validation Error: Path(s) specified in the action for caching do(es) not exist, hence no cache is being saved.`); } const archiveFolder = yield utils.createTempDirectory(); const archivePath = 
path17.join(archiveFolder, utils.getCacheFileName(compressionMethod)); - core14.debug(`Archive Path: ${archivePath}`); + core15.debug(`Archive Path: ${archivePath}`); try { yield (0, tar_1.createTar)(archiveFolder, cachePaths, compressionMethod); - if (core14.isDebug()) { + if (core15.isDebug()) { yield (0, tar_1.listTar)(archivePath, compressionMethod); } const fileSizeLimit = 10 * 1024 * 1024 * 1024; const archiveFileSize = utils.getArchiveFileSizeInBytes(archivePath); - core14.debug(`File Size: ${archiveFileSize}`); + core15.debug(`File Size: ${archiveFileSize}`); if (archiveFileSize > fileSizeLimit && !(0, config_1.isGhes)()) { throw new Error(`Cache size of ~${Math.round(archiveFileSize / (1024 * 1024))} MB (${archiveFileSize} B) is over the 10GB limit, not saving cache.`); } - core14.debug("Reserving Cache"); + core15.debug("Reserving Cache"); const reserveCacheResponse = yield cacheHttpClient.reserveCache(key, paths, { compressionMethod, enableCrossOsArchive, @@ -98997,26 +98997,26 @@ var require_cache5 = __commonJS({ } else { throw new ReserveCacheError2(`Unable to reserve cache with key ${key}, another job may be creating this cache. More details: ${(_e = reserveCacheResponse === null || reserveCacheResponse === void 0 ? void 0 : reserveCacheResponse.error) === null || _e === void 0 ? 
void 0 : _e.message}`); } - core14.debug(`Saving Cache (ID: ${cacheId})`); + core15.debug(`Saving Cache (ID: ${cacheId})`); yield cacheHttpClient.saveCache(cacheId, archivePath, "", options); } catch (error3) { const typedError = error3; if (typedError.name === ValidationError.name) { throw error3; } else if (typedError.name === ReserveCacheError2.name) { - core14.info(`Failed to save: ${typedError.message}`); + core15.info(`Failed to save: ${typedError.message}`); } else { if (typedError instanceof http_client_1.HttpClientError && typeof typedError.statusCode === "number" && typedError.statusCode >= 500) { - core14.error(`Failed to save: ${typedError.message}`); + core15.error(`Failed to save: ${typedError.message}`); } else { - core14.warning(`Failed to save: ${typedError.message}`); + core15.warning(`Failed to save: ${typedError.message}`); } } } finally { try { yield utils.unlinkFile(archivePath); } catch (error3) { - core14.debug(`Failed to delete archive: ${error3}`); + core15.debug(`Failed to delete archive: ${error3}`); } } return cacheId; @@ -99029,23 +99029,23 @@ var require_cache5 = __commonJS({ const twirpClient = cacheTwirpClient.internalCacheTwirpClient(); let cacheId = -1; const cachePaths = yield utils.resolvePaths(paths); - core14.debug("Cache Paths:"); - core14.debug(`${JSON.stringify(cachePaths)}`); + core15.debug("Cache Paths:"); + core15.debug(`${JSON.stringify(cachePaths)}`); if (cachePaths.length === 0) { throw new Error(`Path Validation Error: Path(s) specified in the action for caching do(es) not exist, hence no cache is being saved.`); } const archiveFolder = yield utils.createTempDirectory(); const archivePath = path17.join(archiveFolder, utils.getCacheFileName(compressionMethod)); - core14.debug(`Archive Path: ${archivePath}`); + core15.debug(`Archive Path: ${archivePath}`); try { yield (0, tar_1.createTar)(archiveFolder, cachePaths, compressionMethod); - if (core14.isDebug()) { + if (core15.isDebug()) { yield (0, 
tar_1.listTar)(archivePath, compressionMethod); } const archiveFileSize = utils.getArchiveFileSizeInBytes(archivePath); - core14.debug(`File Size: ${archiveFileSize}`); + core15.debug(`File Size: ${archiveFileSize}`); options.archiveSizeBytes = archiveFileSize; - core14.debug("Reserving Cache"); + core15.debug("Reserving Cache"); const version = utils.getCacheVersion(paths, compressionMethod, enableCrossOsArchive); const request2 = { key, @@ -99056,16 +99056,16 @@ var require_cache5 = __commonJS({ const response = yield twirpClient.CreateCacheEntry(request2); if (!response.ok) { if (response.message) { - core14.warning(`Cache reservation failed: ${response.message}`); + core15.warning(`Cache reservation failed: ${response.message}`); } throw new Error(response.message || "Response was not ok"); } signedUploadUrl = response.signedUploadUrl; } catch (error3) { - core14.debug(`Failed to reserve cache: ${error3}`); + core15.debug(`Failed to reserve cache: ${error3}`); throw new ReserveCacheError2(`Unable to reserve cache with key ${key}, another job may be creating this cache.`); } - core14.debug(`Attempting to upload cache located at: ${archivePath}`); + core15.debug(`Attempting to upload cache located at: ${archivePath}`); yield cacheHttpClient.saveCache(cacheId, archivePath, signedUploadUrl, options); const finalizeRequest = { key, @@ -99073,7 +99073,7 @@ var require_cache5 = __commonJS({ sizeBytes: `${archiveFileSize}` }; const finalizeResponse = yield twirpClient.FinalizeCacheEntryUpload(finalizeRequest); - core14.debug(`FinalizeCacheEntryUploadResponse: ${finalizeResponse.ok}`); + core15.debug(`FinalizeCacheEntryUploadResponse: ${finalizeResponse.ok}`); if (!finalizeResponse.ok) { if (finalizeResponse.message) { throw new FinalizeCacheError(finalizeResponse.message); @@ -99086,21 +99086,21 @@ var require_cache5 = __commonJS({ if (typedError.name === ValidationError.name) { throw error3; } else if (typedError.name === ReserveCacheError2.name) { - 
core14.info(`Failed to save: ${typedError.message}`); + core15.info(`Failed to save: ${typedError.message}`); } else if (typedError.name === FinalizeCacheError.name) { - core14.warning(typedError.message); + core15.warning(typedError.message); } else { if (typedError instanceof http_client_1.HttpClientError && typeof typedError.statusCode === "number" && typedError.statusCode >= 500) { - core14.error(`Failed to save: ${typedError.message}`); + core15.error(`Failed to save: ${typedError.message}`); } else { - core14.warning(`Failed to save: ${typedError.message}`); + core15.warning(`Failed to save: ${typedError.message}`); } } } finally { try { yield utils.unlinkFile(archivePath); } catch (error3) { - core14.debug(`Failed to delete archive: ${error3}`); + core15.debug(`Failed to delete archive: ${error3}`); } } return cacheId; @@ -99327,7 +99327,7 @@ var require_retry_helper = __commonJS({ }; Object.defineProperty(exports2, "__esModule", { value: true }); exports2.RetryHelper = void 0; - var core14 = __importStar2(require_core()); + var core15 = __importStar2(require_core()); var RetryHelper = class { constructor(maxAttempts, minSeconds, maxSeconds) { if (maxAttempts < 1) { @@ -99350,10 +99350,10 @@ var require_retry_helper = __commonJS({ if (isRetryable && !isRetryable(err)) { throw err; } - core14.info(err.message); + core15.info(err.message); } const seconds = this.getSleepAmount(); - core14.info(`Waiting ${seconds} seconds before trying again`); + core15.info(`Waiting ${seconds} seconds before trying again`); yield this.sleep(seconds); attempt++; } @@ -99456,7 +99456,7 @@ var require_tool_cache = __commonJS({ exports2.findFromManifest = findFromManifest; exports2.isExplicitVersion = isExplicitVersion; exports2.evaluateVersions = evaluateVersions; - var core14 = __importStar2(require_core()); + var core15 = __importStar2(require_core()); var io7 = __importStar2(require_io()); var crypto3 = __importStar2(require("crypto")); var fs16 = __importStar2(require("fs")); 
@@ -99485,8 +99485,8 @@ var require_tool_cache = __commonJS({ return __awaiter2(this, void 0, void 0, function* () { dest = dest || path17.join(_getTempDirectory(), crypto3.randomUUID()); yield io7.mkdirP(path17.dirname(dest)); - core14.debug(`Downloading ${url}`); - core14.debug(`Destination ${dest}`); + core15.debug(`Downloading ${url}`); + core15.debug(`Destination ${dest}`); const maxAttempts = 3; const minSeconds = _getGlobal("TEST_DOWNLOAD_TOOL_RETRY_MIN_SECONDS", 10); const maxSeconds = _getGlobal("TEST_DOWNLOAD_TOOL_RETRY_MAX_SECONDS", 20); @@ -99512,7 +99512,7 @@ var require_tool_cache = __commonJS({ allowRetries: false }); if (auth2) { - core14.debug("set auth"); + core15.debug("set auth"); if (headers === void 0) { headers = {}; } @@ -99521,7 +99521,7 @@ var require_tool_cache = __commonJS({ const response = yield http.get(url, headers); if (response.message.statusCode !== 200) { const err = new HTTPError2(response.message.statusCode); - core14.debug(`Failed to download from "${url}". Code(${response.message.statusCode}) Message(${response.message.statusMessage})`); + core15.debug(`Failed to download from "${url}". Code(${response.message.statusCode}) Message(${response.message.statusMessage})`); throw err; } const pipeline = util.promisify(stream2.pipeline); @@ -99530,16 +99530,16 @@ var require_tool_cache = __commonJS({ let succeeded = false; try { yield pipeline(readStream, fs16.createWriteStream(dest)); - core14.debug("download complete"); + core15.debug("download complete"); succeeded = true; return dest; } finally { if (!succeeded) { - core14.debug("download failed"); + core15.debug("download failed"); try { yield io7.rmRF(dest); } catch (err) { - core14.debug(`Failed to delete '${dest}'. ${err.message}`); + core15.debug(`Failed to delete '${dest}'. ${err.message}`); } } } @@ -99554,7 +99554,7 @@ var require_tool_cache = __commonJS({ process.chdir(dest); if (_7zPath) { try { - const logLevel = core14.isDebug() ? 
"-bb1" : "-bb0"; + const logLevel = core15.isDebug() ? "-bb1" : "-bb0"; const args = [ "x", // eXtract files with full paths @@ -99607,7 +99607,7 @@ var require_tool_cache = __commonJS({ throw new Error("parameter 'file' is required"); } dest = yield _createExtractFolder(dest); - core14.debug("Checking tar --version"); + core15.debug("Checking tar --version"); let versionOutput = ""; yield (0, exec_1.exec)("tar --version", [], { ignoreReturnCode: true, @@ -99617,7 +99617,7 @@ var require_tool_cache = __commonJS({ stderr: (data) => versionOutput += data.toString() } }); - core14.debug(versionOutput.trim()); + core15.debug(versionOutput.trim()); const isGnuTar = versionOutput.toUpperCase().includes("GNU TAR"); let args; if (flags instanceof Array) { @@ -99625,7 +99625,7 @@ var require_tool_cache = __commonJS({ } else { args = [flags]; } - if (core14.isDebug() && !flags.includes("v")) { + if (core15.isDebug() && !flags.includes("v")) { args.push("-v"); } let destArg = dest; @@ -99656,7 +99656,7 @@ var require_tool_cache = __commonJS({ args = [flags]; } args.push("-x", "-C", dest, "-f", file); - if (core14.isDebug()) { + if (core15.isDebug()) { args.push("-v"); } const xarPath = yield io7.which("xar", true); @@ -99699,7 +99699,7 @@ var require_tool_cache = __commonJS({ "-Command", pwshCommand ]; - core14.debug(`Using pwsh at path: ${pwshPath}`); + core15.debug(`Using pwsh at path: ${pwshPath}`); yield (0, exec_1.exec)(`"${pwshPath}"`, args); } else { const powershellCommand = [ @@ -99719,7 +99719,7 @@ var require_tool_cache = __commonJS({ powershellCommand ]; const powershellPath = yield io7.which("powershell", true); - core14.debug(`Using powershell at path: ${powershellPath}`); + core15.debug(`Using powershell at path: ${powershellPath}`); yield (0, exec_1.exec)(`"${powershellPath}"`, args); } }); @@ -99728,7 +99728,7 @@ var require_tool_cache = __commonJS({ return __awaiter2(this, void 0, void 0, function* () { const unzipPath = yield io7.which("unzip", true); const 
args = [file]; - if (!core14.isDebug()) { + if (!core15.isDebug()) { args.unshift("-q"); } args.unshift("-o"); @@ -99739,8 +99739,8 @@ var require_tool_cache = __commonJS({ return __awaiter2(this, void 0, void 0, function* () { version = semver10.clean(version) || version; arch2 = arch2 || os6.arch(); - core14.debug(`Caching tool ${tool} ${version} ${arch2}`); - core14.debug(`source dir: ${sourceDir}`); + core15.debug(`Caching tool ${tool} ${version} ${arch2}`); + core15.debug(`source dir: ${sourceDir}`); if (!fs16.statSync(sourceDir).isDirectory()) { throw new Error("sourceDir is not a directory"); } @@ -99757,14 +99757,14 @@ var require_tool_cache = __commonJS({ return __awaiter2(this, void 0, void 0, function* () { version = semver10.clean(version) || version; arch2 = arch2 || os6.arch(); - core14.debug(`Caching tool ${tool} ${version} ${arch2}`); - core14.debug(`source file: ${sourceFile}`); + core15.debug(`Caching tool ${tool} ${version} ${arch2}`); + core15.debug(`source file: ${sourceFile}`); if (!fs16.statSync(sourceFile).isFile()) { throw new Error("sourceFile is not a file"); } const destFolder = yield _createToolPath(tool, version, arch2); const destPath = path17.join(destFolder, targetFile); - core14.debug(`destination file ${destPath}`); + core15.debug(`destination file ${destPath}`); yield io7.cp(sourceFile, destPath); _completeToolPath(tool, version, arch2); return destFolder; @@ -99787,12 +99787,12 @@ var require_tool_cache = __commonJS({ if (versionSpec) { versionSpec = semver10.clean(versionSpec) || ""; const cachePath = path17.join(_getCacheDirectory(), toolName, versionSpec, arch2); - core14.debug(`checking cache: ${cachePath}`); + core15.debug(`checking cache: ${cachePath}`); if (fs16.existsSync(cachePath) && fs16.existsSync(`${cachePath}.complete`)) { - core14.debug(`Found tool in cache ${toolName} ${versionSpec} ${arch2}`); + core15.debug(`Found tool in cache ${toolName} ${versionSpec} ${arch2}`); toolPath = cachePath; } else { - 
core14.debug("not found"); + core15.debug("not found"); } } return toolPath; @@ -99821,7 +99821,7 @@ var require_tool_cache = __commonJS({ const http = new httpm.HttpClient("tool-cache"); const headers = {}; if (auth2) { - core14.debug("set auth"); + core15.debug("set auth"); headers.authorization = auth2; } const response = yield http.getJson(treeUrl, headers); @@ -99842,7 +99842,7 @@ var require_tool_cache = __commonJS({ try { releases = JSON.parse(versionsRaw); } catch (_a) { - core14.debug("Invalid json"); + core15.debug("Invalid json"); } } return releases; @@ -99866,7 +99866,7 @@ var require_tool_cache = __commonJS({ function _createToolPath(tool, version, arch2) { return __awaiter2(this, void 0, void 0, function* () { const folderPath = path17.join(_getCacheDirectory(), tool, semver10.clean(version) || version, arch2 || ""); - core14.debug(`destination ${folderPath}`); + core15.debug(`destination ${folderPath}`); const markerPath = `${folderPath}.complete`; yield io7.rmRF(folderPath); yield io7.rmRF(markerPath); @@ -99878,18 +99878,18 @@ var require_tool_cache = __commonJS({ const folderPath = path17.join(_getCacheDirectory(), tool, semver10.clean(version) || version, arch2 || ""); const markerPath = `${folderPath}.complete`; fs16.writeFileSync(markerPath, ""); - core14.debug("finished caching tool"); + core15.debug("finished caching tool"); } function isExplicitVersion(versionSpec) { const c = semver10.clean(versionSpec) || ""; - core14.debug(`isExplicit: ${c}`); + core15.debug(`isExplicit: ${c}`); const valid3 = semver10.valid(c) != null; - core14.debug(`explicit? ${valid3}`); + core15.debug(`explicit? 
${valid3}`); return valid3; } function evaluateVersions(versions, versionSpec) { let version = ""; - core14.debug(`evaluating ${versions.length} versions`); + core15.debug(`evaluating ${versions.length} versions`); versions = versions.sort((a, b) => { if (semver10.gt(a, b)) { return 1; @@ -99905,9 +99905,9 @@ var require_tool_cache = __commonJS({ } } if (version) { - core14.debug(`matched: ${version}`); + core15.debug(`matched: ${version}`); } else { - core14.debug("match not found"); + core15.debug("match not found"); } return version; } @@ -100490,8 +100490,8 @@ __export(init_action_exports, { module.exports = __toCommonJS(init_action_exports); var fs15 = __toESM(require("fs")); var path16 = __toESM(require("path")); -var core13 = __toESM(require_core()); -var github2 = __toESM(require_github()); +var core14 = __toESM(require_core()); +var github3 = __toESM(require_github()); var io6 = __toESM(require_io()); var semver9 = __toESM(require_semver2()); @@ -104406,6 +104406,7 @@ function getUnknownLanguagesError(languages) { var RepositoryPropertyName = /* @__PURE__ */ ((RepositoryPropertyName2) => { RepositoryPropertyName2["DISABLE_OVERLAY"] = "github-codeql-disable-overlay"; RepositoryPropertyName2["EXTRA_QUERIES"] = "github-codeql-extra-queries"; + RepositoryPropertyName2["FILE_COVERAGE_ON_PRS"] = "github-codeql-file-coverage-on-prs"; return RepositoryPropertyName2; })(RepositoryPropertyName || {}); function isString(value) { @@ -104422,7 +104423,8 @@ var booleanProperty = { }; var repositoryPropertyParsers = { ["github-codeql-disable-overlay" /* DISABLE_OVERLAY */]: booleanProperty, - ["github-codeql-extra-queries" /* EXTRA_QUERIES */]: stringProperty + ["github-codeql-extra-queries" /* EXTRA_QUERIES */]: stringProperty, + ["github-codeql-file-coverage-on-prs" /* FILE_COVERAGE_ON_PRS */]: booleanProperty }; async function loadPropertiesFromApi(logger, repositoryNwo) { try { @@ -105527,11 +105529,8 @@ var featureConfig = { ["skip_file_coverage_on_prs" /* 
SkipFileCoverageOnPrs */]: { defaultValue: false, envVar: "CODEQL_ACTION_SKIP_FILE_COVERAGE_ON_PRS", - // For testing, this is not behind a CLI version check yet. However - // before rolling this out externally, we should set a minimum version here - // since current versions of the CodeQL CLI will log if baseline information - // cannot be found when interpreting results. - minimumVersion: void 0 + minimumVersion: void 0, + toolsFeature: "suppressesMissingFileBaselineWarning" /* SuppressesMissingFileBaselineWarning */ }, ["start_proxy_remove_unused_registries" /* StartProxyRemoveUnusedRegistries */]: { defaultValue: false, @@ -107142,7 +107141,9 @@ var internal = { // src/init.ts var fs13 = __toESM(require("fs")); var path14 = __toESM(require("path")); +var core11 = __toESM(require_core()); var toolrunner4 = __toESM(require_toolrunner()); +var github2 = __toESM(require_github()); var io5 = __toESM(require_io()); // src/codeql.ts @@ -109086,20 +109087,82 @@ function cleanupDatabaseClusterDirectory(config, logger, options = {}, rmSync2 = } } } -async function getFileCoverageInformationEnabled(debugMode, repositoryNwo, features) { - return ( - // Always enable file coverage information in debug mode - debugMode || // We're most interested in speeding up PRs, and we want to keep - // submitting file coverage information for the default branch since - // it is used to populate the status page. 
- !isAnalyzingPullRequest() || // For now, restrict this feature to the GitHub org - repositoryNwo.owner !== "github" || !await features.getValue("skip_file_coverage_on_prs" /* SkipFileCoverageOnPrs */) - ); +async function getFileCoverageInformationEnabled(debugMode, codeql, features, repositoryProperties) { + if (debugMode) { + return { + enabled: true, + enabledByRepositoryProperty: false, + showDeprecationWarning: false + }; + } + if (!isAnalyzingPullRequest()) { + return { + enabled: true, + enabledByRepositoryProperty: false, + showDeprecationWarning: false + }; + } + if ((process.env["CODEQL_ACTION_FILE_COVERAGE_ON_PRS" /* FILE_COVERAGE_ON_PRS */] || "").toLocaleLowerCase() === "true") { + return { + enabled: true, + enabledByRepositoryProperty: false, + showDeprecationWarning: false + }; + } + if (repositoryProperties["github-codeql-file-coverage-on-prs" /* FILE_COVERAGE_ON_PRS */] === true) { + return { + enabled: true, + enabledByRepositoryProperty: true, + showDeprecationWarning: false + }; + } + if (!await features.getValue("skip_file_coverage_on_prs" /* SkipFileCoverageOnPrs */, codeql)) { + return { + enabled: true, + enabledByRepositoryProperty: false, + showDeprecationWarning: true + }; + } + return { + enabled: false, + enabledByRepositoryProperty: false, + showDeprecationWarning: false + }; +} +function logFileCoverageOnPrsDeprecationWarning(logger) { + if (process.env["CODEQL_ACTION_DID_LOG_FILE_COVERAGE_ON_PRS_DEPRECATION" /* DID_LOG_FILE_COVERAGE_ON_PRS_DEPRECATION */]) { + return; + } + const repositoryOwnerType = github2.context.payload.repository?.owner.type; + let message = "Starting April 2026, the CodeQL Action will skip computing file coverage information on pull requests to improve analysis performance. 
File coverage information will still be computed on non-PR analyses."; + const envVarOptOut = "set the `CODEQL_ACTION_FILE_COVERAGE_ON_PRS` environment variable to `true`."; + const repoPropertyOptOut = 'create a custom repository property with the name `github-codeql-file-coverage-on-prs` and the type "True/false", then set this property to `true` in the repository\'s settings.'; + if (repositoryOwnerType === "Organization") { + if (isDefaultSetup()) { + message += ` + +To opt out of this change, ${repoPropertyOptOut}`; + } else { + message += ` + +To opt out of this change, ${envVarOptOut} Alternatively, ${repoPropertyOptOut}`; + } + } else if (isDefaultSetup()) { + message += ` + +To opt out of this change, switch to an advanced setup workflow and ${envVarOptOut}`; + } else { + message += ` + +To opt out of this change, ${envVarOptOut}`; + } + logger.warning(message); + core11.exportVariable("CODEQL_ACTION_DID_LOG_FILE_COVERAGE_ON_PRS_DEPRECATION" /* DID_LOG_FILE_COVERAGE_ON_PRS_DEPRECATION */, "true"); } // src/status-report.ts var os5 = __toESM(require("os")); -var core11 = __toESM(require_core()); +var core12 = __toESM(require_core()); function isFirstPartyAnalysis(actionName) { if (actionName !== "upload-sarif" /* UploadSarif */) { return true; @@ -109115,12 +109178,12 @@ function getActionsStatus(error3, otherFailureCause) { } function setJobStatusIfUnsuccessful(actionStatus) { if (actionStatus === "user-error") { - core11.exportVariable( + core12.exportVariable( "CODEQL_ACTION_JOB_STATUS" /* JOB_STATUS */, process.env["CODEQL_ACTION_JOB_STATUS" /* JOB_STATUS */] ?? "JOB_STATUS_CONFIGURATION_ERROR" /* ConfigErrorStatus */ ); } else if (actionStatus === "failure" || actionStatus === "aborted") { - core11.exportVariable( + core12.exportVariable( "CODEQL_ACTION_JOB_STATUS" /* JOB_STATUS */, process.env["CODEQL_ACTION_JOB_STATUS" /* JOB_STATUS */] ?? 
"JOB_STATUS_FAILURE" /* FailureStatus */ ); @@ -109139,14 +109202,14 @@ async function createStatusReportBase(actionName, status, actionStartedAt, confi let workflowStartedAt = process.env["CODEQL_WORKFLOW_STARTED_AT" /* WORKFLOW_STARTED_AT */]; if (workflowStartedAt === void 0) { workflowStartedAt = actionStartedAt.toISOString(); - core11.exportVariable("CODEQL_WORKFLOW_STARTED_AT" /* WORKFLOW_STARTED_AT */, workflowStartedAt); + core12.exportVariable("CODEQL_WORKFLOW_STARTED_AT" /* WORKFLOW_STARTED_AT */, workflowStartedAt); } const runnerOs = getRequiredEnvParam("RUNNER_OS"); const codeQlCliVersion = getCachedCodeQlVersion(); const actionRef = process.env["GITHUB_ACTION_REF"] || ""; const testingEnvironment = getTestingEnvironment(); if (testingEnvironment) { - core11.exportVariable("CODEQL_ACTION_TESTING_ENVIRONMENT" /* TESTING_ENVIRONMENT */, testingEnvironment); + core12.exportVariable("CODEQL_ACTION_TESTING_ENVIRONMENT" /* TESTING_ENVIRONMENT */, testingEnvironment); } const isSteadyStateDefaultSetupRun = process.env["CODE_SCANNING_IS_STEADY_STATE_DEFAULT_SETUP"] === "true"; const statusReport = { @@ -109229,9 +109292,9 @@ var INCOMPATIBLE_MSG = "CodeQL Action version is incompatible with the API endpo async function sendStatusReport(statusReport) { setJobStatusIfUnsuccessful(statusReport.status); const statusReportJSON = JSON.stringify(statusReport); - core11.debug(`Sending status report: ${statusReportJSON}`); + core12.debug(`Sending status report: ${statusReportJSON}`); if (isInTestMode()) { - core11.debug("In test mode. Status reports are not uploaded."); + core12.debug("In test mode. 
Status reports are not uploaded."); return; } const nwo = getRepositoryNwo(); @@ -109251,28 +109314,28 @@ async function sendStatusReport(statusReport) { switch (httpError.status) { case 403: if (getWorkflowEventName() === "push" && process.env["GITHUB_ACTOR"] === "dependabot[bot]") { - core11.warning( + core12.warning( `Workflows triggered by Dependabot on the "push" event run with read-only access. Uploading CodeQL results requires write access. To use CodeQL with Dependabot, please ensure you are using the "pull_request" event for this workflow and avoid triggering on the "push" event for Dependabot branches. See ${"https://docs.github.com/en/code-security/code-scanning/creating-an-advanced-setup-for-code-scanning/customizing-your-advanced-setup-for-code-scanning#scanning-on-push" /* SCANNING_ON_PUSH */} for more information on how to configure these events.` ); } else { - core11.warning( + core12.warning( `This run of the CodeQL Action does not have permission to access the CodeQL Action API endpoints. This could be because the Action is running on a pull request from a fork. If not, please ensure the workflow has at least the 'security-events: read' permission. 
Details: ${httpError.message}` ); } return; case 404: - core11.warning(httpError.message); + core12.warning(httpError.message); return; case 422: if (getRequiredEnvParam("GITHUB_SERVER_URL") !== GITHUB_DOTCOM_URL) { - core11.debug(INCOMPATIBLE_MSG); + core12.debug(INCOMPATIBLE_MSG); } else { - core11.debug(OUT_OF_DATE_MSG); + core12.debug(OUT_OF_DATE_MSG); } return; } } - core11.warning( + core12.warning( `An unexpected error occurred when sending a status report: ${getErrorMessage( e )}` @@ -109354,7 +109417,7 @@ async function sendUnhandledErrorStatusReport(actionName, actionStartedAt, error var fs14 = __toESM(require("fs")); var path15 = __toESM(require("path")); var import_zlib = __toESM(require("zlib")); -var core12 = __toESM(require_core()); +var core13 = __toESM(require_core()); function toCodedErrors(errors) { return Object.entries(errors).reduce( (acc, [code, message]) => { @@ -109477,7 +109540,7 @@ async function validateWorkflow(codeql, logger) { } catch (e) { return `error: formatWorkflowErrors() failed: ${String(e)}`; } - core12.warning(message); + core13.warning(message); } return formatWorkflowCause(workflowErrors); } @@ -109523,7 +109586,7 @@ async function getWorkflowAbsolutePath(logger) { } async function checkWorkflow(logger, codeql) { if (!isDynamicWorkflow() && process.env["CODEQL_ACTION_SKIP_WORKFLOW_VALIDATION" /* SKIP_WORKFLOW_VALIDATION */] !== "true") { - core12.startGroup("Validating workflow"); + core13.startGroup("Validating workflow"); const validateWorkflowResult = await internal2.validateWorkflow( codeql, logger @@ -109535,7 +109598,7 @@ async function checkWorkflow(logger, codeql) { `Unable to validate code scanning workflow: ${validateWorkflowResult}` ); } - core12.endGroup(); + core13.endGroup(); } } var internal2 = { @@ -109643,8 +109706,8 @@ async function run(startedAt) { ); const jobRunUuid = v4_default(); logger.info(`Job run UUID is ${jobRunUuid}.`); - core13.exportVariable("JOB_RUN_UUID" /* JOB_RUN_UUID */, jobRunUuid); - 
core13.exportVariable("CODEQL_ACTION_INIT_HAS_RUN" /* INIT_ACTION_HAS_RUN */, "true"); + core14.exportVariable("JOB_RUN_UUID" /* JOB_RUN_UUID */, jobRunUuid); + core14.exportVariable("CODEQL_ACTION_INIT_HAS_RUN" /* INIT_ACTION_HAS_RUN */, "true"); configFile = getOptionalInput("config-file"); sourceRoot = path16.resolve( getRequiredEnvParam("GITHUB_WORKSPACE"), @@ -109697,12 +109760,19 @@ async function run(startedAt) { ); } if (semver9.lt(actualVer, publicPreview)) { - core13.exportVariable("CODEQL_ENABLE_EXPERIMENTAL_FEATURES" /* EXPERIMENTAL_FEATURES */, "true"); + core14.exportVariable("CODEQL_ENABLE_EXPERIMENTAL_FEATURES" /* EXPERIMENTAL_FEATURES */, "true"); logger.info("Experimental Rust analysis enabled"); } } analysisKinds = await getAnalysisKinds(logger); - const debugMode = getOptionalInput("debug") === "true" || core13.isDebug(); + const debugMode = getOptionalInput("debug") === "true" || core14.isDebug(); + const repositoryProperties = repositoryPropertiesResult.orElse({}); + const fileCoverageResult = await getFileCoverageInformationEnabled( + debugMode, + codeql, + features, + repositoryProperties + ); config = await initConfig2(features, { analysisKinds, languagesInput: getOptionalInput("languages"), @@ -109730,12 +109800,8 @@ async function run(startedAt) { githubVersion: gitHubVersion, apiDetails, features, - repositoryProperties: repositoryPropertiesResult.orElse({}), - enableFileCoverageInformation: await getFileCoverageInformationEnabled( - debugMode, - repositoryNwo, - features - ), + repositoryProperties, + enableFileCoverageInformation: fileCoverageResult.enabled, logger }); if (repositoryPropertiesResult.isFailure()) { @@ -109750,10 +109816,23 @@ async function run(startedAt) { ) ); } + if (fileCoverageResult.enabledByRepositoryProperty) { + addNoLanguageDiagnostic( + config, + makeTelemetryDiagnostic( + "codeql-action/file-coverage-on-prs-enabled-by-repository-property", + "File coverage on PRs enabled by repository property", + {} + ) + 
); + } + if (fileCoverageResult.showDeprecationWarning) { + logFileCoverageOnPrsDeprecationWarning(logger); + } await checkInstallPython311(config.languages, codeql); } catch (unwrappedError) { const error3 = wrapError(unwrappedError); - core13.setFailed(error3.message); + core14.setFailed(error3.message); const statusReportBase = await createStatusReportBase( "init" /* Init */, error3 instanceof ConfigurationError ? "user-error" : "aborted", @@ -109803,8 +109882,8 @@ async function run(startedAt) { } const goFlags = process.env["GOFLAGS"]; if (goFlags) { - core13.exportVariable("GOFLAGS", goFlags); - core13.warning( + core14.exportVariable("GOFLAGS", goFlags); + core14.warning( "Passing the GOFLAGS env parameter to the init action is deprecated. Please move this to the analyze action." ); } @@ -109828,7 +109907,7 @@ async function run(startedAt) { "bin" ); fs15.mkdirSync(tempBinPath, { recursive: true }); - core13.addPath(tempBinPath); + core14.addPath(tempBinPath); const goWrapperPath = path16.resolve(tempBinPath, "go"); fs15.writeFileSync( goWrapperPath, @@ -109837,14 +109916,14 @@ async function run(startedAt) { exec ${goBinaryPath} "$@"` ); fs15.chmodSync(goWrapperPath, "755"); - core13.exportVariable("CODEQL_ACTION_GO_BINARY" /* GO_BINARY_LOCATION */, goWrapperPath); + core14.exportVariable("CODEQL_ACTION_GO_BINARY" /* GO_BINARY_LOCATION */, goWrapperPath); } catch (e) { logger.warning( `Analyzing Go on Linux, but failed to install wrapper script. 
Tracing custom builds may fail: ${e}` ); } } else { - core13.exportVariable("CODEQL_ACTION_GO_BINARY" /* GO_BINARY_LOCATION */, goBinaryPath); + core14.exportVariable("CODEQL_ACTION_GO_BINARY" /* GO_BINARY_LOCATION */, goBinaryPath); } } catch (e) { logger.warning( @@ -109871,20 +109950,20 @@ exec ${goBinaryPath} "$@"` } } } - core13.exportVariable( + core14.exportVariable( "CODEQL_RAM", process.env["CODEQL_RAM"] || getCodeQLMemoryLimit(getOptionalInput("ram"), logger).toString() ); - core13.exportVariable( + core14.exportVariable( "CODEQL_THREADS", process.env["CODEQL_THREADS"] || getThreadsFlagValue(getOptionalInput("threads"), logger).toString() ); if (await features.getValue("disable_kotlin_analysis_enabled" /* DisableKotlinAnalysisEnabled */)) { - core13.exportVariable("CODEQL_EXTRACTOR_JAVA_AGENT_DISABLE_KOTLIN", "true"); + core14.exportVariable("CODEQL_EXTRACTOR_JAVA_AGENT_DISABLE_KOTLIN", "true"); } const kotlinLimitVar = "CODEQL_EXTRACTOR_KOTLIN_OVERRIDE_MAXIMUM_VERSION_LIMIT"; if (await codeQlVersionAtLeast(codeql, "2.20.3") && !await codeQlVersionAtLeast(codeql, "2.20.4")) { - core13.exportVariable(kotlinLimitVar, "2.1.20"); + core14.exportVariable(kotlinLimitVar, "2.1.20"); } if (config.languages.includes("cpp" /* cpp */)) { const envVar = "CODEQL_EXTRACTOR_CPP_TRAP_CACHING"; @@ -109894,10 +109973,10 @@ exec ${goBinaryPath} "$@"` ); } else if (getTrapCachingEnabled() && await codeQlVersionAtLeast(codeql, "2.17.5")) { logger.info("Enabling CodeQL C++ TRAP caching support"); - core13.exportVariable(envVar, "true"); + core14.exportVariable(envVar, "true"); } else { logger.info("Disabling CodeQL C++ TRAP caching support"); - core13.exportVariable(envVar, "false"); + core14.exportVariable(envVar, "false"); } } if (shouldRestoreCache(config.dependencyCachingEnabled)) { @@ -109912,7 +109991,7 @@ exec ${goBinaryPath} "$@"` } if (await codeQlVersionAtLeast(codeql, "2.17.1")) { } else { - core13.exportVariable( + core14.exportVariable( 
"CODEQL_EXTRACTOR_PYTHON_DISABLE_LIBRARY_EXTRACTION", "true" ); @@ -109938,7 +110017,7 @@ exec ${goBinaryPath} "$@"` "python_default_is_to_not_extract_stdlib" /* PythonDefaultIsToNotExtractStdlib */, codeql )) { - core13.exportVariable("CODEQL_EXTRACTOR_PYTHON_EXTRACT_STDLIB", "true"); + core14.exportVariable("CODEQL_EXTRACTOR_PYTHON_EXTRACT_STDLIB", "true"); } } if (process.env["CODEQL_EXTRACTOR_JAVA_OPTION_MINIMIZE_DEPENDENCY_JARS" /* JAVA_EXTRACTOR_MINIMIZE_DEPENDENCY_JARS */]) { @@ -109946,7 +110025,7 @@ exec ${goBinaryPath} "$@"` `${"CODEQL_EXTRACTOR_JAVA_OPTION_MINIMIZE_DEPENDENCY_JARS" /* JAVA_EXTRACTOR_MINIMIZE_DEPENDENCY_JARS */} is already set to '${process.env["CODEQL_EXTRACTOR_JAVA_OPTION_MINIMIZE_DEPENDENCY_JARS" /* JAVA_EXTRACTOR_MINIMIZE_DEPENDENCY_JARS */]}', so the Action will not override it.` ); } else if (await codeQlVersionAtLeast(codeql, CODEQL_VERSION_JAR_MINIMIZATION) && config.dependencyCachingEnabled && config.buildMode === "none" /* None */ && config.languages.includes("java" /* java */)) { - core13.exportVariable( + core14.exportVariable( "CODEQL_EXTRACTOR_JAVA_OPTION_MINIMIZE_DEPENDENCY_JARS" /* JAVA_EXTRACTOR_MINIMIZE_DEPENDENCY_JARS */, "true" ); @@ -109990,23 +110069,23 @@ exec ${goBinaryPath} "$@"` const tracerConfig = await getCombinedTracerConfig(codeql, config); if (tracerConfig !== void 0) { for (const [key, value] of Object.entries(tracerConfig.env)) { - core13.exportVariable(key, value); + core14.exportVariable(key, value); } } if (await features.getValue("java_network_debugging" /* JavaNetworkDebugging */)) { const existingJavaToolOptions = getOptionalEnvVar("JAVA_TOOL_OPTIONS" /* JAVA_TOOL_OPTIONS */) || ""; - core13.exportVariable( + core14.exportVariable( "JAVA_TOOL_OPTIONS" /* JAVA_TOOL_OPTIONS */, `${existingJavaToolOptions} -Djavax.net.debug=all` ); } flushDiagnostics(config); await saveConfig(config, logger); - core13.setOutput("codeql-path", config.codeQLCmd); - core13.setOutput("codeql-version", (await 
codeql.getVersion()).version); + core14.setOutput("codeql-path", config.codeQLCmd); + core14.setOutput("codeql-version", (await codeql.getVersion()).version); } catch (unwrappedError) { const error3 = wrapError(unwrappedError); - core13.setFailed(error3.message); + core14.setFailed(error3.message); await sendCompletedStatusReport( startedAt, config, @@ -110039,7 +110118,7 @@ exec ${goBinaryPath} "$@"` ); } async function loadRepositoryProperties(repositoryNwo, logger) { - const repositoryOwnerType = github2.context.payload.repository?.owner.type; + const repositoryOwnerType = github3.context.payload.repository?.owner.type; logger.debug( `Repository owner type is '${repositoryOwnerType ?? "unknown"}'.` ); @@ -110080,7 +110159,7 @@ async function runWrapper() { try { await run(startedAt); } catch (error3) { - core13.setFailed(`init action failed: ${getErrorMessage(error3)}`); + core14.setFailed(`init action failed: ${getErrorMessage(error3)}`); await sendUnhandledErrorStatusReport( "init" /* Init */, startedAt, diff --git a/lib/resolve-environment-action.js b/lib/resolve-environment-action.js index aa3673bd33..f4fe15e316 100644 --- a/lib/resolve-environment-action.js +++ b/lib/resolve-environment-action.js @@ -103785,6 +103785,7 @@ var semver2 = __toESM(require_semver2()); var RepositoryPropertyName = /* @__PURE__ */ ((RepositoryPropertyName2) => { RepositoryPropertyName2["DISABLE_OVERLAY"] = "github-codeql-disable-overlay"; RepositoryPropertyName2["EXTRA_QUERIES"] = "github-codeql-extra-queries"; + RepositoryPropertyName2["FILE_COVERAGE_ON_PRS"] = "github-codeql-file-coverage-on-prs"; return RepositoryPropertyName2; })(RepositoryPropertyName || {}); var KNOWN_REPOSITORY_PROPERTY_NAMES = new Set( @@ -104261,11 +104262,8 @@ var featureConfig = { ["skip_file_coverage_on_prs" /* SkipFileCoverageOnPrs */]: { defaultValue: false, envVar: "CODEQL_ACTION_SKIP_FILE_COVERAGE_ON_PRS", - // For testing, this is not behind a CLI version check yet. 
However - // before rolling this out externally, we should set a minimum version here - // since current versions of the CodeQL CLI will log if baseline information - // cannot be found when interpreting results. - minimumVersion: void 0 + minimumVersion: void 0, + toolsFeature: "suppressesMissingFileBaselineWarning" /* SuppressesMissingFileBaselineWarning */ }, ["start_proxy_remove_unused_registries" /* StartProxyRemoveUnusedRegistries */]: { defaultValue: false, diff --git a/lib/setup-codeql-action.js b/lib/setup-codeql-action.js index a9eb08eb59..1a1a597e02 100644 --- a/lib/setup-codeql-action.js +++ b/lib/setup-codeql-action.js @@ -3842,18 +3842,18 @@ var require_webidl = __commonJS({ webidl.errors.exception = function(message) { return new TypeError(`${message.header}: ${message.message}`); }; - webidl.errors.conversionFailed = function(context2) { - const plural = context2.types.length === 1 ? "" : " one of"; - const message = `${context2.argument} could not be converted to${plural}: ${context2.types.join(", ")}.`; + webidl.errors.conversionFailed = function(context3) { + const plural = context3.types.length === 1 ? 
"" : " one of"; + const message = `${context3.argument} could not be converted to${plural}: ${context3.types.join(", ")}.`; return webidl.errors.exception({ - header: context2.prefix, + header: context3.prefix, message }); }; - webidl.errors.invalidArgument = function(context2) { + webidl.errors.invalidArgument = function(context3) { return webidl.errors.exception({ - header: context2.prefix, - message: `"${context2.value}" is an invalid ${context2.type}.` + header: context3.prefix, + message: `"${context3.value}" is an invalid ${context3.type}.` }); }; webidl.brandCheck = function(V, I, opts) { @@ -9849,17 +9849,17 @@ var require_api_request = __commonJS({ } } } - onConnect(abort, context2) { + onConnect(abort, context3) { if (this.reason) { abort(this.reason); return; } assert(this.callback); this.abort = abort; - this.context = context2; + this.context = context3; } onHeaders(statusCode, rawHeaders, resume, statusMessage) { - const { callback, opaque, abort, context: context2, responseHeaders, highWaterMark } = this; + const { callback, opaque, abort, context: context3, responseHeaders, highWaterMark } = this; const headers = responseHeaders === "raw" ? 
util.parseRawHeaders(rawHeaders) : util.parseHeaders(rawHeaders); if (statusCode < 200) { if (this.onInfo) { @@ -9896,7 +9896,7 @@ var require_api_request = __commonJS({ trailers: this.trailers, opaque, body: res, - context: context2 + context: context3 }); } } @@ -10065,17 +10065,17 @@ var require_api_stream = __commonJS({ } addSignal(this, signal); } - onConnect(abort, context2) { + onConnect(abort, context3) { if (this.reason) { abort(this.reason); return; } assert(this.callback); this.abort = abort; - this.context = context2; + this.context = context3; } onHeaders(statusCode, rawHeaders, resume, statusMessage) { - const { factory, opaque, context: context2, callback, responseHeaders } = this; + const { factory, opaque, context: context3, callback, responseHeaders } = this; const headers = responseHeaders === "raw" ? util.parseRawHeaders(rawHeaders) : util.parseHeaders(rawHeaders); if (statusCode < 200) { if (this.onInfo) { @@ -10103,7 +10103,7 @@ var require_api_stream = __commonJS({ statusCode, headers, opaque, - context: context2 + context: context3 }); if (!res || typeof res.write !== "function" || typeof res.end !== "function" || typeof res.on !== "function") { throw new InvalidReturnValueError("expected Writable"); @@ -10295,7 +10295,7 @@ var require_api_pipeline = __commonJS({ this.res = null; addSignal(this, signal); } - onConnect(abort, context2) { + onConnect(abort, context3) { const { ret, res } = this; if (this.reason) { abort(this.reason); @@ -10304,10 +10304,10 @@ var require_api_pipeline = __commonJS({ assert(!res, "pipeline cannot be retried"); assert(!ret.destroyed); this.abort = abort; - this.context = context2; + this.context = context3; } onHeaders(statusCode, rawHeaders, resume) { - const { opaque, handler: handler2, context: context2 } = this; + const { opaque, handler: handler2, context: context3 } = this; if (statusCode < 200) { if (this.onInfo) { const headers = this.responseHeaders === "raw" ? 
util.parseRawHeaders(rawHeaders) : util.parseHeaders(rawHeaders); @@ -10325,7 +10325,7 @@ var require_api_pipeline = __commonJS({ headers, opaque, body: this.res, - context: context2 + context: context3 }); } catch (err) { this.res.on("error", util.nop); @@ -10409,7 +10409,7 @@ var require_api_upgrade = __commonJS({ this.context = null; addSignal(this, signal); } - onConnect(abort, context2) { + onConnect(abort, context3) { if (this.reason) { abort(this.reason); return; @@ -10423,7 +10423,7 @@ var require_api_upgrade = __commonJS({ } onUpgrade(statusCode, rawHeaders, socket) { assert(statusCode === 101); - const { callback, opaque, context: context2 } = this; + const { callback, opaque, context: context3 } = this; removeSignal(this); this.callback = null; const headers = this.responseHeaders === "raw" ? util.parseRawHeaders(rawHeaders) : util.parseHeaders(rawHeaders); @@ -10431,7 +10431,7 @@ var require_api_upgrade = __commonJS({ headers, socket, opaque, - context: context2 + context: context3 }); } onError(err) { @@ -10500,20 +10500,20 @@ var require_api_connect = __commonJS({ this.abort = null; addSignal(this, signal); } - onConnect(abort, context2) { + onConnect(abort, context3) { if (this.reason) { abort(this.reason); return; } assert(this.callback); this.abort = abort; - this.context = context2; + this.context = context3; } onHeaders() { throw new SocketError("bad connect", null); } onUpgrade(statusCode, rawHeaders, socket) { - const { callback, opaque, context: context2 } = this; + const { callback, opaque, context: context3 } = this; removeSignal(this); this.callback = null; let headers = rawHeaders; @@ -10525,7 +10525,7 @@ var require_api_connect = __commonJS({ headers, socket, opaque, - context: context2 + context: context3 }); } onError(err) { @@ -21321,7 +21321,7 @@ var require_core = __commonJS({ }; Object.defineProperty(exports2, "__esModule", { value: true }); exports2.platform = exports2.toPlatformPath = exports2.toWin32Path = exports2.toPosixPath = 
exports2.markdownSummary = exports2.summary = exports2.ExitCode = void 0; - exports2.exportVariable = exportVariable6; + exports2.exportVariable = exportVariable7; exports2.setSecret = setSecret; exports2.addPath = addPath; exports2.getInput = getInput2; @@ -21353,7 +21353,7 @@ var require_core = __commonJS({ ExitCode2[ExitCode2["Success"] = 0] = "Success"; ExitCode2[ExitCode2["Failure"] = 1] = "Failure"; })(ExitCode || (exports2.ExitCode = ExitCode = {})); - function exportVariable6(name, val) { + function exportVariable7(name, val) { const convertedVal = (0, utils_1.toCommandValue)(val); process.env[name] = convertedVal; const filePath = process.env["GITHUB_ENV"] || ""; @@ -24840,18 +24840,18 @@ var require_webidl2 = __commonJS({ webidl.errors.exception = function(message) { return new TypeError(`${message.header}: ${message.message}`); }; - webidl.errors.conversionFailed = function(context2) { - const plural = context2.types.length === 1 ? "" : " one of"; - const message = `${context2.argument} could not be converted to${plural}: ${context2.types.join(", ")}.`; + webidl.errors.conversionFailed = function(context3) { + const plural = context3.types.length === 1 ? 
"" : " one of"; + const message = `${context3.argument} could not be converted to${plural}: ${context3.types.join(", ")}.`; return webidl.errors.exception({ - header: context2.prefix, + header: context3.prefix, message }); }; - webidl.errors.invalidArgument = function(context2) { + webidl.errors.invalidArgument = function(context3) { return webidl.errors.exception({ - header: context2.prefix, - message: `"${context2.value}" is an invalid ${context2.type}.` + header: context3.prefix, + message: `"${context3.value}" is an invalid ${context3.type}.` }); }; webidl.brandCheck = function(V, I, opts) { @@ -30847,17 +30847,17 @@ var require_api_request2 = __commonJS({ } } } - onConnect(abort, context2) { + onConnect(abort, context3) { if (this.reason) { abort(this.reason); return; } assert(this.callback); this.abort = abort; - this.context = context2; + this.context = context3; } onHeaders(statusCode, rawHeaders, resume, statusMessage) { - const { callback, opaque, abort, context: context2, responseHeaders, highWaterMark } = this; + const { callback, opaque, abort, context: context3, responseHeaders, highWaterMark } = this; const headers = responseHeaders === "raw" ? 
util.parseRawHeaders(rawHeaders) : util.parseHeaders(rawHeaders); if (statusCode < 200) { if (this.onInfo) { @@ -30894,7 +30894,7 @@ var require_api_request2 = __commonJS({ trailers: this.trailers, opaque, body: res, - context: context2 + context: context3 }); } } @@ -31063,17 +31063,17 @@ var require_api_stream2 = __commonJS({ } addSignal(this, signal); } - onConnect(abort, context2) { + onConnect(abort, context3) { if (this.reason) { abort(this.reason); return; } assert(this.callback); this.abort = abort; - this.context = context2; + this.context = context3; } onHeaders(statusCode, rawHeaders, resume, statusMessage) { - const { factory, opaque, context: context2, callback, responseHeaders } = this; + const { factory, opaque, context: context3, callback, responseHeaders } = this; const headers = responseHeaders === "raw" ? util.parseRawHeaders(rawHeaders) : util.parseHeaders(rawHeaders); if (statusCode < 200) { if (this.onInfo) { @@ -31101,7 +31101,7 @@ var require_api_stream2 = __commonJS({ statusCode, headers, opaque, - context: context2 + context: context3 }); if (!res || typeof res.write !== "function" || typeof res.end !== "function" || typeof res.on !== "function") { throw new InvalidReturnValueError("expected Writable"); @@ -31293,7 +31293,7 @@ var require_api_pipeline2 = __commonJS({ this.res = null; addSignal(this, signal); } - onConnect(abort, context2) { + onConnect(abort, context3) { const { ret, res } = this; if (this.reason) { abort(this.reason); @@ -31302,10 +31302,10 @@ var require_api_pipeline2 = __commonJS({ assert(!res, "pipeline cannot be retried"); assert(!ret.destroyed); this.abort = abort; - this.context = context2; + this.context = context3; } onHeaders(statusCode, rawHeaders, resume) { - const { opaque, handler: handler2, context: context2 } = this; + const { opaque, handler: handler2, context: context3 } = this; if (statusCode < 200) { if (this.onInfo) { const headers = this.responseHeaders === "raw" ? 
util.parseRawHeaders(rawHeaders) : util.parseHeaders(rawHeaders); @@ -31323,7 +31323,7 @@ var require_api_pipeline2 = __commonJS({ headers, opaque, body: this.res, - context: context2 + context: context3 }); } catch (err) { this.res.on("error", util.nop); @@ -31407,7 +31407,7 @@ var require_api_upgrade2 = __commonJS({ this.context = null; addSignal(this, signal); } - onConnect(abort, context2) { + onConnect(abort, context3) { if (this.reason) { abort(this.reason); return; @@ -31421,7 +31421,7 @@ var require_api_upgrade2 = __commonJS({ } onUpgrade(statusCode, rawHeaders, socket) { assert(statusCode === 101); - const { callback, opaque, context: context2 } = this; + const { callback, opaque, context: context3 } = this; removeSignal(this); this.callback = null; const headers = this.responseHeaders === "raw" ? util.parseRawHeaders(rawHeaders) : util.parseHeaders(rawHeaders); @@ -31429,7 +31429,7 @@ var require_api_upgrade2 = __commonJS({ headers, socket, opaque, - context: context2 + context: context3 }); } onError(err) { @@ -31498,20 +31498,20 @@ var require_api_connect2 = __commonJS({ this.abort = null; addSignal(this, signal); } - onConnect(abort, context2) { + onConnect(abort, context3) { if (this.reason) { abort(this.reason); return; } assert(this.callback); this.abort = abort; - this.context = context2; + this.context = context3; } onHeaders() { throw new SocketError("bad connect", null); } onUpgrade(statusCode, rawHeaders, socket) { - const { callback, opaque, context: context2 } = this; + const { callback, opaque, context: context3 } = this; removeSignal(this); this.callback = null; let headers = rawHeaders; @@ -31523,7 +31523,7 @@ var require_api_connect2 = __commonJS({ headers, socket, opaque, - context: context2 + context: context3 }); } onError(err) { @@ -40178,8 +40178,8 @@ function isDefined(value) { function isKeyOperator(operator) { return operator === ";" || operator === "&" || operator === "?"; } -function getValues(context2, operator, key, modifier) 
{ - var value = context2[key], result = []; +function getValues(context3, operator, key, modifier) { + var value = context3[key], result = []; if (isDefined(value) && value !== "") { if (typeof value === "string" || typeof value === "number" || typeof value === "boolean") { value = value.toString(); @@ -40243,7 +40243,7 @@ function parseUrl(template) { expand: expand.bind(null, template) }; } -function expand(template, context2) { +function expand(template, context3) { var operators = ["+", "#", ".", "/", ";", "?", "&"]; template = template.replace( /\{([^\{\}]+)\}|([^\{\}]+)/g, @@ -40257,7 +40257,7 @@ function expand(template, context2) { } expression.split(/,/g).forEach(function(variable) { var tmp = /([^:\*]*)(?::(\d+)|(\*))?/.exec(variable); - values.push(getValues(context2, operator, tmp[1], tmp[2] || tmp[3])); + values.push(getValues(context3, operator, tmp[1], tmp[2] || tmp[3])); }); if (operator && operator !== "+") { var separator = ","; @@ -47450,7 +47450,7 @@ var require_internal_glob_options_helper = __commonJS({ })(); Object.defineProperty(exports2, "__esModule", { value: true }); exports2.getOptions = getOptions; - var core13 = __importStar2(require_core()); + var core14 = __importStar2(require_core()); function getOptions(copy) { const result = { followSymbolicLinks: true, @@ -47462,23 +47462,23 @@ var require_internal_glob_options_helper = __commonJS({ if (copy) { if (typeof copy.followSymbolicLinks === "boolean") { result.followSymbolicLinks = copy.followSymbolicLinks; - core13.debug(`followSymbolicLinks '${result.followSymbolicLinks}'`); + core14.debug(`followSymbolicLinks '${result.followSymbolicLinks}'`); } if (typeof copy.implicitDescendants === "boolean") { result.implicitDescendants = copy.implicitDescendants; - core13.debug(`implicitDescendants '${result.implicitDescendants}'`); + core14.debug(`implicitDescendants '${result.implicitDescendants}'`); } if (typeof copy.matchDirectories === "boolean") { result.matchDirectories = 
copy.matchDirectories; - core13.debug(`matchDirectories '${result.matchDirectories}'`); + core14.debug(`matchDirectories '${result.matchDirectories}'`); } if (typeof copy.omitBrokenSymbolicLinks === "boolean") { result.omitBrokenSymbolicLinks = copy.omitBrokenSymbolicLinks; - core13.debug(`omitBrokenSymbolicLinks '${result.omitBrokenSymbolicLinks}'`); + core14.debug(`omitBrokenSymbolicLinks '${result.omitBrokenSymbolicLinks}'`); } if (typeof copy.excludeHiddenFiles === "boolean") { result.excludeHiddenFiles = copy.excludeHiddenFiles; - core13.debug(`excludeHiddenFiles '${result.excludeHiddenFiles}'`); + core14.debug(`excludeHiddenFiles '${result.excludeHiddenFiles}'`); } } return result; @@ -49106,7 +49106,7 @@ var require_internal_globber = __commonJS({ }; Object.defineProperty(exports2, "__esModule", { value: true }); exports2.DefaultGlobber = void 0; - var core13 = __importStar2(require_core()); + var core14 = __importStar2(require_core()); var fs9 = __importStar2(require("fs")); var globOptionsHelper = __importStar2(require_internal_glob_options_helper()); var path9 = __importStar2(require("path")); @@ -49159,7 +49159,7 @@ var require_internal_globber = __commonJS({ } const stack = []; for (const searchPath of patternHelper.getSearchPaths(patterns)) { - core13.debug(`Search path '${searchPath}'`); + core14.debug(`Search path '${searchPath}'`); try { yield __await2(fs9.promises.lstat(searchPath)); } catch (err) { @@ -49234,7 +49234,7 @@ var require_internal_globber = __commonJS({ } catch (err) { if (err.code === "ENOENT") { if (options.omitBrokenSymbolicLinks) { - core13.debug(`Broken symlink '${item.path}'`); + core14.debug(`Broken symlink '${item.path}'`); return void 0; } throw new Error(`No information found for the path '${item.path}'. 
This may indicate a broken symbolic link.`); @@ -49250,7 +49250,7 @@ var require_internal_globber = __commonJS({ traversalChain.pop(); } if (traversalChain.some((x) => x === realPath)) { - core13.debug(`Symlink cycle detected for path '${item.path}' and realpath '${realPath}'`); + core14.debug(`Symlink cycle detected for path '${item.path}' and realpath '${realPath}'`); return void 0; } traversalChain.push(realPath); @@ -49353,7 +49353,7 @@ var require_internal_hash_files = __commonJS({ Object.defineProperty(exports2, "__esModule", { value: true }); exports2.hashFiles = hashFiles; var crypto2 = __importStar2(require("crypto")); - var core13 = __importStar2(require_core()); + var core14 = __importStar2(require_core()); var fs9 = __importStar2(require("fs")); var stream2 = __importStar2(require("stream")); var util = __importStar2(require("util")); @@ -49362,7 +49362,7 @@ var require_internal_hash_files = __commonJS({ return __awaiter2(this, arguments, void 0, function* (globber, currentWorkspace, verbose = false) { var _a, e_1, _b, _c; var _d; - const writeDelegate = verbose ? core13.info : core13.debug; + const writeDelegate = verbose ? core14.info : core14.debug; let hasMatch = false; const githubWorkspace = currentWorkspace ? currentWorkspace : (_d = process.env["GITHUB_WORKSPACE"]) !== null && _d !== void 0 ? 
_d : process.cwd(); const result = crypto2.createHash("sha256"); @@ -50753,7 +50753,7 @@ var require_cacheUtils = __commonJS({ exports2.assertDefined = assertDefined; exports2.getCacheVersion = getCacheVersion; exports2.getRuntimeToken = getRuntimeToken; - var core13 = __importStar2(require_core()); + var core14 = __importStar2(require_core()); var exec = __importStar2(require_exec()); var glob = __importStar2(require_glob()); var io6 = __importStar2(require_io()); @@ -50804,7 +50804,7 @@ var require_cacheUtils = __commonJS({ _e = false; const file = _c; const relativeFile = path9.relative(workspace, file).replace(new RegExp(`\\${path9.sep}`, "g"), "/"); - core13.debug(`Matched: ${relativeFile}`); + core14.debug(`Matched: ${relativeFile}`); if (relativeFile === "") { paths.push("."); } else { @@ -50832,7 +50832,7 @@ var require_cacheUtils = __commonJS({ return __awaiter2(this, arguments, void 0, function* (app, additionalArgs = []) { let versionOutput = ""; additionalArgs.push("--version"); - core13.debug(`Checking ${app} ${additionalArgs.join(" ")}`); + core14.debug(`Checking ${app} ${additionalArgs.join(" ")}`); try { yield exec.exec(`${app}`, additionalArgs, { ignoreReturnCode: true, @@ -50843,10 +50843,10 @@ var require_cacheUtils = __commonJS({ } }); } catch (err) { - core13.debug(err.message); + core14.debug(err.message); } versionOutput = versionOutput.trim(); - core13.debug(versionOutput); + core14.debug(versionOutput); return versionOutput; }); } @@ -50854,7 +50854,7 @@ var require_cacheUtils = __commonJS({ return __awaiter2(this, void 0, void 0, function* () { const versionOutput = yield getVersion("zstd", ["--quiet"]); const version = semver9.clean(versionOutput); - core13.debug(`zstd version: ${version}`); + core14.debug(`zstd version: ${version}`); if (versionOutput === "") { return constants_1.CompressionMethod.Gzip; } else { @@ -50979,14 +50979,14 @@ function __esDecorate(ctor, descriptorIn, decorators, contextIn, initializers, e var descriptor = 
descriptorIn || (target ? Object.getOwnPropertyDescriptor(target, contextIn.name) : {}); var _, done = false; for (var i = decorators.length - 1; i >= 0; i--) { - var context2 = {}; - for (var p in contextIn) context2[p] = p === "access" ? {} : contextIn[p]; - for (var p in contextIn.access) context2.access[p] = contextIn.access[p]; - context2.addInitializer = function(f) { + var context3 = {}; + for (var p in contextIn) context3[p] = p === "access" ? {} : contextIn[p]; + for (var p in contextIn.access) context3.access[p] = contextIn.access[p]; + context3.addInitializer = function(f) { if (done) throw new TypeError("Cannot add initializers after decoration has completed"); extraInitializers.push(accept(f || null)); }; - var result = (0, decorators[i])(kind === "accessor" ? { get: descriptor.get, set: descriptor.set } : descriptor[key], context2); + var result = (0, decorators[i])(kind === "accessor" ? { get: descriptor.get, set: descriptor.set } : descriptor[key], context3); if (kind === "accessor") { if (result === void 0) continue; if (result === null || typeof result !== "object") throw new TypeError("Object expected"); @@ -51713,19 +51713,19 @@ var require_logger = __commonJS({ logger: clientLogger }; } - var context2 = createLoggerContext({ + var context3 = createLoggerContext({ logLevelEnvVarName: "TYPESPEC_RUNTIME_LOG_LEVEL", namespace: "typeSpecRuntime" }); - exports2.TypeSpecRuntimeLogger = context2.logger; + exports2.TypeSpecRuntimeLogger = context3.logger; function setLogLevel(logLevel) { - context2.setLogLevel(logLevel); + context3.setLogLevel(logLevel); } function getLogLevel() { - return context2.getLogLevel(); + return context3.getLogLevel(); } function createClientLogger(namespace) { - return context2.createClientLogger(namespace); + return context3.createClientLogger(namespace); } } }); @@ -55971,19 +55971,19 @@ var require_commonjs2 = __commonJS({ exports2.getLogLevel = getLogLevel; exports2.createClientLogger = createClientLogger; var logger_1 = 
require_internal(); - var context2 = (0, logger_1.createLoggerContext)({ + var context3 = (0, logger_1.createLoggerContext)({ logLevelEnvVarName: "AZURE_LOG_LEVEL", namespace: "azure" }); - exports2.AzureLogger = context2.logger; + exports2.AzureLogger = context3.logger; function setLogLevel(level) { - context2.setLogLevel(level); + context3.setLogLevel(level); } function getLogLevel() { - return context2.getLogLevel(); + return context3.getLogLevel(); } function createClientLogger(namespace) { - return context2.createClientLogger(namespace); + return context3.createClientLogger(namespace); } } }); @@ -56893,14 +56893,14 @@ var require_tracingContext = __commonJS({ namespace: /* @__PURE__ */ Symbol.for("@azure/core-tracing namespace") }; function createTracingContext(options = {}) { - let context2 = new TracingContextImpl(options.parentContext); + let context3 = new TracingContextImpl(options.parentContext); if (options.span) { - context2 = context2.setValue(exports2.knownContextKeys.span, options.span); + context3 = context3.setValue(exports2.knownContextKeys.span, options.span); } if (options.namespace) { - context2 = context2.setValue(exports2.knownContextKeys.namespace, options.namespace); + context3 = context3.setValue(exports2.knownContextKeys.namespace, options.namespace); } - return context2; + return context3; } var TracingContextImpl = class _TracingContextImpl { _contextMap; @@ -57038,8 +57038,8 @@ var require_tracingClient = __commonJS({ span.end(); } } - function withContext(context2, callback, ...callbackArgs) { - return (0, instrumenter_js_1.getInstrumenter)().withContext(context2, callback, ...callbackArgs); + function withContext(context3, callback, ...callbackArgs) { + return (0, instrumenter_js_1.getInstrumenter)().withContext(context3, callback, ...callbackArgs); } function parseTraceparentHeader(traceparentHeader) { return (0, instrumenter_js_1.getInstrumenter)().parseTraceparentHeader(traceparentHeader); @@ -90510,7 +90510,7 @@ var 
require_uploadUtils = __commonJS({ Object.defineProperty(exports2, "__esModule", { value: true }); exports2.UploadProgress = void 0; exports2.uploadCacheArchiveSDK = uploadCacheArchiveSDK; - var core13 = __importStar2(require_core()); + var core14 = __importStar2(require_core()); var storage_blob_1 = require_commonjs15(); var errors_1 = require_errors3(); var UploadProgress = class { @@ -90552,7 +90552,7 @@ var require_uploadUtils = __commonJS({ const percentage = (100 * (transferredBytes / this.contentLength)).toFixed(1); const elapsedTime = Date.now() - this.startTime; const uploadSpeed = (transferredBytes / (1024 * 1024) / (elapsedTime / 1e3)).toFixed(1); - core13.info(`Sent ${transferredBytes} of ${this.contentLength} (${percentage}%), ${uploadSpeed} MBs/sec`); + core14.info(`Sent ${transferredBytes} of ${this.contentLength} (${percentage}%), ${uploadSpeed} MBs/sec`); if (this.isDone()) { this.displayedComplete = true; } @@ -90609,14 +90609,14 @@ var require_uploadUtils = __commonJS({ }; try { uploadProgress.startDisplayTimer(); - core13.debug(`BlobClient: ${blobClient.name}:${blobClient.accountName}:${blobClient.containerName}`); + core14.debug(`BlobClient: ${blobClient.name}:${blobClient.accountName}:${blobClient.containerName}`); const response = yield blockBlobClient.uploadFile(archivePath, uploadOptions); if (response._response.status >= 400) { throw new errors_1.InvalidResponseError(`uploadCacheArchiveSDK: upload failed with status code ${response._response.status}`); } return response; } catch (error3) { - core13.warning(`uploadCacheArchiveSDK: internal error uploading cache archive: ${error3.message}`); + core14.warning(`uploadCacheArchiveSDK: internal error uploading cache archive: ${error3.message}`); throw error3; } finally { uploadProgress.stopDisplayTimer(); @@ -90701,7 +90701,7 @@ var require_requestUtils = __commonJS({ exports2.retry = retry2; exports2.retryTypedResponse = retryTypedResponse; exports2.retryHttpClientResponse = 
retryHttpClientResponse; - var core13 = __importStar2(require_core()); + var core14 = __importStar2(require_core()); var http_client_1 = require_lib(); var constants_1 = require_constants12(); function isSuccessStatusCode(statusCode) { @@ -90759,9 +90759,9 @@ var require_requestUtils = __commonJS({ isRetryable = isRetryableStatusCode(statusCode); errorMessage = `Cache service responded with ${statusCode}`; } - core13.debug(`${name} - Attempt ${attempt} of ${maxAttempts} failed with error: ${errorMessage}`); + core14.debug(`${name} - Attempt ${attempt} of ${maxAttempts} failed with error: ${errorMessage}`); if (!isRetryable) { - core13.debug(`${name} - Error is not retryable`); + core14.debug(`${name} - Error is not retryable`); break; } yield sleep(delay2); @@ -91020,7 +91020,7 @@ var require_downloadUtils = __commonJS({ exports2.downloadCacheHttpClient = downloadCacheHttpClient; exports2.downloadCacheHttpClientConcurrent = downloadCacheHttpClientConcurrent; exports2.downloadCacheStorageSDK = downloadCacheStorageSDK; - var core13 = __importStar2(require_core()); + var core14 = __importStar2(require_core()); var http_client_1 = require_lib(); var storage_blob_1 = require_commonjs15(); var buffer = __importStar2(require("buffer")); @@ -91058,7 +91058,7 @@ var require_downloadUtils = __commonJS({ this.segmentIndex = this.segmentIndex + 1; this.segmentSize = segmentSize; this.receivedBytes = 0; - core13.debug(`Downloading segment at offset ${this.segmentOffset} with length ${this.segmentSize}...`); + core14.debug(`Downloading segment at offset ${this.segmentOffset} with length ${this.segmentSize}...`); } /** * Sets the number of bytes received for the current segment. 
@@ -91092,7 +91092,7 @@ var require_downloadUtils = __commonJS({ const percentage = (100 * (transferredBytes / this.contentLength)).toFixed(1); const elapsedTime = Date.now() - this.startTime; const downloadSpeed = (transferredBytes / (1024 * 1024) / (elapsedTime / 1e3)).toFixed(1); - core13.info(`Received ${transferredBytes} of ${this.contentLength} (${percentage}%), ${downloadSpeed} MBs/sec`); + core14.info(`Received ${transferredBytes} of ${this.contentLength} (${percentage}%), ${downloadSpeed} MBs/sec`); if (this.isDone()) { this.displayedComplete = true; } @@ -91142,7 +91142,7 @@ var require_downloadUtils = __commonJS({ })); downloadResponse.message.socket.setTimeout(constants_1.SocketTimeout, () => { downloadResponse.message.destroy(); - core13.debug(`Aborting download, socket timed out after ${constants_1.SocketTimeout} ms`); + core14.debug(`Aborting download, socket timed out after ${constants_1.SocketTimeout} ms`); }); yield pipeResponseToStream(downloadResponse, writeStream); const contentLengthHeader = downloadResponse.message.headers["content-length"]; @@ -91153,7 +91153,7 @@ var require_downloadUtils = __commonJS({ throw new Error(`Incomplete download. Expected file size: ${expectedLength}, actual file size: ${actualLength}`); } } else { - core13.debug("Unable to validate download, no Content-Length header"); + core14.debug("Unable to validate download, no Content-Length header"); } }); } @@ -91271,7 +91271,7 @@ var require_downloadUtils = __commonJS({ const properties = yield client.getProperties(); const contentLength = (_a = properties.contentLength) !== null && _a !== void 0 ? 
_a : -1; if (contentLength < 0) { - core13.debug("Unable to determine content length, downloading file with http-client..."); + core14.debug("Unable to determine content length, downloading file with http-client..."); yield downloadCacheHttpClient(archiveLocation, archivePath); } else { const maxSegmentSize = Math.min(134217728, buffer.constants.MAX_LENGTH); @@ -91361,7 +91361,7 @@ var require_options = __commonJS({ Object.defineProperty(exports2, "__esModule", { value: true }); exports2.getUploadOptions = getUploadOptions; exports2.getDownloadOptions = getDownloadOptions; - var core13 = __importStar2(require_core()); + var core14 = __importStar2(require_core()); function getUploadOptions(copy) { const result = { useAzureSdk: false, @@ -91381,9 +91381,9 @@ var require_options = __commonJS({ } result.uploadConcurrency = !isNaN(Number(process.env["CACHE_UPLOAD_CONCURRENCY"])) ? Math.min(32, Number(process.env["CACHE_UPLOAD_CONCURRENCY"])) : result.uploadConcurrency; result.uploadChunkSize = !isNaN(Number(process.env["CACHE_UPLOAD_CHUNK_SIZE"])) ? 
Math.min(128 * 1024 * 1024, Number(process.env["CACHE_UPLOAD_CHUNK_SIZE"]) * 1024 * 1024) : result.uploadChunkSize; - core13.debug(`Use Azure SDK: ${result.useAzureSdk}`); - core13.debug(`Upload concurrency: ${result.uploadConcurrency}`); - core13.debug(`Upload chunk size: ${result.uploadChunkSize}`); + core14.debug(`Use Azure SDK: ${result.useAzureSdk}`); + core14.debug(`Upload concurrency: ${result.uploadConcurrency}`); + core14.debug(`Upload chunk size: ${result.uploadChunkSize}`); return result; } function getDownloadOptions(copy) { @@ -91419,12 +91419,12 @@ var require_options = __commonJS({ if (segmentDownloadTimeoutMins && !isNaN(Number(segmentDownloadTimeoutMins)) && isFinite(Number(segmentDownloadTimeoutMins))) { result.segmentTimeoutInMs = Number(segmentDownloadTimeoutMins) * 60 * 1e3; } - core13.debug(`Use Azure SDK: ${result.useAzureSdk}`); - core13.debug(`Download concurrency: ${result.downloadConcurrency}`); - core13.debug(`Request timeout (ms): ${result.timeoutInMs}`); - core13.debug(`Cache segment download timeout mins env var: ${process.env["SEGMENT_DOWNLOAD_TIMEOUT_MINS"]}`); - core13.debug(`Segment download timeout (ms): ${result.segmentTimeoutInMs}`); - core13.debug(`Lookup only: ${result.lookupOnly}`); + core14.debug(`Use Azure SDK: ${result.useAzureSdk}`); + core14.debug(`Download concurrency: ${result.downloadConcurrency}`); + core14.debug(`Request timeout (ms): ${result.timeoutInMs}`); + core14.debug(`Cache segment download timeout mins env var: ${process.env["SEGMENT_DOWNLOAD_TIMEOUT_MINS"]}`); + core14.debug(`Segment download timeout (ms): ${result.segmentTimeoutInMs}`); + core14.debug(`Lookup only: ${result.lookupOnly}`); return result; } } @@ -91618,7 +91618,7 @@ var require_cacheHttpClient = __commonJS({ exports2.downloadCache = downloadCache; exports2.reserveCache = reserveCache; exports2.saveCache = saveCache4; - var core13 = __importStar2(require_core()); + var core14 = __importStar2(require_core()); var http_client_1 = 
require_lib(); var auth_1 = require_auth(); var fs9 = __importStar2(require("fs")); @@ -91636,7 +91636,7 @@ var require_cacheHttpClient = __commonJS({ throw new Error("Cache Service Url not found, unable to restore cache."); } const url = `${baseUrl}_apis/artifactcache/${resource}`; - core13.debug(`Resource Url: ${url}`); + core14.debug(`Resource Url: ${url}`); return url; } function createAcceptHeader(type2, apiVersion) { @@ -91664,7 +91664,7 @@ var require_cacheHttpClient = __commonJS({ return httpClient.getJson(getCacheApiUrl(resource)); })); if (response.statusCode === 204) { - if (core13.isDebug()) { + if (core14.isDebug()) { yield printCachesListForDiagnostics(keys[0], httpClient, version); } return null; @@ -91677,9 +91677,9 @@ var require_cacheHttpClient = __commonJS({ if (!cacheDownloadUrl) { throw new Error("Cache not found."); } - core13.setSecret(cacheDownloadUrl); - core13.debug(`Cache Result:`); - core13.debug(JSON.stringify(cacheResult)); + core14.setSecret(cacheDownloadUrl); + core14.debug(`Cache Result:`); + core14.debug(JSON.stringify(cacheResult)); return cacheResult; }); } @@ -91693,10 +91693,10 @@ var require_cacheHttpClient = __commonJS({ const cacheListResult = response.result; const totalCount = cacheListResult === null || cacheListResult === void 0 ? void 0 : cacheListResult.totalCount; if (totalCount && totalCount > 0) { - core13.debug(`No matching cache found for cache key '${key}', version '${version} and scope ${process.env["GITHUB_REF"]}. There exist one or more cache(s) with similar key but they have different version or scope. See more info on cache matching here: https://docs.github.com/en/actions/using-workflows/caching-dependencies-to-speed-up-workflows#matching-a-cache-key + core14.debug(`No matching cache found for cache key '${key}', version '${version} and scope ${process.env["GITHUB_REF"]}. There exist one or more cache(s) with similar key but they have different version or scope. 
See more info on cache matching here: https://docs.github.com/en/actions/using-workflows/caching-dependencies-to-speed-up-workflows#matching-a-cache-key Other caches with similar key:`); for (const cacheEntry of (cacheListResult === null || cacheListResult === void 0 ? void 0 : cacheListResult.artifactCaches) || []) { - core13.debug(`Cache Key: ${cacheEntry === null || cacheEntry === void 0 ? void 0 : cacheEntry.cacheKey}, Cache Version: ${cacheEntry === null || cacheEntry === void 0 ? void 0 : cacheEntry.cacheVersion}, Cache Scope: ${cacheEntry === null || cacheEntry === void 0 ? void 0 : cacheEntry.scope}, Cache Created: ${cacheEntry === null || cacheEntry === void 0 ? void 0 : cacheEntry.creationTime}`); + core14.debug(`Cache Key: ${cacheEntry === null || cacheEntry === void 0 ? void 0 : cacheEntry.cacheKey}, Cache Version: ${cacheEntry === null || cacheEntry === void 0 ? void 0 : cacheEntry.cacheVersion}, Cache Scope: ${cacheEntry === null || cacheEntry === void 0 ? void 0 : cacheEntry.scope}, Cache Created: ${cacheEntry === null || cacheEntry === void 0 ? 
void 0 : cacheEntry.creationTime}`); } } } @@ -91739,7 +91739,7 @@ Other caches with similar key:`); } function uploadChunk(httpClient, resourceUrl, openStream, start, end) { return __awaiter2(this, void 0, void 0, function* () { - core13.debug(`Uploading chunk of size ${end - start + 1} bytes at offset ${start} with content range: ${getContentRange(start, end)}`); + core14.debug(`Uploading chunk of size ${end - start + 1} bytes at offset ${start} with content range: ${getContentRange(start, end)}`); const additionalHeaders = { "Content-Type": "application/octet-stream", "Content-Range": getContentRange(start, end) @@ -91761,7 +91761,7 @@ Other caches with similar key:`); const concurrency = utils.assertDefined("uploadConcurrency", uploadOptions.uploadConcurrency); const maxChunkSize = utils.assertDefined("uploadChunkSize", uploadOptions.uploadChunkSize); const parallelUploads = [...new Array(concurrency).keys()]; - core13.debug("Awaiting all uploads"); + core14.debug("Awaiting all uploads"); let offset = 0; try { yield Promise.all(parallelUploads.map(() => __awaiter2(this, void 0, void 0, function* () { @@ -91804,16 +91804,16 @@ Other caches with similar key:`); yield (0, uploadUtils_1.uploadCacheArchiveSDK)(signedUploadURL, archivePath, options); } else { const httpClient = createHttpClient(); - core13.debug("Upload cache"); + core14.debug("Upload cache"); yield uploadFile(httpClient, cacheId, archivePath, options); - core13.debug("Commiting cache"); + core14.debug("Commiting cache"); const cacheSize = utils.getArchiveFileSizeInBytes(archivePath); - core13.info(`Cache Size: ~${Math.round(cacheSize / (1024 * 1024))} MB (${cacheSize} B)`); + core14.info(`Cache Size: ~${Math.round(cacheSize / (1024 * 1024))} MB (${cacheSize} B)`); const commitCacheResponse = yield commitCache(httpClient, cacheId, cacheSize); if (!(0, requestUtils_1.isSuccessStatusCode)(commitCacheResponse.statusCode)) { throw new Error(`Cache service responded with ${commitCacheResponse.statusCode} 
during commit cache.`); } - core13.info("Cache saved successfully"); + core14.info("Cache saved successfully"); } }); } @@ -97296,7 +97296,7 @@ var require_cache5 = __commonJS({ exports2.isFeatureAvailable = isFeatureAvailable; exports2.restoreCache = restoreCache4; exports2.saveCache = saveCache4; - var core13 = __importStar2(require_core()); + var core14 = __importStar2(require_core()); var path9 = __importStar2(require("path")); var utils = __importStar2(require_cacheUtils()); var cacheHttpClient = __importStar2(require_cacheHttpClient()); @@ -97355,7 +97355,7 @@ var require_cache5 = __commonJS({ function restoreCache4(paths_1, primaryKey_1, restoreKeys_1, options_1) { return __awaiter2(this, arguments, void 0, function* (paths, primaryKey, restoreKeys, options, enableCrossOsArchive = false) { const cacheServiceVersion = (0, config_1.getCacheServiceVersion)(); - core13.debug(`Cache service version: ${cacheServiceVersion}`); + core14.debug(`Cache service version: ${cacheServiceVersion}`); checkPaths(paths); switch (cacheServiceVersion) { case "v2": @@ -97370,8 +97370,8 @@ var require_cache5 = __commonJS({ return __awaiter2(this, arguments, void 0, function* (paths, primaryKey, restoreKeys, options, enableCrossOsArchive = false) { restoreKeys = restoreKeys || []; const keys = [primaryKey, ...restoreKeys]; - core13.debug("Resolved Keys:"); - core13.debug(JSON.stringify(keys)); + core14.debug("Resolved Keys:"); + core14.debug(JSON.stringify(keys)); if (keys.length > 10) { throw new ValidationError(`Key Validation Error: Keys are limited to a maximum of 10.`); } @@ -97389,19 +97389,19 @@ var require_cache5 = __commonJS({ return void 0; } if (options === null || options === void 0 ? 
void 0 : options.lookupOnly) { - core13.info("Lookup only - skipping download"); + core14.info("Lookup only - skipping download"); return cacheEntry.cacheKey; } archivePath = path9.join(yield utils.createTempDirectory(), utils.getCacheFileName(compressionMethod)); - core13.debug(`Archive Path: ${archivePath}`); + core14.debug(`Archive Path: ${archivePath}`); yield cacheHttpClient.downloadCache(cacheEntry.archiveLocation, archivePath, options); - if (core13.isDebug()) { + if (core14.isDebug()) { yield (0, tar_1.listTar)(archivePath, compressionMethod); } const archiveFileSize = utils.getArchiveFileSizeInBytes(archivePath); - core13.info(`Cache Size: ~${Math.round(archiveFileSize / (1024 * 1024))} MB (${archiveFileSize} B)`); + core14.info(`Cache Size: ~${Math.round(archiveFileSize / (1024 * 1024))} MB (${archiveFileSize} B)`); yield (0, tar_1.extractTar)(archivePath, compressionMethod); - core13.info("Cache restored successfully"); + core14.info("Cache restored successfully"); return cacheEntry.cacheKey; } catch (error3) { const typedError = error3; @@ -97409,16 +97409,16 @@ var require_cache5 = __commonJS({ throw error3; } else { if (typedError instanceof http_client_1.HttpClientError && typeof typedError.statusCode === "number" && typedError.statusCode >= 500) { - core13.error(`Failed to restore: ${error3.message}`); + core14.error(`Failed to restore: ${error3.message}`); } else { - core13.warning(`Failed to restore: ${error3.message}`); + core14.warning(`Failed to restore: ${error3.message}`); } } } finally { try { yield utils.unlinkFile(archivePath); } catch (error3) { - core13.debug(`Failed to delete archive: ${error3}`); + core14.debug(`Failed to delete archive: ${error3}`); } } return void 0; @@ -97429,8 +97429,8 @@ var require_cache5 = __commonJS({ options = Object.assign(Object.assign({}, options), { useAzureSdk: true }); restoreKeys = restoreKeys || []; const keys = [primaryKey, ...restoreKeys]; - core13.debug("Resolved Keys:"); - 
core13.debug(JSON.stringify(keys)); + core14.debug("Resolved Keys:"); + core14.debug(JSON.stringify(keys)); if (keys.length > 10) { throw new ValidationError(`Key Validation Error: Keys are limited to a maximum of 10.`); } @@ -97448,30 +97448,30 @@ var require_cache5 = __commonJS({ }; const response = yield twirpClient.GetCacheEntryDownloadURL(request2); if (!response.ok) { - core13.debug(`Cache not found for version ${request2.version} of keys: ${keys.join(", ")}`); + core14.debug(`Cache not found for version ${request2.version} of keys: ${keys.join(", ")}`); return void 0; } const isRestoreKeyMatch = request2.key !== response.matchedKey; if (isRestoreKeyMatch) { - core13.info(`Cache hit for restore-key: ${response.matchedKey}`); + core14.info(`Cache hit for restore-key: ${response.matchedKey}`); } else { - core13.info(`Cache hit for: ${response.matchedKey}`); + core14.info(`Cache hit for: ${response.matchedKey}`); } if (options === null || options === void 0 ? void 0 : options.lookupOnly) { - core13.info("Lookup only - skipping download"); + core14.info("Lookup only - skipping download"); return response.matchedKey; } archivePath = path9.join(yield utils.createTempDirectory(), utils.getCacheFileName(compressionMethod)); - core13.debug(`Archive path: ${archivePath}`); - core13.debug(`Starting download of archive to: ${archivePath}`); + core14.debug(`Archive path: ${archivePath}`); + core14.debug(`Starting download of archive to: ${archivePath}`); yield cacheHttpClient.downloadCache(response.signedDownloadUrl, archivePath, options); const archiveFileSize = utils.getArchiveFileSizeInBytes(archivePath); - core13.info(`Cache Size: ~${Math.round(archiveFileSize / (1024 * 1024))} MB (${archiveFileSize} B)`); - if (core13.isDebug()) { + core14.info(`Cache Size: ~${Math.round(archiveFileSize / (1024 * 1024))} MB (${archiveFileSize} B)`); + if (core14.isDebug()) { yield (0, tar_1.listTar)(archivePath, compressionMethod); } yield (0, tar_1.extractTar)(archivePath, 
compressionMethod); - core13.info("Cache restored successfully"); + core14.info("Cache restored successfully"); return response.matchedKey; } catch (error3) { const typedError = error3; @@ -97479,9 +97479,9 @@ var require_cache5 = __commonJS({ throw error3; } else { if (typedError instanceof http_client_1.HttpClientError && typeof typedError.statusCode === "number" && typedError.statusCode >= 500) { - core13.error(`Failed to restore: ${error3.message}`); + core14.error(`Failed to restore: ${error3.message}`); } else { - core13.warning(`Failed to restore: ${error3.message}`); + core14.warning(`Failed to restore: ${error3.message}`); } } } finally { @@ -97490,7 +97490,7 @@ var require_cache5 = __commonJS({ yield utils.unlinkFile(archivePath); } } catch (error3) { - core13.debug(`Failed to delete archive: ${error3}`); + core14.debug(`Failed to delete archive: ${error3}`); } } return void 0; @@ -97499,7 +97499,7 @@ var require_cache5 = __commonJS({ function saveCache4(paths_1, key_1, options_1) { return __awaiter2(this, arguments, void 0, function* (paths, key, options, enableCrossOsArchive = false) { const cacheServiceVersion = (0, config_1.getCacheServiceVersion)(); - core13.debug(`Cache service version: ${cacheServiceVersion}`); + core14.debug(`Cache service version: ${cacheServiceVersion}`); checkPaths(paths); checkKey(key); switch (cacheServiceVersion) { @@ -97517,26 +97517,26 @@ var require_cache5 = __commonJS({ const compressionMethod = yield utils.getCompressionMethod(); let cacheId = -1; const cachePaths = yield utils.resolvePaths(paths); - core13.debug("Cache Paths:"); - core13.debug(`${JSON.stringify(cachePaths)}`); + core14.debug("Cache Paths:"); + core14.debug(`${JSON.stringify(cachePaths)}`); if (cachePaths.length === 0) { throw new Error(`Path Validation Error: Path(s) specified in the action for caching do(es) not exist, hence no cache is being saved.`); } const archiveFolder = yield utils.createTempDirectory(); const archivePath = 
path9.join(archiveFolder, utils.getCacheFileName(compressionMethod)); - core13.debug(`Archive Path: ${archivePath}`); + core14.debug(`Archive Path: ${archivePath}`); try { yield (0, tar_1.createTar)(archiveFolder, cachePaths, compressionMethod); - if (core13.isDebug()) { + if (core14.isDebug()) { yield (0, tar_1.listTar)(archivePath, compressionMethod); } const fileSizeLimit = 10 * 1024 * 1024 * 1024; const archiveFileSize = utils.getArchiveFileSizeInBytes(archivePath); - core13.debug(`File Size: ${archiveFileSize}`); + core14.debug(`File Size: ${archiveFileSize}`); if (archiveFileSize > fileSizeLimit && !(0, config_1.isGhes)()) { throw new Error(`Cache size of ~${Math.round(archiveFileSize / (1024 * 1024))} MB (${archiveFileSize} B) is over the 10GB limit, not saving cache.`); } - core13.debug("Reserving Cache"); + core14.debug("Reserving Cache"); const reserveCacheResponse = yield cacheHttpClient.reserveCache(key, paths, { compressionMethod, enableCrossOsArchive, @@ -97549,26 +97549,26 @@ var require_cache5 = __commonJS({ } else { throw new ReserveCacheError(`Unable to reserve cache with key ${key}, another job may be creating this cache. More details: ${(_e = reserveCacheResponse === null || reserveCacheResponse === void 0 ? void 0 : reserveCacheResponse.error) === null || _e === void 0 ? 
void 0 : _e.message}`); } - core13.debug(`Saving Cache (ID: ${cacheId})`); + core14.debug(`Saving Cache (ID: ${cacheId})`); yield cacheHttpClient.saveCache(cacheId, archivePath, "", options); } catch (error3) { const typedError = error3; if (typedError.name === ValidationError.name) { throw error3; } else if (typedError.name === ReserveCacheError.name) { - core13.info(`Failed to save: ${typedError.message}`); + core14.info(`Failed to save: ${typedError.message}`); } else { if (typedError instanceof http_client_1.HttpClientError && typeof typedError.statusCode === "number" && typedError.statusCode >= 500) { - core13.error(`Failed to save: ${typedError.message}`); + core14.error(`Failed to save: ${typedError.message}`); } else { - core13.warning(`Failed to save: ${typedError.message}`); + core14.warning(`Failed to save: ${typedError.message}`); } } } finally { try { yield utils.unlinkFile(archivePath); } catch (error3) { - core13.debug(`Failed to delete archive: ${error3}`); + core14.debug(`Failed to delete archive: ${error3}`); } } return cacheId; @@ -97581,23 +97581,23 @@ var require_cache5 = __commonJS({ const twirpClient = cacheTwirpClient.internalCacheTwirpClient(); let cacheId = -1; const cachePaths = yield utils.resolvePaths(paths); - core13.debug("Cache Paths:"); - core13.debug(`${JSON.stringify(cachePaths)}`); + core14.debug("Cache Paths:"); + core14.debug(`${JSON.stringify(cachePaths)}`); if (cachePaths.length === 0) { throw new Error(`Path Validation Error: Path(s) specified in the action for caching do(es) not exist, hence no cache is being saved.`); } const archiveFolder = yield utils.createTempDirectory(); const archivePath = path9.join(archiveFolder, utils.getCacheFileName(compressionMethod)); - core13.debug(`Archive Path: ${archivePath}`); + core14.debug(`Archive Path: ${archivePath}`); try { yield (0, tar_1.createTar)(archiveFolder, cachePaths, compressionMethod); - if (core13.isDebug()) { + if (core14.isDebug()) { yield (0, 
tar_1.listTar)(archivePath, compressionMethod); } const archiveFileSize = utils.getArchiveFileSizeInBytes(archivePath); - core13.debug(`File Size: ${archiveFileSize}`); + core14.debug(`File Size: ${archiveFileSize}`); options.archiveSizeBytes = archiveFileSize; - core13.debug("Reserving Cache"); + core14.debug("Reserving Cache"); const version = utils.getCacheVersion(paths, compressionMethod, enableCrossOsArchive); const request2 = { key, @@ -97608,16 +97608,16 @@ var require_cache5 = __commonJS({ const response = yield twirpClient.CreateCacheEntry(request2); if (!response.ok) { if (response.message) { - core13.warning(`Cache reservation failed: ${response.message}`); + core14.warning(`Cache reservation failed: ${response.message}`); } throw new Error(response.message || "Response was not ok"); } signedUploadUrl = response.signedUploadUrl; } catch (error3) { - core13.debug(`Failed to reserve cache: ${error3}`); + core14.debug(`Failed to reserve cache: ${error3}`); throw new ReserveCacheError(`Unable to reserve cache with key ${key}, another job may be creating this cache.`); } - core13.debug(`Attempting to upload cache located at: ${archivePath}`); + core14.debug(`Attempting to upload cache located at: ${archivePath}`); yield cacheHttpClient.saveCache(cacheId, archivePath, signedUploadUrl, options); const finalizeRequest = { key, @@ -97625,7 +97625,7 @@ var require_cache5 = __commonJS({ sizeBytes: `${archiveFileSize}` }; const finalizeResponse = yield twirpClient.FinalizeCacheEntryUpload(finalizeRequest); - core13.debug(`FinalizeCacheEntryUploadResponse: ${finalizeResponse.ok}`); + core14.debug(`FinalizeCacheEntryUploadResponse: ${finalizeResponse.ok}`); if (!finalizeResponse.ok) { if (finalizeResponse.message) { throw new FinalizeCacheError(finalizeResponse.message); @@ -97638,21 +97638,21 @@ var require_cache5 = __commonJS({ if (typedError.name === ValidationError.name) { throw error3; } else if (typedError.name === ReserveCacheError.name) { - core13.info(`Failed 
to save: ${typedError.message}`); + core14.info(`Failed to save: ${typedError.message}`); } else if (typedError.name === FinalizeCacheError.name) { - core13.warning(typedError.message); + core14.warning(typedError.message); } else { if (typedError instanceof http_client_1.HttpClientError && typeof typedError.statusCode === "number" && typedError.statusCode >= 500) { - core13.error(`Failed to save: ${typedError.message}`); + core14.error(`Failed to save: ${typedError.message}`); } else { - core13.warning(`Failed to save: ${typedError.message}`); + core14.warning(`Failed to save: ${typedError.message}`); } } } finally { try { yield utils.unlinkFile(archivePath); } catch (error3) { - core13.debug(`Failed to delete archive: ${error3}`); + core14.debug(`Failed to delete archive: ${error3}`); } } return cacheId; @@ -99176,7 +99176,7 @@ var require_retry_helper = __commonJS({ }; Object.defineProperty(exports2, "__esModule", { value: true }); exports2.RetryHelper = void 0; - var core13 = __importStar2(require_core()); + var core14 = __importStar2(require_core()); var RetryHelper = class { constructor(maxAttempts, minSeconds, maxSeconds) { if (maxAttempts < 1) { @@ -99199,10 +99199,10 @@ var require_retry_helper = __commonJS({ if (isRetryable && !isRetryable(err)) { throw err; } - core13.info(err.message); + core14.info(err.message); } const seconds = this.getSleepAmount(); - core13.info(`Waiting ${seconds} seconds before trying again`); + core14.info(`Waiting ${seconds} seconds before trying again`); yield this.sleep(seconds); attempt++; } @@ -99305,7 +99305,7 @@ var require_tool_cache = __commonJS({ exports2.findFromManifest = findFromManifest; exports2.isExplicitVersion = isExplicitVersion; exports2.evaluateVersions = evaluateVersions; - var core13 = __importStar2(require_core()); + var core14 = __importStar2(require_core()); var io6 = __importStar2(require_io()); var crypto2 = __importStar2(require("crypto")); var fs9 = __importStar2(require("fs")); @@ -99334,8 +99334,8 
@@ var require_tool_cache = __commonJS({ return __awaiter2(this, void 0, void 0, function* () { dest = dest || path9.join(_getTempDirectory(), crypto2.randomUUID()); yield io6.mkdirP(path9.dirname(dest)); - core13.debug(`Downloading ${url}`); - core13.debug(`Destination ${dest}`); + core14.debug(`Downloading ${url}`); + core14.debug(`Destination ${dest}`); const maxAttempts = 3; const minSeconds = _getGlobal("TEST_DOWNLOAD_TOOL_RETRY_MIN_SECONDS", 10); const maxSeconds = _getGlobal("TEST_DOWNLOAD_TOOL_RETRY_MAX_SECONDS", 20); @@ -99361,7 +99361,7 @@ var require_tool_cache = __commonJS({ allowRetries: false }); if (auth2) { - core13.debug("set auth"); + core14.debug("set auth"); if (headers === void 0) { headers = {}; } @@ -99370,7 +99370,7 @@ var require_tool_cache = __commonJS({ const response = yield http.get(url, headers); if (response.message.statusCode !== 200) { const err = new HTTPError2(response.message.statusCode); - core13.debug(`Failed to download from "${url}". Code(${response.message.statusCode}) Message(${response.message.statusMessage})`); + core14.debug(`Failed to download from "${url}". Code(${response.message.statusCode}) Message(${response.message.statusMessage})`); throw err; } const pipeline = util.promisify(stream2.pipeline); @@ -99379,16 +99379,16 @@ var require_tool_cache = __commonJS({ let succeeded = false; try { yield pipeline(readStream, fs9.createWriteStream(dest)); - core13.debug("download complete"); + core14.debug("download complete"); succeeded = true; return dest; } finally { if (!succeeded) { - core13.debug("download failed"); + core14.debug("download failed"); try { yield io6.rmRF(dest); } catch (err) { - core13.debug(`Failed to delete '${dest}'. ${err.message}`); + core14.debug(`Failed to delete '${dest}'. ${err.message}`); } } } @@ -99403,7 +99403,7 @@ var require_tool_cache = __commonJS({ process.chdir(dest); if (_7zPath) { try { - const logLevel = core13.isDebug() ? "-bb1" : "-bb0"; + const logLevel = core14.isDebug() ? 
"-bb1" : "-bb0"; const args = [ "x", // eXtract files with full paths @@ -99456,7 +99456,7 @@ var require_tool_cache = __commonJS({ throw new Error("parameter 'file' is required"); } dest = yield _createExtractFolder(dest); - core13.debug("Checking tar --version"); + core14.debug("Checking tar --version"); let versionOutput = ""; yield (0, exec_1.exec)("tar --version", [], { ignoreReturnCode: true, @@ -99466,7 +99466,7 @@ var require_tool_cache = __commonJS({ stderr: (data) => versionOutput += data.toString() } }); - core13.debug(versionOutput.trim()); + core14.debug(versionOutput.trim()); const isGnuTar = versionOutput.toUpperCase().includes("GNU TAR"); let args; if (flags instanceof Array) { @@ -99474,7 +99474,7 @@ var require_tool_cache = __commonJS({ } else { args = [flags]; } - if (core13.isDebug() && !flags.includes("v")) { + if (core14.isDebug() && !flags.includes("v")) { args.push("-v"); } let destArg = dest; @@ -99505,7 +99505,7 @@ var require_tool_cache = __commonJS({ args = [flags]; } args.push("-x", "-C", dest, "-f", file); - if (core13.isDebug()) { + if (core14.isDebug()) { args.push("-v"); } const xarPath = yield io6.which("xar", true); @@ -99548,7 +99548,7 @@ var require_tool_cache = __commonJS({ "-Command", pwshCommand ]; - core13.debug(`Using pwsh at path: ${pwshPath}`); + core14.debug(`Using pwsh at path: ${pwshPath}`); yield (0, exec_1.exec)(`"${pwshPath}"`, args); } else { const powershellCommand = [ @@ -99568,7 +99568,7 @@ var require_tool_cache = __commonJS({ powershellCommand ]; const powershellPath = yield io6.which("powershell", true); - core13.debug(`Using powershell at path: ${powershellPath}`); + core14.debug(`Using powershell at path: ${powershellPath}`); yield (0, exec_1.exec)(`"${powershellPath}"`, args); } }); @@ -99577,7 +99577,7 @@ var require_tool_cache = __commonJS({ return __awaiter2(this, void 0, void 0, function* () { const unzipPath = yield io6.which("unzip", true); const args = [file]; - if (!core13.isDebug()) { + if 
(!core14.isDebug()) { args.unshift("-q"); } args.unshift("-o"); @@ -99588,8 +99588,8 @@ var require_tool_cache = __commonJS({ return __awaiter2(this, void 0, void 0, function* () { version = semver9.clean(version) || version; arch2 = arch2 || os3.arch(); - core13.debug(`Caching tool ${tool} ${version} ${arch2}`); - core13.debug(`source dir: ${sourceDir}`); + core14.debug(`Caching tool ${tool} ${version} ${arch2}`); + core14.debug(`source dir: ${sourceDir}`); if (!fs9.statSync(sourceDir).isDirectory()) { throw new Error("sourceDir is not a directory"); } @@ -99606,14 +99606,14 @@ var require_tool_cache = __commonJS({ return __awaiter2(this, void 0, void 0, function* () { version = semver9.clean(version) || version; arch2 = arch2 || os3.arch(); - core13.debug(`Caching tool ${tool} ${version} ${arch2}`); - core13.debug(`source file: ${sourceFile}`); + core14.debug(`Caching tool ${tool} ${version} ${arch2}`); + core14.debug(`source file: ${sourceFile}`); if (!fs9.statSync(sourceFile).isFile()) { throw new Error("sourceFile is not a file"); } const destFolder = yield _createToolPath(tool, version, arch2); const destPath = path9.join(destFolder, targetFile); - core13.debug(`destination file ${destPath}`); + core14.debug(`destination file ${destPath}`); yield io6.cp(sourceFile, destPath); _completeToolPath(tool, version, arch2); return destFolder; @@ -99636,12 +99636,12 @@ var require_tool_cache = __commonJS({ if (versionSpec) { versionSpec = semver9.clean(versionSpec) || ""; const cachePath = path9.join(_getCacheDirectory(), toolName, versionSpec, arch2); - core13.debug(`checking cache: ${cachePath}`); + core14.debug(`checking cache: ${cachePath}`); if (fs9.existsSync(cachePath) && fs9.existsSync(`${cachePath}.complete`)) { - core13.debug(`Found tool in cache ${toolName} ${versionSpec} ${arch2}`); + core14.debug(`Found tool in cache ${toolName} ${versionSpec} ${arch2}`); toolPath = cachePath; } else { - core13.debug("not found"); + core14.debug("not found"); } } return 
toolPath; @@ -99670,7 +99670,7 @@ var require_tool_cache = __commonJS({ const http = new httpm.HttpClient("tool-cache"); const headers = {}; if (auth2) { - core13.debug("set auth"); + core14.debug("set auth"); headers.authorization = auth2; } const response = yield http.getJson(treeUrl, headers); @@ -99691,7 +99691,7 @@ var require_tool_cache = __commonJS({ try { releases = JSON.parse(versionsRaw); } catch (_a) { - core13.debug("Invalid json"); + core14.debug("Invalid json"); } } return releases; @@ -99715,7 +99715,7 @@ var require_tool_cache = __commonJS({ function _createToolPath(tool, version, arch2) { return __awaiter2(this, void 0, void 0, function* () { const folderPath = path9.join(_getCacheDirectory(), tool, semver9.clean(version) || version, arch2 || ""); - core13.debug(`destination ${folderPath}`); + core14.debug(`destination ${folderPath}`); const markerPath = `${folderPath}.complete`; yield io6.rmRF(folderPath); yield io6.rmRF(markerPath); @@ -99727,18 +99727,18 @@ var require_tool_cache = __commonJS({ const folderPath = path9.join(_getCacheDirectory(), tool, semver9.clean(version) || version, arch2 || ""); const markerPath = `${folderPath}.complete`; fs9.writeFileSync(markerPath, ""); - core13.debug("finished caching tool"); + core14.debug("finished caching tool"); } function isExplicitVersion(versionSpec) { const c = semver9.clean(versionSpec) || ""; - core13.debug(`isExplicit: ${c}`); + core14.debug(`isExplicit: ${c}`); const valid3 = semver9.valid(c) != null; - core13.debug(`explicit? ${valid3}`); + core14.debug(`explicit? 
${valid3}`); return valid3; } function evaluateVersions(versions, versionSpec) { let version = ""; - core13.debug(`evaluating ${versions.length} versions`); + core14.debug(`evaluating ${versions.length} versions`); versions = versions.sort((a, b) => { if (semver9.gt(a, b)) { return 1; @@ -99754,9 +99754,9 @@ var require_tool_cache = __commonJS({ } } if (version) { - core13.debug(`matched: ${version}`); + core14.debug(`matched: ${version}`); } else { - core13.debug("match not found"); + core14.debug("match not found"); } return version; } @@ -100332,7 +100332,7 @@ var require_follow_redirects = __commonJS({ }); // src/setup-codeql-action.ts -var core12 = __toESM(require_core()); +var core13 = __toESM(require_core()); // node_modules/uuid/dist-node/stringify.js var byteToHex = []; @@ -104158,11 +104158,8 @@ var featureConfig = { ["skip_file_coverage_on_prs" /* SkipFileCoverageOnPrs */]: { defaultValue: false, envVar: "CODEQL_ACTION_SKIP_FILE_COVERAGE_ON_PRS", - // For testing, this is not behind a CLI version check yet. However - // before rolling this out externally, we should set a minimum version here - // since current versions of the CodeQL CLI will log if baseline information - // cannot be found when interpreting results. 
- minimumVersion: void 0 + minimumVersion: void 0, + toolsFeature: "suppressesMissingFileBaselineWarning" /* SuppressesMissingFileBaselineWarning */ }, ["start_proxy_remove_unused_registries" /* StartProxyRemoveUnusedRegistries */]: { defaultValue: false, @@ -104511,7 +104508,9 @@ function initFeatures(gitHubVersion, repositoryNwo, tempDir, logger) { } // src/init.ts +var core11 = __toESM(require_core()); var toolrunner4 = __toESM(require_toolrunner()); +var github2 = __toESM(require_github()); var io5 = __toESM(require_io()); // src/codeql.ts @@ -104779,6 +104778,7 @@ var semver5 = __toESM(require_semver2()); var RepositoryPropertyName = /* @__PURE__ */ ((RepositoryPropertyName2) => { RepositoryPropertyName2["DISABLE_OVERLAY"] = "github-codeql-disable-overlay"; RepositoryPropertyName2["EXTRA_QUERIES"] = "github-codeql-extra-queries"; + RepositoryPropertyName2["FILE_COVERAGE_ON_PRS"] = "github-codeql-file-coverage-on-prs"; return RepositoryPropertyName2; })(RepositoryPropertyName || {}); var KNOWN_REPOSITORY_PROPERTY_NAMES = new Set( @@ -106455,7 +106455,7 @@ async function initCodeQL(toolsInput, apiDetails, tempDir, variant, defaultCliVe // src/status-report.ts var os2 = __toESM(require("os")); -var core11 = __toESM(require_core()); +var core12 = __toESM(require_core()); function isFirstPartyAnalysis(actionName) { if (actionName !== "upload-sarif" /* UploadSarif */) { return true; @@ -106471,12 +106471,12 @@ function getActionsStatus(error3, otherFailureCause) { } function setJobStatusIfUnsuccessful(actionStatus) { if (actionStatus === "user-error") { - core11.exportVariable( + core12.exportVariable( "CODEQL_ACTION_JOB_STATUS" /* JOB_STATUS */, process.env["CODEQL_ACTION_JOB_STATUS" /* JOB_STATUS */] ?? 
"JOB_STATUS_CONFIGURATION_ERROR" /* ConfigErrorStatus */ ); } else if (actionStatus === "failure" || actionStatus === "aborted") { - core11.exportVariable( + core12.exportVariable( "CODEQL_ACTION_JOB_STATUS" /* JOB_STATUS */, process.env["CODEQL_ACTION_JOB_STATUS" /* JOB_STATUS */] ?? "JOB_STATUS_FAILURE" /* FailureStatus */ ); @@ -106495,14 +106495,14 @@ async function createStatusReportBase(actionName, status, actionStartedAt, confi let workflowStartedAt = process.env["CODEQL_WORKFLOW_STARTED_AT" /* WORKFLOW_STARTED_AT */]; if (workflowStartedAt === void 0) { workflowStartedAt = actionStartedAt.toISOString(); - core11.exportVariable("CODEQL_WORKFLOW_STARTED_AT" /* WORKFLOW_STARTED_AT */, workflowStartedAt); + core12.exportVariable("CODEQL_WORKFLOW_STARTED_AT" /* WORKFLOW_STARTED_AT */, workflowStartedAt); } const runnerOs = getRequiredEnvParam("RUNNER_OS"); const codeQlCliVersion = getCachedCodeQlVersion(); const actionRef = process.env["GITHUB_ACTION_REF"] || ""; const testingEnvironment = getTestingEnvironment(); if (testingEnvironment) { - core11.exportVariable("CODEQL_ACTION_TESTING_ENVIRONMENT" /* TESTING_ENVIRONMENT */, testingEnvironment); + core12.exportVariable("CODEQL_ACTION_TESTING_ENVIRONMENT" /* TESTING_ENVIRONMENT */, testingEnvironment); } const isSteadyStateDefaultSetupRun = process.env["CODE_SCANNING_IS_STEADY_STATE_DEFAULT_SETUP"] === "true"; const statusReport = { @@ -106585,9 +106585,9 @@ var INCOMPATIBLE_MSG = "CodeQL Action version is incompatible with the API endpo async function sendStatusReport(statusReport) { setJobStatusIfUnsuccessful(statusReport.status); const statusReportJSON = JSON.stringify(statusReport); - core11.debug(`Sending status report: ${statusReportJSON}`); + core12.debug(`Sending status report: ${statusReportJSON}`); if (isInTestMode()) { - core11.debug("In test mode. Status reports are not uploaded."); + core12.debug("In test mode. 
Status reports are not uploaded."); return; } const nwo = getRepositoryNwo(); @@ -106607,28 +106607,28 @@ async function sendStatusReport(statusReport) { switch (httpError.status) { case 403: if (getWorkflowEventName() === "push" && process.env["GITHUB_ACTOR"] === "dependabot[bot]") { - core11.warning( + core12.warning( `Workflows triggered by Dependabot on the "push" event run with read-only access. Uploading CodeQL results requires write access. To use CodeQL with Dependabot, please ensure you are using the "pull_request" event for this workflow and avoid triggering on the "push" event for Dependabot branches. See ${"https://docs.github.com/en/code-security/code-scanning/creating-an-advanced-setup-for-code-scanning/customizing-your-advanced-setup-for-code-scanning#scanning-on-push" /* SCANNING_ON_PUSH */} for more information on how to configure these events.` ); } else { - core11.warning( + core12.warning( `This run of the CodeQL Action does not have permission to access the CodeQL Action API endpoints. This could be because the Action is running on a pull request from a fork. If not, please ensure the workflow has at least the 'security-events: read' permission. 
Details: ${httpError.message}` ); } return; case 404: - core11.warning(httpError.message); + core12.warning(httpError.message); return; case 422: if (getRequiredEnvParam("GITHUB_SERVER_URL") !== GITHUB_DOTCOM_URL) { - core11.debug(INCOMPATIBLE_MSG); + core12.debug(INCOMPATIBLE_MSG); } else { - core11.debug(OUT_OF_DATE_MSG); + core12.debug(OUT_OF_DATE_MSG); } return; } } - core11.warning( + core12.warning( `An unexpected error occurred when sending a status report: ${getErrorMessage( e )}` @@ -106718,7 +106718,7 @@ async function run(startedAt) { ); const jobRunUuid = v4_default(); logger.info(`Job run UUID is ${jobRunUuid}.`); - core12.exportVariable("JOB_RUN_UUID" /* JOB_RUN_UUID */, jobRunUuid); + core13.exportVariable("JOB_RUN_UUID" /* JOB_RUN_UUID */, jobRunUuid); const statusReportBase = await createStatusReportBase( "setup-codeql" /* SetupCodeQL */, "starting", @@ -106747,12 +106747,12 @@ async function run(startedAt) { toolsDownloadStatusReport = initCodeQLResult.toolsDownloadStatusReport; toolsVersion = initCodeQLResult.toolsVersion; toolsSource = initCodeQLResult.toolsSource; - core12.setOutput("codeql-path", codeql.getPath()); - core12.setOutput("codeql-version", (await codeql.getVersion()).version); - core12.exportVariable("CODEQL_ACTION_SETUP_CODEQL_HAS_RUN" /* SETUP_CODEQL_ACTION_HAS_RUN */, "true"); + core13.setOutput("codeql-path", codeql.getPath()); + core13.setOutput("codeql-version", (await codeql.getVersion()).version); + core13.exportVariable("CODEQL_ACTION_SETUP_CODEQL_HAS_RUN" /* SETUP_CODEQL_ACTION_HAS_RUN */, "true"); } catch (unwrappedError) { const error3 = wrapError(unwrappedError); - core12.setFailed(error3.message); + core13.setFailed(error3.message); const statusReportBase = await createStatusReportBase( "setup-codeql" /* SetupCodeQL */, error3 instanceof ConfigurationError ? 
"user-error" : "failure", @@ -106783,7 +106783,7 @@ async function runWrapper() { try { await run(startedAt); } catch (error3) { - core12.setFailed(`setup-codeql action failed: ${getErrorMessage(error3)}`); + core13.setFailed(`setup-codeql action failed: ${getErrorMessage(error3)}`); await sendUnhandledErrorStatusReport( "setup-codeql" /* SetupCodeQL */, startedAt, diff --git a/lib/start-proxy-action-post.js b/lib/start-proxy-action-post.js index 6fdfe2d8be..3bbd51bab1 100644 --- a/lib/start-proxy-action-post.js +++ b/lib/start-proxy-action-post.js @@ -161368,6 +161368,7 @@ var semver2 = __toESM(require_semver2()); var RepositoryPropertyName = /* @__PURE__ */ ((RepositoryPropertyName2) => { RepositoryPropertyName2["DISABLE_OVERLAY"] = "github-codeql-disable-overlay"; RepositoryPropertyName2["EXTRA_QUERIES"] = "github-codeql-extra-queries"; + RepositoryPropertyName2["FILE_COVERAGE_ON_PRS"] = "github-codeql-file-coverage-on-prs"; return RepositoryPropertyName2; })(RepositoryPropertyName || {}); var KNOWN_REPOSITORY_PROPERTY_NAMES = new Set( @@ -161627,11 +161628,8 @@ var featureConfig = { ["skip_file_coverage_on_prs" /* SkipFileCoverageOnPrs */]: { defaultValue: false, envVar: "CODEQL_ACTION_SKIP_FILE_COVERAGE_ON_PRS", - // For testing, this is not behind a CLI version check yet. However - // before rolling this out externally, we should set a minimum version here - // since current versions of the CodeQL CLI will log if baseline information - // cannot be found when interpreting results. 
- minimumVersion: void 0 + minimumVersion: void 0, + toolsFeature: "suppressesMissingFileBaselineWarning" /* SuppressesMissingFileBaselineWarning */ }, ["start_proxy_remove_unused_registries" /* StartProxyRemoveUnusedRegistries */]: { defaultValue: false, diff --git a/lib/start-proxy-action.js b/lib/start-proxy-action.js index 84519a068d..d678b0191e 100644 --- a/lib/start-proxy-action.js +++ b/lib/start-proxy-action.js @@ -120950,11 +120950,8 @@ var featureConfig = { ["skip_file_coverage_on_prs" /* SkipFileCoverageOnPrs */]: { defaultValue: false, envVar: "CODEQL_ACTION_SKIP_FILE_COVERAGE_ON_PRS", - // For testing, this is not behind a CLI version check yet. However - // before rolling this out externally, we should set a minimum version here - // since current versions of the CodeQL CLI will log if baseline information - // cannot be found when interpreting results. - minimumVersion: void 0 + minimumVersion: void 0, + toolsFeature: "suppressesMissingFileBaselineWarning" /* SuppressesMissingFileBaselineWarning */ }, ["start_proxy_remove_unused_registries" /* StartProxyRemoveUnusedRegistries */]: { defaultValue: false, @@ -121386,6 +121383,7 @@ var semver5 = __toESM(require_semver2()); var RepositoryPropertyName = /* @__PURE__ */ ((RepositoryPropertyName2) => { RepositoryPropertyName2["DISABLE_OVERLAY"] = "github-codeql-disable-overlay"; RepositoryPropertyName2["EXTRA_QUERIES"] = "github-codeql-extra-queries"; + RepositoryPropertyName2["FILE_COVERAGE_ON_PRS"] = "github-codeql-file-coverage-on-prs"; return RepositoryPropertyName2; })(RepositoryPropertyName || {}); var KNOWN_REPOSITORY_PROPERTY_NAMES = new Set( diff --git a/lib/upload-lib.js b/lib/upload-lib.js index 2f5585bd19..d3c6ef7a5f 100644 --- a/lib/upload-lib.js +++ b/lib/upload-lib.js @@ -3842,18 +3842,18 @@ var require_webidl = __commonJS({ webidl.errors.exception = function(message) { return new TypeError(`${message.header}: ${message.message}`); }; - webidl.errors.conversionFailed = function(context2) { - 
const plural = context2.types.length === 1 ? "" : " one of"; - const message = `${context2.argument} could not be converted to${plural}: ${context2.types.join(", ")}.`; + webidl.errors.conversionFailed = function(context3) { + const plural = context3.types.length === 1 ? "" : " one of"; + const message = `${context3.argument} could not be converted to${plural}: ${context3.types.join(", ")}.`; return webidl.errors.exception({ - header: context2.prefix, + header: context3.prefix, message }); }; - webidl.errors.invalidArgument = function(context2) { + webidl.errors.invalidArgument = function(context3) { return webidl.errors.exception({ - header: context2.prefix, - message: `"${context2.value}" is an invalid ${context2.type}.` + header: context3.prefix, + message: `"${context3.value}" is an invalid ${context3.type}.` }); }; webidl.brandCheck = function(V, I, opts) { @@ -9849,17 +9849,17 @@ var require_api_request = __commonJS({ } } } - onConnect(abort, context2) { + onConnect(abort, context3) { if (this.reason) { abort(this.reason); return; } assert(this.callback); this.abort = abort; - this.context = context2; + this.context = context3; } onHeaders(statusCode, rawHeaders, resume, statusMessage) { - const { callback, opaque, abort, context: context2, responseHeaders, highWaterMark } = this; + const { callback, opaque, abort, context: context3, responseHeaders, highWaterMark } = this; const headers = responseHeaders === "raw" ? 
util.parseRawHeaders(rawHeaders) : util.parseHeaders(rawHeaders); if (statusCode < 200) { if (this.onInfo) { @@ -9896,7 +9896,7 @@ var require_api_request = __commonJS({ trailers: this.trailers, opaque, body: res, - context: context2 + context: context3 }); } } @@ -10065,17 +10065,17 @@ var require_api_stream = __commonJS({ } addSignal(this, signal); } - onConnect(abort, context2) { + onConnect(abort, context3) { if (this.reason) { abort(this.reason); return; } assert(this.callback); this.abort = abort; - this.context = context2; + this.context = context3; } onHeaders(statusCode, rawHeaders, resume, statusMessage) { - const { factory, opaque, context: context2, callback, responseHeaders } = this; + const { factory, opaque, context: context3, callback, responseHeaders } = this; const headers = responseHeaders === "raw" ? util.parseRawHeaders(rawHeaders) : util.parseHeaders(rawHeaders); if (statusCode < 200) { if (this.onInfo) { @@ -10103,7 +10103,7 @@ var require_api_stream = __commonJS({ statusCode, headers, opaque, - context: context2 + context: context3 }); if (!res || typeof res.write !== "function" || typeof res.end !== "function" || typeof res.on !== "function") { throw new InvalidReturnValueError("expected Writable"); @@ -10295,7 +10295,7 @@ var require_api_pipeline = __commonJS({ this.res = null; addSignal(this, signal); } - onConnect(abort, context2) { + onConnect(abort, context3) { const { ret, res } = this; if (this.reason) { abort(this.reason); @@ -10304,10 +10304,10 @@ var require_api_pipeline = __commonJS({ assert(!res, "pipeline cannot be retried"); assert(!ret.destroyed); this.abort = abort; - this.context = context2; + this.context = context3; } onHeaders(statusCode, rawHeaders, resume) { - const { opaque, handler: handler2, context: context2 } = this; + const { opaque, handler: handler2, context: context3 } = this; if (statusCode < 200) { if (this.onInfo) { const headers = this.responseHeaders === "raw" ? 
util.parseRawHeaders(rawHeaders) : util.parseHeaders(rawHeaders); @@ -10325,7 +10325,7 @@ var require_api_pipeline = __commonJS({ headers, opaque, body: this.res, - context: context2 + context: context3 }); } catch (err) { this.res.on("error", util.nop); @@ -10409,7 +10409,7 @@ var require_api_upgrade = __commonJS({ this.context = null; addSignal(this, signal); } - onConnect(abort, context2) { + onConnect(abort, context3) { if (this.reason) { abort(this.reason); return; @@ -10423,7 +10423,7 @@ var require_api_upgrade = __commonJS({ } onUpgrade(statusCode, rawHeaders, socket) { assert(statusCode === 101); - const { callback, opaque, context: context2 } = this; + const { callback, opaque, context: context3 } = this; removeSignal(this); this.callback = null; const headers = this.responseHeaders === "raw" ? util.parseRawHeaders(rawHeaders) : util.parseHeaders(rawHeaders); @@ -10431,7 +10431,7 @@ var require_api_upgrade = __commonJS({ headers, socket, opaque, - context: context2 + context: context3 }); } onError(err) { @@ -10500,20 +10500,20 @@ var require_api_connect = __commonJS({ this.abort = null; addSignal(this, signal); } - onConnect(abort, context2) { + onConnect(abort, context3) { if (this.reason) { abort(this.reason); return; } assert(this.callback); this.abort = abort; - this.context = context2; + this.context = context3; } onHeaders() { throw new SocketError("bad connect", null); } onUpgrade(statusCode, rawHeaders, socket) { - const { callback, opaque, context: context2 } = this; + const { callback, opaque, context: context3 } = this; removeSignal(this); this.callback = null; let headers = rawHeaders; @@ -10525,7 +10525,7 @@ var require_api_connect = __commonJS({ headers, socket, opaque, - context: context2 + context: context3 }); } onError(err) { @@ -21321,7 +21321,7 @@ var require_core = __commonJS({ }; Object.defineProperty(exports2, "__esModule", { value: true }); exports2.platform = exports2.toPlatformPath = exports2.toWin32Path = exports2.toPosixPath = 
exports2.markdownSummary = exports2.summary = exports2.ExitCode = void 0; - exports2.exportVariable = exportVariable5; + exports2.exportVariable = exportVariable6; exports2.setSecret = setSecret; exports2.addPath = addPath; exports2.getInput = getInput2; @@ -21353,7 +21353,7 @@ var require_core = __commonJS({ ExitCode2[ExitCode2["Success"] = 0] = "Success"; ExitCode2[ExitCode2["Failure"] = 1] = "Failure"; })(ExitCode || (exports2.ExitCode = ExitCode = {})); - function exportVariable5(name, val) { + function exportVariable6(name, val) { const convertedVal = (0, utils_1.toCommandValue)(val); process.env[name] = convertedVal; const filePath = process.env["GITHUB_ENV"] || ""; @@ -26137,18 +26137,18 @@ var require_webidl2 = __commonJS({ webidl.errors.exception = function(message) { return new TypeError(`${message.header}: ${message.message}`); }; - webidl.errors.conversionFailed = function(context2) { - const plural = context2.types.length === 1 ? "" : " one of"; - const message = `${context2.argument} could not be converted to${plural}: ${context2.types.join(", ")}.`; + webidl.errors.conversionFailed = function(context3) { + const plural = context3.types.length === 1 ? 
"" : " one of"; + const message = `${context3.argument} could not be converted to${plural}: ${context3.types.join(", ")}.`; return webidl.errors.exception({ - header: context2.prefix, + header: context3.prefix, message }); }; - webidl.errors.invalidArgument = function(context2) { + webidl.errors.invalidArgument = function(context3) { return webidl.errors.exception({ - header: context2.prefix, - message: `"${context2.value}" is an invalid ${context2.type}.` + header: context3.prefix, + message: `"${context3.value}" is an invalid ${context3.type}.` }); }; webidl.brandCheck = function(V, I, opts) { @@ -32144,17 +32144,17 @@ var require_api_request2 = __commonJS({ } } } - onConnect(abort, context2) { + onConnect(abort, context3) { if (this.reason) { abort(this.reason); return; } assert(this.callback); this.abort = abort; - this.context = context2; + this.context = context3; } onHeaders(statusCode, rawHeaders, resume, statusMessage) { - const { callback, opaque, abort, context: context2, responseHeaders, highWaterMark } = this; + const { callback, opaque, abort, context: context3, responseHeaders, highWaterMark } = this; const headers = responseHeaders === "raw" ? 
util.parseRawHeaders(rawHeaders) : util.parseHeaders(rawHeaders); if (statusCode < 200) { if (this.onInfo) { @@ -32191,7 +32191,7 @@ var require_api_request2 = __commonJS({ trailers: this.trailers, opaque, body: res, - context: context2 + context: context3 }); } } @@ -32360,17 +32360,17 @@ var require_api_stream2 = __commonJS({ } addSignal(this, signal); } - onConnect(abort, context2) { + onConnect(abort, context3) { if (this.reason) { abort(this.reason); return; } assert(this.callback); this.abort = abort; - this.context = context2; + this.context = context3; } onHeaders(statusCode, rawHeaders, resume, statusMessage) { - const { factory, opaque, context: context2, callback, responseHeaders } = this; + const { factory, opaque, context: context3, callback, responseHeaders } = this; const headers = responseHeaders === "raw" ? util.parseRawHeaders(rawHeaders) : util.parseHeaders(rawHeaders); if (statusCode < 200) { if (this.onInfo) { @@ -32398,7 +32398,7 @@ var require_api_stream2 = __commonJS({ statusCode, headers, opaque, - context: context2 + context: context3 }); if (!res || typeof res.write !== "function" || typeof res.end !== "function" || typeof res.on !== "function") { throw new InvalidReturnValueError("expected Writable"); @@ -32590,7 +32590,7 @@ var require_api_pipeline2 = __commonJS({ this.res = null; addSignal(this, signal); } - onConnect(abort, context2) { + onConnect(abort, context3) { const { ret, res } = this; if (this.reason) { abort(this.reason); @@ -32599,10 +32599,10 @@ var require_api_pipeline2 = __commonJS({ assert(!res, "pipeline cannot be retried"); assert(!ret.destroyed); this.abort = abort; - this.context = context2; + this.context = context3; } onHeaders(statusCode, rawHeaders, resume) { - const { opaque, handler: handler2, context: context2 } = this; + const { opaque, handler: handler2, context: context3 } = this; if (statusCode < 200) { if (this.onInfo) { const headers = this.responseHeaders === "raw" ? 
util.parseRawHeaders(rawHeaders) : util.parseHeaders(rawHeaders); @@ -32620,7 +32620,7 @@ var require_api_pipeline2 = __commonJS({ headers, opaque, body: this.res, - context: context2 + context: context3 }); } catch (err) { this.res.on("error", util.nop); @@ -32704,7 +32704,7 @@ var require_api_upgrade2 = __commonJS({ this.context = null; addSignal(this, signal); } - onConnect(abort, context2) { + onConnect(abort, context3) { if (this.reason) { abort(this.reason); return; @@ -32718,7 +32718,7 @@ var require_api_upgrade2 = __commonJS({ } onUpgrade(statusCode, rawHeaders, socket) { assert(statusCode === 101); - const { callback, opaque, context: context2 } = this; + const { callback, opaque, context: context3 } = this; removeSignal(this); this.callback = null; const headers = this.responseHeaders === "raw" ? util.parseRawHeaders(rawHeaders) : util.parseHeaders(rawHeaders); @@ -32726,7 +32726,7 @@ var require_api_upgrade2 = __commonJS({ headers, socket, opaque, - context: context2 + context: context3 }); } onError(err) { @@ -32795,20 +32795,20 @@ var require_api_connect2 = __commonJS({ this.abort = null; addSignal(this, signal); } - onConnect(abort, context2) { + onConnect(abort, context3) { if (this.reason) { abort(this.reason); return; } assert(this.callback); this.abort = abort; - this.context = context2; + this.context = context3; } onHeaders() { throw new SocketError("bad connect", null); } onUpgrade(statusCode, rawHeaders, socket) { - const { callback, opaque, context: context2 } = this; + const { callback, opaque, context: context3 } = this; removeSignal(this); this.callback = null; let headers = rawHeaders; @@ -32820,7 +32820,7 @@ var require_api_connect2 = __commonJS({ headers, socket, opaque, - context: context2 + context: context3 }); } onError(err) { @@ -41475,8 +41475,8 @@ function isDefined(value) { function isKeyOperator(operator) { return operator === ";" || operator === "&" || operator === "?"; } -function getValues(context2, operator, key, modifier) 
{ - var value = context2[key], result = []; +function getValues(context3, operator, key, modifier) { + var value = context3[key], result = []; if (isDefined(value) && value !== "") { if (typeof value === "string" || typeof value === "number" || typeof value === "boolean") { value = value.toString(); @@ -41540,7 +41540,7 @@ function parseUrl(template) { expand: expand.bind(null, template) }; } -function expand(template, context2) { +function expand(template, context3) { var operators = ["+", "#", ".", "/", ";", "?", "&"]; template = template.replace( /\{([^\{\}]+)\}|([^\{\}]+)/g, @@ -41554,7 +41554,7 @@ function expand(template, context2) { } expression.split(/,/g).forEach(function(variable) { var tmp = /([^:\*]*)(?::(\d+)|(\*))?/.exec(variable); - values.push(getValues(context2, operator, tmp[1], tmp[2] || tmp[3])); + values.push(getValues(context3, operator, tmp[1], tmp[2] || tmp[3])); }); if (operator && operator !== "+") { var separator = ","; @@ -48747,7 +48747,7 @@ var require_internal_glob_options_helper = __commonJS({ })(); Object.defineProperty(exports2, "__esModule", { value: true }); exports2.getOptions = getOptions; - var core12 = __importStar2(require_core()); + var core13 = __importStar2(require_core()); function getOptions(copy) { const result = { followSymbolicLinks: true, @@ -48759,23 +48759,23 @@ var require_internal_glob_options_helper = __commonJS({ if (copy) { if (typeof copy.followSymbolicLinks === "boolean") { result.followSymbolicLinks = copy.followSymbolicLinks; - core12.debug(`followSymbolicLinks '${result.followSymbolicLinks}'`); + core13.debug(`followSymbolicLinks '${result.followSymbolicLinks}'`); } if (typeof copy.implicitDescendants === "boolean") { result.implicitDescendants = copy.implicitDescendants; - core12.debug(`implicitDescendants '${result.implicitDescendants}'`); + core13.debug(`implicitDescendants '${result.implicitDescendants}'`); } if (typeof copy.matchDirectories === "boolean") { result.matchDirectories = 
copy.matchDirectories; - core12.debug(`matchDirectories '${result.matchDirectories}'`); + core13.debug(`matchDirectories '${result.matchDirectories}'`); } if (typeof copy.omitBrokenSymbolicLinks === "boolean") { result.omitBrokenSymbolicLinks = copy.omitBrokenSymbolicLinks; - core12.debug(`omitBrokenSymbolicLinks '${result.omitBrokenSymbolicLinks}'`); + core13.debug(`omitBrokenSymbolicLinks '${result.omitBrokenSymbolicLinks}'`); } if (typeof copy.excludeHiddenFiles === "boolean") { result.excludeHiddenFiles = copy.excludeHiddenFiles; - core12.debug(`excludeHiddenFiles '${result.excludeHiddenFiles}'`); + core13.debug(`excludeHiddenFiles '${result.excludeHiddenFiles}'`); } } return result; @@ -50403,7 +50403,7 @@ var require_internal_globber = __commonJS({ }; Object.defineProperty(exports2, "__esModule", { value: true }); exports2.DefaultGlobber = void 0; - var core12 = __importStar2(require_core()); + var core13 = __importStar2(require_core()); var fs13 = __importStar2(require("fs")); var globOptionsHelper = __importStar2(require_internal_glob_options_helper()); var path12 = __importStar2(require("path")); @@ -50456,7 +50456,7 @@ var require_internal_globber = __commonJS({ } const stack = []; for (const searchPath of patternHelper.getSearchPaths(patterns)) { - core12.debug(`Search path '${searchPath}'`); + core13.debug(`Search path '${searchPath}'`); try { yield __await2(fs13.promises.lstat(searchPath)); } catch (err) { @@ -50531,7 +50531,7 @@ var require_internal_globber = __commonJS({ } catch (err) { if (err.code === "ENOENT") { if (options.omitBrokenSymbolicLinks) { - core12.debug(`Broken symlink '${item.path}'`); + core13.debug(`Broken symlink '${item.path}'`); return void 0; } throw new Error(`No information found for the path '${item.path}'. 
This may indicate a broken symbolic link.`); @@ -50547,7 +50547,7 @@ var require_internal_globber = __commonJS({ traversalChain.pop(); } if (traversalChain.some((x) => x === realPath)) { - core12.debug(`Symlink cycle detected for path '${item.path}' and realpath '${realPath}'`); + core13.debug(`Symlink cycle detected for path '${item.path}' and realpath '${realPath}'`); return void 0; } traversalChain.push(realPath); @@ -50650,7 +50650,7 @@ var require_internal_hash_files = __commonJS({ Object.defineProperty(exports2, "__esModule", { value: true }); exports2.hashFiles = hashFiles; var crypto2 = __importStar2(require("crypto")); - var core12 = __importStar2(require_core()); + var core13 = __importStar2(require_core()); var fs13 = __importStar2(require("fs")); var stream2 = __importStar2(require("stream")); var util = __importStar2(require("util")); @@ -50659,7 +50659,7 @@ var require_internal_hash_files = __commonJS({ return __awaiter2(this, arguments, void 0, function* (globber, currentWorkspace, verbose = false) { var _a, e_1, _b, _c; var _d; - const writeDelegate = verbose ? core12.info : core12.debug; + const writeDelegate = verbose ? core13.info : core13.debug; let hasMatch = false; const githubWorkspace = currentWorkspace ? currentWorkspace : (_d = process.env["GITHUB_WORKSPACE"]) !== null && _d !== void 0 ? 
_d : process.cwd(); const result = crypto2.createHash("sha256"); @@ -52050,7 +52050,7 @@ var require_cacheUtils = __commonJS({ exports2.assertDefined = assertDefined; exports2.getCacheVersion = getCacheVersion; exports2.getRuntimeToken = getRuntimeToken; - var core12 = __importStar2(require_core()); + var core13 = __importStar2(require_core()); var exec = __importStar2(require_exec()); var glob = __importStar2(require_glob()); var io6 = __importStar2(require_io()); @@ -52101,7 +52101,7 @@ var require_cacheUtils = __commonJS({ _e = false; const file = _c; const relativeFile = path12.relative(workspace, file).replace(new RegExp(`\\${path12.sep}`, "g"), "/"); - core12.debug(`Matched: ${relativeFile}`); + core13.debug(`Matched: ${relativeFile}`); if (relativeFile === "") { paths.push("."); } else { @@ -52129,7 +52129,7 @@ var require_cacheUtils = __commonJS({ return __awaiter2(this, arguments, void 0, function* (app, additionalArgs = []) { let versionOutput = ""; additionalArgs.push("--version"); - core12.debug(`Checking ${app} ${additionalArgs.join(" ")}`); + core13.debug(`Checking ${app} ${additionalArgs.join(" ")}`); try { yield exec.exec(`${app}`, additionalArgs, { ignoreReturnCode: true, @@ -52140,10 +52140,10 @@ var require_cacheUtils = __commonJS({ } }); } catch (err) { - core12.debug(err.message); + core13.debug(err.message); } versionOutput = versionOutput.trim(); - core12.debug(versionOutput); + core13.debug(versionOutput); return versionOutput; }); } @@ -52151,7 +52151,7 @@ var require_cacheUtils = __commonJS({ return __awaiter2(this, void 0, void 0, function* () { const versionOutput = yield getVersion("zstd", ["--quiet"]); const version = semver9.clean(versionOutput); - core12.debug(`zstd version: ${version}`); + core13.debug(`zstd version: ${version}`); if (versionOutput === "") { return constants_1.CompressionMethod.Gzip; } else { @@ -52276,14 +52276,14 @@ function __esDecorate(ctor, descriptorIn, decorators, contextIn, initializers, e var descriptor = 
descriptorIn || (target ? Object.getOwnPropertyDescriptor(target, contextIn.name) : {}); var _, done = false; for (var i = decorators.length - 1; i >= 0; i--) { - var context2 = {}; - for (var p in contextIn) context2[p] = p === "access" ? {} : contextIn[p]; - for (var p in contextIn.access) context2.access[p] = contextIn.access[p]; - context2.addInitializer = function(f) { + var context3 = {}; + for (var p in contextIn) context3[p] = p === "access" ? {} : contextIn[p]; + for (var p in contextIn.access) context3.access[p] = contextIn.access[p]; + context3.addInitializer = function(f) { if (done) throw new TypeError("Cannot add initializers after decoration has completed"); extraInitializers.push(accept(f || null)); }; - var result = (0, decorators[i])(kind === "accessor" ? { get: descriptor.get, set: descriptor.set } : descriptor[key], context2); + var result = (0, decorators[i])(kind === "accessor" ? { get: descriptor.get, set: descriptor.set } : descriptor[key], context3); if (kind === "accessor") { if (result === void 0) continue; if (result === null || typeof result !== "object") throw new TypeError("Object expected"); @@ -53010,19 +53010,19 @@ var require_logger = __commonJS({ logger: clientLogger }; } - var context2 = createLoggerContext({ + var context3 = createLoggerContext({ logLevelEnvVarName: "TYPESPEC_RUNTIME_LOG_LEVEL", namespace: "typeSpecRuntime" }); - exports2.TypeSpecRuntimeLogger = context2.logger; + exports2.TypeSpecRuntimeLogger = context3.logger; function setLogLevel(logLevel) { - context2.setLogLevel(logLevel); + context3.setLogLevel(logLevel); } function getLogLevel() { - return context2.getLogLevel(); + return context3.getLogLevel(); } function createClientLogger(namespace) { - return context2.createClientLogger(namespace); + return context3.createClientLogger(namespace); } } }); @@ -57268,19 +57268,19 @@ var require_commonjs2 = __commonJS({ exports2.getLogLevel = getLogLevel; exports2.createClientLogger = createClientLogger; var logger_1 = 
require_internal(); - var context2 = (0, logger_1.createLoggerContext)({ + var context3 = (0, logger_1.createLoggerContext)({ logLevelEnvVarName: "AZURE_LOG_LEVEL", namespace: "azure" }); - exports2.AzureLogger = context2.logger; + exports2.AzureLogger = context3.logger; function setLogLevel(level) { - context2.setLogLevel(level); + context3.setLogLevel(level); } function getLogLevel() { - return context2.getLogLevel(); + return context3.getLogLevel(); } function createClientLogger(namespace) { - return context2.createClientLogger(namespace); + return context3.createClientLogger(namespace); } } }); @@ -58190,14 +58190,14 @@ var require_tracingContext = __commonJS({ namespace: /* @__PURE__ */ Symbol.for("@azure/core-tracing namespace") }; function createTracingContext(options = {}) { - let context2 = new TracingContextImpl(options.parentContext); + let context3 = new TracingContextImpl(options.parentContext); if (options.span) { - context2 = context2.setValue(exports2.knownContextKeys.span, options.span); + context3 = context3.setValue(exports2.knownContextKeys.span, options.span); } if (options.namespace) { - context2 = context2.setValue(exports2.knownContextKeys.namespace, options.namespace); + context3 = context3.setValue(exports2.knownContextKeys.namespace, options.namespace); } - return context2; + return context3; } var TracingContextImpl = class _TracingContextImpl { _contextMap; @@ -58335,8 +58335,8 @@ var require_tracingClient = __commonJS({ span.end(); } } - function withContext(context2, callback, ...callbackArgs) { - return (0, instrumenter_js_1.getInstrumenter)().withContext(context2, callback, ...callbackArgs); + function withContext(context3, callback, ...callbackArgs) { + return (0, instrumenter_js_1.getInstrumenter)().withContext(context3, callback, ...callbackArgs); } function parseTraceparentHeader(traceparentHeader) { return (0, instrumenter_js_1.getInstrumenter)().parseTraceparentHeader(traceparentHeader); @@ -91807,7 +91807,7 @@ var 
require_uploadUtils = __commonJS({ Object.defineProperty(exports2, "__esModule", { value: true }); exports2.UploadProgress = void 0; exports2.uploadCacheArchiveSDK = uploadCacheArchiveSDK; - var core12 = __importStar2(require_core()); + var core13 = __importStar2(require_core()); var storage_blob_1 = require_commonjs15(); var errors_1 = require_errors3(); var UploadProgress = class { @@ -91849,7 +91849,7 @@ var require_uploadUtils = __commonJS({ const percentage = (100 * (transferredBytes / this.contentLength)).toFixed(1); const elapsedTime = Date.now() - this.startTime; const uploadSpeed = (transferredBytes / (1024 * 1024) / (elapsedTime / 1e3)).toFixed(1); - core12.info(`Sent ${transferredBytes} of ${this.contentLength} (${percentage}%), ${uploadSpeed} MBs/sec`); + core13.info(`Sent ${transferredBytes} of ${this.contentLength} (${percentage}%), ${uploadSpeed} MBs/sec`); if (this.isDone()) { this.displayedComplete = true; } @@ -91906,14 +91906,14 @@ var require_uploadUtils = __commonJS({ }; try { uploadProgress.startDisplayTimer(); - core12.debug(`BlobClient: ${blobClient.name}:${blobClient.accountName}:${blobClient.containerName}`); + core13.debug(`BlobClient: ${blobClient.name}:${blobClient.accountName}:${blobClient.containerName}`); const response = yield blockBlobClient.uploadFile(archivePath, uploadOptions); if (response._response.status >= 400) { throw new errors_1.InvalidResponseError(`uploadCacheArchiveSDK: upload failed with status code ${response._response.status}`); } return response; } catch (error3) { - core12.warning(`uploadCacheArchiveSDK: internal error uploading cache archive: ${error3.message}`); + core13.warning(`uploadCacheArchiveSDK: internal error uploading cache archive: ${error3.message}`); throw error3; } finally { uploadProgress.stopDisplayTimer(); @@ -91998,7 +91998,7 @@ var require_requestUtils = __commonJS({ exports2.retry = retry2; exports2.retryTypedResponse = retryTypedResponse; exports2.retryHttpClientResponse = 
retryHttpClientResponse; - var core12 = __importStar2(require_core()); + var core13 = __importStar2(require_core()); var http_client_1 = require_lib(); var constants_1 = require_constants12(); function isSuccessStatusCode(statusCode) { @@ -92056,9 +92056,9 @@ var require_requestUtils = __commonJS({ isRetryable = isRetryableStatusCode(statusCode); errorMessage = `Cache service responded with ${statusCode}`; } - core12.debug(`${name} - Attempt ${attempt} of ${maxAttempts} failed with error: ${errorMessage}`); + core13.debug(`${name} - Attempt ${attempt} of ${maxAttempts} failed with error: ${errorMessage}`); if (!isRetryable) { - core12.debug(`${name} - Error is not retryable`); + core13.debug(`${name} - Error is not retryable`); break; } yield sleep(delay2); @@ -92317,7 +92317,7 @@ var require_downloadUtils = __commonJS({ exports2.downloadCacheHttpClient = downloadCacheHttpClient; exports2.downloadCacheHttpClientConcurrent = downloadCacheHttpClientConcurrent; exports2.downloadCacheStorageSDK = downloadCacheStorageSDK; - var core12 = __importStar2(require_core()); + var core13 = __importStar2(require_core()); var http_client_1 = require_lib(); var storage_blob_1 = require_commonjs15(); var buffer = __importStar2(require("buffer")); @@ -92355,7 +92355,7 @@ var require_downloadUtils = __commonJS({ this.segmentIndex = this.segmentIndex + 1; this.segmentSize = segmentSize; this.receivedBytes = 0; - core12.debug(`Downloading segment at offset ${this.segmentOffset} with length ${this.segmentSize}...`); + core13.debug(`Downloading segment at offset ${this.segmentOffset} with length ${this.segmentSize}...`); } /** * Sets the number of bytes received for the current segment. 
@@ -92389,7 +92389,7 @@ var require_downloadUtils = __commonJS({ const percentage = (100 * (transferredBytes / this.contentLength)).toFixed(1); const elapsedTime = Date.now() - this.startTime; const downloadSpeed = (transferredBytes / (1024 * 1024) / (elapsedTime / 1e3)).toFixed(1); - core12.info(`Received ${transferredBytes} of ${this.contentLength} (${percentage}%), ${downloadSpeed} MBs/sec`); + core13.info(`Received ${transferredBytes} of ${this.contentLength} (${percentage}%), ${downloadSpeed} MBs/sec`); if (this.isDone()) { this.displayedComplete = true; } @@ -92439,7 +92439,7 @@ var require_downloadUtils = __commonJS({ })); downloadResponse.message.socket.setTimeout(constants_1.SocketTimeout, () => { downloadResponse.message.destroy(); - core12.debug(`Aborting download, socket timed out after ${constants_1.SocketTimeout} ms`); + core13.debug(`Aborting download, socket timed out after ${constants_1.SocketTimeout} ms`); }); yield pipeResponseToStream(downloadResponse, writeStream); const contentLengthHeader = downloadResponse.message.headers["content-length"]; @@ -92450,7 +92450,7 @@ var require_downloadUtils = __commonJS({ throw new Error(`Incomplete download. Expected file size: ${expectedLength}, actual file size: ${actualLength}`); } } else { - core12.debug("Unable to validate download, no Content-Length header"); + core13.debug("Unable to validate download, no Content-Length header"); } }); } @@ -92568,7 +92568,7 @@ var require_downloadUtils = __commonJS({ const properties = yield client.getProperties(); const contentLength = (_a = properties.contentLength) !== null && _a !== void 0 ? 
_a : -1; if (contentLength < 0) { - core12.debug("Unable to determine content length, downloading file with http-client..."); + core13.debug("Unable to determine content length, downloading file with http-client..."); yield downloadCacheHttpClient(archiveLocation, archivePath); } else { const maxSegmentSize = Math.min(134217728, buffer.constants.MAX_LENGTH); @@ -92658,7 +92658,7 @@ var require_options = __commonJS({ Object.defineProperty(exports2, "__esModule", { value: true }); exports2.getUploadOptions = getUploadOptions; exports2.getDownloadOptions = getDownloadOptions; - var core12 = __importStar2(require_core()); + var core13 = __importStar2(require_core()); function getUploadOptions(copy) { const result = { useAzureSdk: false, @@ -92678,9 +92678,9 @@ var require_options = __commonJS({ } result.uploadConcurrency = !isNaN(Number(process.env["CACHE_UPLOAD_CONCURRENCY"])) ? Math.min(32, Number(process.env["CACHE_UPLOAD_CONCURRENCY"])) : result.uploadConcurrency; result.uploadChunkSize = !isNaN(Number(process.env["CACHE_UPLOAD_CHUNK_SIZE"])) ? 
Math.min(128 * 1024 * 1024, Number(process.env["CACHE_UPLOAD_CHUNK_SIZE"]) * 1024 * 1024) : result.uploadChunkSize; - core12.debug(`Use Azure SDK: ${result.useAzureSdk}`); - core12.debug(`Upload concurrency: ${result.uploadConcurrency}`); - core12.debug(`Upload chunk size: ${result.uploadChunkSize}`); + core13.debug(`Use Azure SDK: ${result.useAzureSdk}`); + core13.debug(`Upload concurrency: ${result.uploadConcurrency}`); + core13.debug(`Upload chunk size: ${result.uploadChunkSize}`); return result; } function getDownloadOptions(copy) { @@ -92716,12 +92716,12 @@ var require_options = __commonJS({ if (segmentDownloadTimeoutMins && !isNaN(Number(segmentDownloadTimeoutMins)) && isFinite(Number(segmentDownloadTimeoutMins))) { result.segmentTimeoutInMs = Number(segmentDownloadTimeoutMins) * 60 * 1e3; } - core12.debug(`Use Azure SDK: ${result.useAzureSdk}`); - core12.debug(`Download concurrency: ${result.downloadConcurrency}`); - core12.debug(`Request timeout (ms): ${result.timeoutInMs}`); - core12.debug(`Cache segment download timeout mins env var: ${process.env["SEGMENT_DOWNLOAD_TIMEOUT_MINS"]}`); - core12.debug(`Segment download timeout (ms): ${result.segmentTimeoutInMs}`); - core12.debug(`Lookup only: ${result.lookupOnly}`); + core13.debug(`Use Azure SDK: ${result.useAzureSdk}`); + core13.debug(`Download concurrency: ${result.downloadConcurrency}`); + core13.debug(`Request timeout (ms): ${result.timeoutInMs}`); + core13.debug(`Cache segment download timeout mins env var: ${process.env["SEGMENT_DOWNLOAD_TIMEOUT_MINS"]}`); + core13.debug(`Segment download timeout (ms): ${result.segmentTimeoutInMs}`); + core13.debug(`Lookup only: ${result.lookupOnly}`); return result; } } @@ -92915,7 +92915,7 @@ var require_cacheHttpClient = __commonJS({ exports2.downloadCache = downloadCache; exports2.reserveCache = reserveCache; exports2.saveCache = saveCache4; - var core12 = __importStar2(require_core()); + var core13 = __importStar2(require_core()); var http_client_1 = 
require_lib(); var auth_1 = require_auth(); var fs13 = __importStar2(require("fs")); @@ -92933,7 +92933,7 @@ var require_cacheHttpClient = __commonJS({ throw new Error("Cache Service Url not found, unable to restore cache."); } const url2 = `${baseUrl}_apis/artifactcache/${resource}`; - core12.debug(`Resource Url: ${url2}`); + core13.debug(`Resource Url: ${url2}`); return url2; } function createAcceptHeader(type2, apiVersion) { @@ -92961,7 +92961,7 @@ var require_cacheHttpClient = __commonJS({ return httpClient.getJson(getCacheApiUrl(resource)); })); if (response.statusCode === 204) { - if (core12.isDebug()) { + if (core13.isDebug()) { yield printCachesListForDiagnostics(keys[0], httpClient, version); } return null; @@ -92974,9 +92974,9 @@ var require_cacheHttpClient = __commonJS({ if (!cacheDownloadUrl) { throw new Error("Cache not found."); } - core12.setSecret(cacheDownloadUrl); - core12.debug(`Cache Result:`); - core12.debug(JSON.stringify(cacheResult)); + core13.setSecret(cacheDownloadUrl); + core13.debug(`Cache Result:`); + core13.debug(JSON.stringify(cacheResult)); return cacheResult; }); } @@ -92990,10 +92990,10 @@ var require_cacheHttpClient = __commonJS({ const cacheListResult = response.result; const totalCount = cacheListResult === null || cacheListResult === void 0 ? void 0 : cacheListResult.totalCount; if (totalCount && totalCount > 0) { - core12.debug(`No matching cache found for cache key '${key}', version '${version} and scope ${process.env["GITHUB_REF"]}. There exist one or more cache(s) with similar key but they have different version or scope. See more info on cache matching here: https://docs.github.com/en/actions/using-workflows/caching-dependencies-to-speed-up-workflows#matching-a-cache-key + core13.debug(`No matching cache found for cache key '${key}', version '${version} and scope ${process.env["GITHUB_REF"]}. There exist one or more cache(s) with similar key but they have different version or scope. 
See more info on cache matching here: https://docs.github.com/en/actions/using-workflows/caching-dependencies-to-speed-up-workflows#matching-a-cache-key Other caches with similar key:`); for (const cacheEntry of (cacheListResult === null || cacheListResult === void 0 ? void 0 : cacheListResult.artifactCaches) || []) { - core12.debug(`Cache Key: ${cacheEntry === null || cacheEntry === void 0 ? void 0 : cacheEntry.cacheKey}, Cache Version: ${cacheEntry === null || cacheEntry === void 0 ? void 0 : cacheEntry.cacheVersion}, Cache Scope: ${cacheEntry === null || cacheEntry === void 0 ? void 0 : cacheEntry.scope}, Cache Created: ${cacheEntry === null || cacheEntry === void 0 ? void 0 : cacheEntry.creationTime}`); + core13.debug(`Cache Key: ${cacheEntry === null || cacheEntry === void 0 ? void 0 : cacheEntry.cacheKey}, Cache Version: ${cacheEntry === null || cacheEntry === void 0 ? void 0 : cacheEntry.cacheVersion}, Cache Scope: ${cacheEntry === null || cacheEntry === void 0 ? void 0 : cacheEntry.scope}, Cache Created: ${cacheEntry === null || cacheEntry === void 0 ? 
void 0 : cacheEntry.creationTime}`); } } } @@ -93036,7 +93036,7 @@ Other caches with similar key:`); } function uploadChunk(httpClient, resourceUrl, openStream, start, end) { return __awaiter2(this, void 0, void 0, function* () { - core12.debug(`Uploading chunk of size ${end - start + 1} bytes at offset ${start} with content range: ${getContentRange(start, end)}`); + core13.debug(`Uploading chunk of size ${end - start + 1} bytes at offset ${start} with content range: ${getContentRange(start, end)}`); const additionalHeaders = { "Content-Type": "application/octet-stream", "Content-Range": getContentRange(start, end) @@ -93058,7 +93058,7 @@ Other caches with similar key:`); const concurrency = utils.assertDefined("uploadConcurrency", uploadOptions.uploadConcurrency); const maxChunkSize = utils.assertDefined("uploadChunkSize", uploadOptions.uploadChunkSize); const parallelUploads = [...new Array(concurrency).keys()]; - core12.debug("Awaiting all uploads"); + core13.debug("Awaiting all uploads"); let offset = 0; try { yield Promise.all(parallelUploads.map(() => __awaiter2(this, void 0, void 0, function* () { @@ -93101,16 +93101,16 @@ Other caches with similar key:`); yield (0, uploadUtils_1.uploadCacheArchiveSDK)(signedUploadURL, archivePath, options); } else { const httpClient = createHttpClient(); - core12.debug("Upload cache"); + core13.debug("Upload cache"); yield uploadFile(httpClient, cacheId, archivePath, options); - core12.debug("Commiting cache"); + core13.debug("Commiting cache"); const cacheSize = utils.getArchiveFileSizeInBytes(archivePath); - core12.info(`Cache Size: ~${Math.round(cacheSize / (1024 * 1024))} MB (${cacheSize} B)`); + core13.info(`Cache Size: ~${Math.round(cacheSize / (1024 * 1024))} MB (${cacheSize} B)`); const commitCacheResponse = yield commitCache(httpClient, cacheId, cacheSize); if (!(0, requestUtils_1.isSuccessStatusCode)(commitCacheResponse.statusCode)) { throw new Error(`Cache service responded with ${commitCacheResponse.statusCode} 
during commit cache.`); } - core12.info("Cache saved successfully"); + core13.info("Cache saved successfully"); } }); } @@ -98593,7 +98593,7 @@ var require_cache5 = __commonJS({ exports2.isFeatureAvailable = isFeatureAvailable; exports2.restoreCache = restoreCache4; exports2.saveCache = saveCache4; - var core12 = __importStar2(require_core()); + var core13 = __importStar2(require_core()); var path12 = __importStar2(require("path")); var utils = __importStar2(require_cacheUtils()); var cacheHttpClient = __importStar2(require_cacheHttpClient()); @@ -98652,7 +98652,7 @@ var require_cache5 = __commonJS({ function restoreCache4(paths_1, primaryKey_1, restoreKeys_1, options_1) { return __awaiter2(this, arguments, void 0, function* (paths, primaryKey, restoreKeys, options, enableCrossOsArchive = false) { const cacheServiceVersion = (0, config_1.getCacheServiceVersion)(); - core12.debug(`Cache service version: ${cacheServiceVersion}`); + core13.debug(`Cache service version: ${cacheServiceVersion}`); checkPaths(paths); switch (cacheServiceVersion) { case "v2": @@ -98667,8 +98667,8 @@ var require_cache5 = __commonJS({ return __awaiter2(this, arguments, void 0, function* (paths, primaryKey, restoreKeys, options, enableCrossOsArchive = false) { restoreKeys = restoreKeys || []; const keys = [primaryKey, ...restoreKeys]; - core12.debug("Resolved Keys:"); - core12.debug(JSON.stringify(keys)); + core13.debug("Resolved Keys:"); + core13.debug(JSON.stringify(keys)); if (keys.length > 10) { throw new ValidationError(`Key Validation Error: Keys are limited to a maximum of 10.`); } @@ -98686,19 +98686,19 @@ var require_cache5 = __commonJS({ return void 0; } if (options === null || options === void 0 ? 
void 0 : options.lookupOnly) { - core12.info("Lookup only - skipping download"); + core13.info("Lookup only - skipping download"); return cacheEntry.cacheKey; } archivePath = path12.join(yield utils.createTempDirectory(), utils.getCacheFileName(compressionMethod)); - core12.debug(`Archive Path: ${archivePath}`); + core13.debug(`Archive Path: ${archivePath}`); yield cacheHttpClient.downloadCache(cacheEntry.archiveLocation, archivePath, options); - if (core12.isDebug()) { + if (core13.isDebug()) { yield (0, tar_1.listTar)(archivePath, compressionMethod); } const archiveFileSize = utils.getArchiveFileSizeInBytes(archivePath); - core12.info(`Cache Size: ~${Math.round(archiveFileSize / (1024 * 1024))} MB (${archiveFileSize} B)`); + core13.info(`Cache Size: ~${Math.round(archiveFileSize / (1024 * 1024))} MB (${archiveFileSize} B)`); yield (0, tar_1.extractTar)(archivePath, compressionMethod); - core12.info("Cache restored successfully"); + core13.info("Cache restored successfully"); return cacheEntry.cacheKey; } catch (error3) { const typedError = error3; @@ -98706,16 +98706,16 @@ var require_cache5 = __commonJS({ throw error3; } else { if (typedError instanceof http_client_1.HttpClientError && typeof typedError.statusCode === "number" && typedError.statusCode >= 500) { - core12.error(`Failed to restore: ${error3.message}`); + core13.error(`Failed to restore: ${error3.message}`); } else { - core12.warning(`Failed to restore: ${error3.message}`); + core13.warning(`Failed to restore: ${error3.message}`); } } } finally { try { yield utils.unlinkFile(archivePath); } catch (error3) { - core12.debug(`Failed to delete archive: ${error3}`); + core13.debug(`Failed to delete archive: ${error3}`); } } return void 0; @@ -98726,8 +98726,8 @@ var require_cache5 = __commonJS({ options = Object.assign(Object.assign({}, options), { useAzureSdk: true }); restoreKeys = restoreKeys || []; const keys = [primaryKey, ...restoreKeys]; - core12.debug("Resolved Keys:"); - 
core12.debug(JSON.stringify(keys)); + core13.debug("Resolved Keys:"); + core13.debug(JSON.stringify(keys)); if (keys.length > 10) { throw new ValidationError(`Key Validation Error: Keys are limited to a maximum of 10.`); } @@ -98745,30 +98745,30 @@ var require_cache5 = __commonJS({ }; const response = yield twirpClient.GetCacheEntryDownloadURL(request2); if (!response.ok) { - core12.debug(`Cache not found for version ${request2.version} of keys: ${keys.join(", ")}`); + core13.debug(`Cache not found for version ${request2.version} of keys: ${keys.join(", ")}`); return void 0; } const isRestoreKeyMatch = request2.key !== response.matchedKey; if (isRestoreKeyMatch) { - core12.info(`Cache hit for restore-key: ${response.matchedKey}`); + core13.info(`Cache hit for restore-key: ${response.matchedKey}`); } else { - core12.info(`Cache hit for: ${response.matchedKey}`); + core13.info(`Cache hit for: ${response.matchedKey}`); } if (options === null || options === void 0 ? void 0 : options.lookupOnly) { - core12.info("Lookup only - skipping download"); + core13.info("Lookup only - skipping download"); return response.matchedKey; } archivePath = path12.join(yield utils.createTempDirectory(), utils.getCacheFileName(compressionMethod)); - core12.debug(`Archive path: ${archivePath}`); - core12.debug(`Starting download of archive to: ${archivePath}`); + core13.debug(`Archive path: ${archivePath}`); + core13.debug(`Starting download of archive to: ${archivePath}`); yield cacheHttpClient.downloadCache(response.signedDownloadUrl, archivePath, options); const archiveFileSize = utils.getArchiveFileSizeInBytes(archivePath); - core12.info(`Cache Size: ~${Math.round(archiveFileSize / (1024 * 1024))} MB (${archiveFileSize} B)`); - if (core12.isDebug()) { + core13.info(`Cache Size: ~${Math.round(archiveFileSize / (1024 * 1024))} MB (${archiveFileSize} B)`); + if (core13.isDebug()) { yield (0, tar_1.listTar)(archivePath, compressionMethod); } yield (0, tar_1.extractTar)(archivePath, 
compressionMethod); - core12.info("Cache restored successfully"); + core13.info("Cache restored successfully"); return response.matchedKey; } catch (error3) { const typedError = error3; @@ -98776,9 +98776,9 @@ var require_cache5 = __commonJS({ throw error3; } else { if (typedError instanceof http_client_1.HttpClientError && typeof typedError.statusCode === "number" && typedError.statusCode >= 500) { - core12.error(`Failed to restore: ${error3.message}`); + core13.error(`Failed to restore: ${error3.message}`); } else { - core12.warning(`Failed to restore: ${error3.message}`); + core13.warning(`Failed to restore: ${error3.message}`); } } } finally { @@ -98787,7 +98787,7 @@ var require_cache5 = __commonJS({ yield utils.unlinkFile(archivePath); } } catch (error3) { - core12.debug(`Failed to delete archive: ${error3}`); + core13.debug(`Failed to delete archive: ${error3}`); } } return void 0; @@ -98796,7 +98796,7 @@ var require_cache5 = __commonJS({ function saveCache4(paths_1, key_1, options_1) { return __awaiter2(this, arguments, void 0, function* (paths, key, options, enableCrossOsArchive = false) { const cacheServiceVersion = (0, config_1.getCacheServiceVersion)(); - core12.debug(`Cache service version: ${cacheServiceVersion}`); + core13.debug(`Cache service version: ${cacheServiceVersion}`); checkPaths(paths); checkKey(key); switch (cacheServiceVersion) { @@ -98814,26 +98814,26 @@ var require_cache5 = __commonJS({ const compressionMethod = yield utils.getCompressionMethod(); let cacheId = -1; const cachePaths = yield utils.resolvePaths(paths); - core12.debug("Cache Paths:"); - core12.debug(`${JSON.stringify(cachePaths)}`); + core13.debug("Cache Paths:"); + core13.debug(`${JSON.stringify(cachePaths)}`); if (cachePaths.length === 0) { throw new Error(`Path Validation Error: Path(s) specified in the action for caching do(es) not exist, hence no cache is being saved.`); } const archiveFolder = yield utils.createTempDirectory(); const archivePath = 
path12.join(archiveFolder, utils.getCacheFileName(compressionMethod)); - core12.debug(`Archive Path: ${archivePath}`); + core13.debug(`Archive Path: ${archivePath}`); try { yield (0, tar_1.createTar)(archiveFolder, cachePaths, compressionMethod); - if (core12.isDebug()) { + if (core13.isDebug()) { yield (0, tar_1.listTar)(archivePath, compressionMethod); } const fileSizeLimit = 10 * 1024 * 1024 * 1024; const archiveFileSize = utils.getArchiveFileSizeInBytes(archivePath); - core12.debug(`File Size: ${archiveFileSize}`); + core13.debug(`File Size: ${archiveFileSize}`); if (archiveFileSize > fileSizeLimit && !(0, config_1.isGhes)()) { throw new Error(`Cache size of ~${Math.round(archiveFileSize / (1024 * 1024))} MB (${archiveFileSize} B) is over the 10GB limit, not saving cache.`); } - core12.debug("Reserving Cache"); + core13.debug("Reserving Cache"); const reserveCacheResponse = yield cacheHttpClient.reserveCache(key, paths, { compressionMethod, enableCrossOsArchive, @@ -98846,26 +98846,26 @@ var require_cache5 = __commonJS({ } else { throw new ReserveCacheError(`Unable to reserve cache with key ${key}, another job may be creating this cache. More details: ${(_e = reserveCacheResponse === null || reserveCacheResponse === void 0 ? void 0 : reserveCacheResponse.error) === null || _e === void 0 ? 
void 0 : _e.message}`); } - core12.debug(`Saving Cache (ID: ${cacheId})`); + core13.debug(`Saving Cache (ID: ${cacheId})`); yield cacheHttpClient.saveCache(cacheId, archivePath, "", options); } catch (error3) { const typedError = error3; if (typedError.name === ValidationError.name) { throw error3; } else if (typedError.name === ReserveCacheError.name) { - core12.info(`Failed to save: ${typedError.message}`); + core13.info(`Failed to save: ${typedError.message}`); } else { if (typedError instanceof http_client_1.HttpClientError && typeof typedError.statusCode === "number" && typedError.statusCode >= 500) { - core12.error(`Failed to save: ${typedError.message}`); + core13.error(`Failed to save: ${typedError.message}`); } else { - core12.warning(`Failed to save: ${typedError.message}`); + core13.warning(`Failed to save: ${typedError.message}`); } } } finally { try { yield utils.unlinkFile(archivePath); } catch (error3) { - core12.debug(`Failed to delete archive: ${error3}`); + core13.debug(`Failed to delete archive: ${error3}`); } } return cacheId; @@ -98878,23 +98878,23 @@ var require_cache5 = __commonJS({ const twirpClient = cacheTwirpClient.internalCacheTwirpClient(); let cacheId = -1; const cachePaths = yield utils.resolvePaths(paths); - core12.debug("Cache Paths:"); - core12.debug(`${JSON.stringify(cachePaths)}`); + core13.debug("Cache Paths:"); + core13.debug(`${JSON.stringify(cachePaths)}`); if (cachePaths.length === 0) { throw new Error(`Path Validation Error: Path(s) specified in the action for caching do(es) not exist, hence no cache is being saved.`); } const archiveFolder = yield utils.createTempDirectory(); const archivePath = path12.join(archiveFolder, utils.getCacheFileName(compressionMethod)); - core12.debug(`Archive Path: ${archivePath}`); + core13.debug(`Archive Path: ${archivePath}`); try { yield (0, tar_1.createTar)(archiveFolder, cachePaths, compressionMethod); - if (core12.isDebug()) { + if (core13.isDebug()) { yield (0, 
tar_1.listTar)(archivePath, compressionMethod); } const archiveFileSize = utils.getArchiveFileSizeInBytes(archivePath); - core12.debug(`File Size: ${archiveFileSize}`); + core13.debug(`File Size: ${archiveFileSize}`); options.archiveSizeBytes = archiveFileSize; - core12.debug("Reserving Cache"); + core13.debug("Reserving Cache"); const version = utils.getCacheVersion(paths, compressionMethod, enableCrossOsArchive); const request2 = { key, @@ -98905,16 +98905,16 @@ var require_cache5 = __commonJS({ const response = yield twirpClient.CreateCacheEntry(request2); if (!response.ok) { if (response.message) { - core12.warning(`Cache reservation failed: ${response.message}`); + core13.warning(`Cache reservation failed: ${response.message}`); } throw new Error(response.message || "Response was not ok"); } signedUploadUrl = response.signedUploadUrl; } catch (error3) { - core12.debug(`Failed to reserve cache: ${error3}`); + core13.debug(`Failed to reserve cache: ${error3}`); throw new ReserveCacheError(`Unable to reserve cache with key ${key}, another job may be creating this cache.`); } - core12.debug(`Attempting to upload cache located at: ${archivePath}`); + core13.debug(`Attempting to upload cache located at: ${archivePath}`); yield cacheHttpClient.saveCache(cacheId, archivePath, signedUploadUrl, options); const finalizeRequest = { key, @@ -98922,7 +98922,7 @@ var require_cache5 = __commonJS({ sizeBytes: `${archiveFileSize}` }; const finalizeResponse = yield twirpClient.FinalizeCacheEntryUpload(finalizeRequest); - core12.debug(`FinalizeCacheEntryUploadResponse: ${finalizeResponse.ok}`); + core13.debug(`FinalizeCacheEntryUploadResponse: ${finalizeResponse.ok}`); if (!finalizeResponse.ok) { if (finalizeResponse.message) { throw new FinalizeCacheError(finalizeResponse.message); @@ -98935,21 +98935,21 @@ var require_cache5 = __commonJS({ if (typedError.name === ValidationError.name) { throw error3; } else if (typedError.name === ReserveCacheError.name) { - core12.info(`Failed 
to save: ${typedError.message}`); + core13.info(`Failed to save: ${typedError.message}`); } else if (typedError.name === FinalizeCacheError.name) { - core12.warning(typedError.message); + core13.warning(typedError.message); } else { if (typedError instanceof http_client_1.HttpClientError && typeof typedError.statusCode === "number" && typedError.statusCode >= 500) { - core12.error(`Failed to save: ${typedError.message}`); + core13.error(`Failed to save: ${typedError.message}`); } else { - core12.warning(`Failed to save: ${typedError.message}`); + core13.warning(`Failed to save: ${typedError.message}`); } } } finally { try { yield utils.unlinkFile(archivePath); } catch (error3) { - core12.debug(`Failed to delete archive: ${error3}`); + core13.debug(`Failed to delete archive: ${error3}`); } } return cacheId; @@ -99176,7 +99176,7 @@ var require_retry_helper = __commonJS({ }; Object.defineProperty(exports2, "__esModule", { value: true }); exports2.RetryHelper = void 0; - var core12 = __importStar2(require_core()); + var core13 = __importStar2(require_core()); var RetryHelper = class { constructor(maxAttempts, minSeconds, maxSeconds) { if (maxAttempts < 1) { @@ -99199,10 +99199,10 @@ var require_retry_helper = __commonJS({ if (isRetryable && !isRetryable(err)) { throw err; } - core12.info(err.message); + core13.info(err.message); } const seconds = this.getSleepAmount(); - core12.info(`Waiting ${seconds} seconds before trying again`); + core13.info(`Waiting ${seconds} seconds before trying again`); yield this.sleep(seconds); attempt++; } @@ -99305,7 +99305,7 @@ var require_tool_cache = __commonJS({ exports2.findFromManifest = findFromManifest; exports2.isExplicitVersion = isExplicitVersion; exports2.evaluateVersions = evaluateVersions; - var core12 = __importStar2(require_core()); + var core13 = __importStar2(require_core()); var io6 = __importStar2(require_io()); var crypto2 = __importStar2(require("crypto")); var fs13 = __importStar2(require("fs")); @@ -99334,8 
+99334,8 @@ var require_tool_cache = __commonJS({ return __awaiter2(this, void 0, void 0, function* () { dest = dest || path12.join(_getTempDirectory(), crypto2.randomUUID()); yield io6.mkdirP(path12.dirname(dest)); - core12.debug(`Downloading ${url2}`); - core12.debug(`Destination ${dest}`); + core13.debug(`Downloading ${url2}`); + core13.debug(`Destination ${dest}`); const maxAttempts = 3; const minSeconds = _getGlobal("TEST_DOWNLOAD_TOOL_RETRY_MIN_SECONDS", 10); const maxSeconds = _getGlobal("TEST_DOWNLOAD_TOOL_RETRY_MAX_SECONDS", 20); @@ -99361,7 +99361,7 @@ var require_tool_cache = __commonJS({ allowRetries: false }); if (auth2) { - core12.debug("set auth"); + core13.debug("set auth"); if (headers === void 0) { headers = {}; } @@ -99370,7 +99370,7 @@ var require_tool_cache = __commonJS({ const response = yield http.get(url2, headers); if (response.message.statusCode !== 200) { const err = new HTTPError2(response.message.statusCode); - core12.debug(`Failed to download from "${url2}". Code(${response.message.statusCode}) Message(${response.message.statusMessage})`); + core13.debug(`Failed to download from "${url2}". Code(${response.message.statusCode}) Message(${response.message.statusMessage})`); throw err; } const pipeline = util.promisify(stream2.pipeline); @@ -99379,16 +99379,16 @@ var require_tool_cache = __commonJS({ let succeeded = false; try { yield pipeline(readStream, fs13.createWriteStream(dest)); - core12.debug("download complete"); + core13.debug("download complete"); succeeded = true; return dest; } finally { if (!succeeded) { - core12.debug("download failed"); + core13.debug("download failed"); try { yield io6.rmRF(dest); } catch (err) { - core12.debug(`Failed to delete '${dest}'. ${err.message}`); + core13.debug(`Failed to delete '${dest}'. ${err.message}`); } } } @@ -99403,7 +99403,7 @@ var require_tool_cache = __commonJS({ process.chdir(dest); if (_7zPath) { try { - const logLevel = core12.isDebug() ? 
"-bb1" : "-bb0"; + const logLevel = core13.isDebug() ? "-bb1" : "-bb0"; const args = [ "x", // eXtract files with full paths @@ -99456,7 +99456,7 @@ var require_tool_cache = __commonJS({ throw new Error("parameter 'file' is required"); } dest = yield _createExtractFolder(dest); - core12.debug("Checking tar --version"); + core13.debug("Checking tar --version"); let versionOutput = ""; yield (0, exec_1.exec)("tar --version", [], { ignoreReturnCode: true, @@ -99466,7 +99466,7 @@ var require_tool_cache = __commonJS({ stderr: (data) => versionOutput += data.toString() } }); - core12.debug(versionOutput.trim()); + core13.debug(versionOutput.trim()); const isGnuTar = versionOutput.toUpperCase().includes("GNU TAR"); let args; if (flags instanceof Array) { @@ -99474,7 +99474,7 @@ var require_tool_cache = __commonJS({ } else { args = [flags]; } - if (core12.isDebug() && !flags.includes("v")) { + if (core13.isDebug() && !flags.includes("v")) { args.push("-v"); } let destArg = dest; @@ -99505,7 +99505,7 @@ var require_tool_cache = __commonJS({ args = [flags]; } args.push("-x", "-C", dest, "-f", file); - if (core12.isDebug()) { + if (core13.isDebug()) { args.push("-v"); } const xarPath = yield io6.which("xar", true); @@ -99548,7 +99548,7 @@ var require_tool_cache = __commonJS({ "-Command", pwshCommand ]; - core12.debug(`Using pwsh at path: ${pwshPath}`); + core13.debug(`Using pwsh at path: ${pwshPath}`); yield (0, exec_1.exec)(`"${pwshPath}"`, args); } else { const powershellCommand = [ @@ -99568,7 +99568,7 @@ var require_tool_cache = __commonJS({ powershellCommand ]; const powershellPath = yield io6.which("powershell", true); - core12.debug(`Using powershell at path: ${powershellPath}`); + core13.debug(`Using powershell at path: ${powershellPath}`); yield (0, exec_1.exec)(`"${powershellPath}"`, args); } }); @@ -99577,7 +99577,7 @@ var require_tool_cache = __commonJS({ return __awaiter2(this, void 0, void 0, function* () { const unzipPath = yield io6.which("unzip", true); const 
args = [file]; - if (!core12.isDebug()) { + if (!core13.isDebug()) { args.unshift("-q"); } args.unshift("-o"); @@ -99588,8 +99588,8 @@ var require_tool_cache = __commonJS({ return __awaiter2(this, void 0, void 0, function* () { version = semver9.clean(version) || version; arch2 = arch2 || os2.arch(); - core12.debug(`Caching tool ${tool} ${version} ${arch2}`); - core12.debug(`source dir: ${sourceDir}`); + core13.debug(`Caching tool ${tool} ${version} ${arch2}`); + core13.debug(`source dir: ${sourceDir}`); if (!fs13.statSync(sourceDir).isDirectory()) { throw new Error("sourceDir is not a directory"); } @@ -99606,14 +99606,14 @@ var require_tool_cache = __commonJS({ return __awaiter2(this, void 0, void 0, function* () { version = semver9.clean(version) || version; arch2 = arch2 || os2.arch(); - core12.debug(`Caching tool ${tool} ${version} ${arch2}`); - core12.debug(`source file: ${sourceFile}`); + core13.debug(`Caching tool ${tool} ${version} ${arch2}`); + core13.debug(`source file: ${sourceFile}`); if (!fs13.statSync(sourceFile).isFile()) { throw new Error("sourceFile is not a file"); } const destFolder = yield _createToolPath(tool, version, arch2); const destPath = path12.join(destFolder, targetFile); - core12.debug(`destination file ${destPath}`); + core13.debug(`destination file ${destPath}`); yield io6.cp(sourceFile, destPath); _completeToolPath(tool, version, arch2); return destFolder; @@ -99636,12 +99636,12 @@ var require_tool_cache = __commonJS({ if (versionSpec) { versionSpec = semver9.clean(versionSpec) || ""; const cachePath = path12.join(_getCacheDirectory(), toolName, versionSpec, arch2); - core12.debug(`checking cache: ${cachePath}`); + core13.debug(`checking cache: ${cachePath}`); if (fs13.existsSync(cachePath) && fs13.existsSync(`${cachePath}.complete`)) { - core12.debug(`Found tool in cache ${toolName} ${versionSpec} ${arch2}`); + core13.debug(`Found tool in cache ${toolName} ${versionSpec} ${arch2}`); toolPath = cachePath; } else { - 
core12.debug("not found"); + core13.debug("not found"); } } return toolPath; @@ -99670,7 +99670,7 @@ var require_tool_cache = __commonJS({ const http = new httpm.HttpClient("tool-cache"); const headers = {}; if (auth2) { - core12.debug("set auth"); + core13.debug("set auth"); headers.authorization = auth2; } const response = yield http.getJson(treeUrl, headers); @@ -99691,7 +99691,7 @@ var require_tool_cache = __commonJS({ try { releases = JSON.parse(versionsRaw); } catch (_a) { - core12.debug("Invalid json"); + core13.debug("Invalid json"); } } return releases; @@ -99715,7 +99715,7 @@ var require_tool_cache = __commonJS({ function _createToolPath(tool, version, arch2) { return __awaiter2(this, void 0, void 0, function* () { const folderPath = path12.join(_getCacheDirectory(), tool, semver9.clean(version) || version, arch2 || ""); - core12.debug(`destination ${folderPath}`); + core13.debug(`destination ${folderPath}`); const markerPath = `${folderPath}.complete`; yield io6.rmRF(folderPath); yield io6.rmRF(markerPath); @@ -99727,18 +99727,18 @@ var require_tool_cache = __commonJS({ const folderPath = path12.join(_getCacheDirectory(), tool, semver9.clean(version) || version, arch2 || ""); const markerPath = `${folderPath}.complete`; fs13.writeFileSync(markerPath, ""); - core12.debug("finished caching tool"); + core13.debug("finished caching tool"); } function isExplicitVersion(versionSpec) { const c = semver9.clean(versionSpec) || ""; - core12.debug(`isExplicit: ${c}`); + core13.debug(`isExplicit: ${c}`); const valid3 = semver9.valid(c) != null; - core12.debug(`explicit? ${valid3}`); + core13.debug(`explicit? 
${valid3}`); return valid3; } function evaluateVersions(versions, versionSpec) { let version = ""; - core12.debug(`evaluating ${versions.length} versions`); + core13.debug(`evaluating ${versions.length} versions`); versions = versions.sort((a, b) => { if (semver9.gt(a, b)) { return 1; @@ -99754,9 +99754,9 @@ var require_tool_cache = __commonJS({ } } if (version) { - core12.debug(`matched: ${version}`); + core13.debug(`matched: ${version}`); } else { - core12.debug("match not found"); + core13.debug("match not found"); } return version; } @@ -103253,7 +103253,7 @@ var fs12 = __toESM(require("fs")); var path11 = __toESM(require("path")); var url = __toESM(require("url")); var import_zlib = __toESM(require("zlib")); -var core11 = __toESM(require_core()); +var core12 = __toESM(require_core()); var jsonschema2 = __toESM(require_lib2()); // src/actions-util.ts @@ -106826,6 +106826,7 @@ var semver2 = __toESM(require_semver2()); var RepositoryPropertyName = /* @__PURE__ */ ((RepositoryPropertyName2) => { RepositoryPropertyName2["DISABLE_OVERLAY"] = "github-codeql-disable-overlay"; RepositoryPropertyName2["EXTRA_QUERIES"] = "github-codeql-extra-queries"; + RepositoryPropertyName2["FILE_COVERAGE_ON_PRS"] = "github-codeql-file-coverage-on-prs"; return RepositoryPropertyName2; })(RepositoryPropertyName || {}); var KNOWN_REPOSITORY_PROPERTY_NAMES = new Set( @@ -107417,11 +107418,8 @@ var featureConfig = { ["skip_file_coverage_on_prs" /* SkipFileCoverageOnPrs */]: { defaultValue: false, envVar: "CODEQL_ACTION_SKIP_FILE_COVERAGE_ON_PRS", - // For testing, this is not behind a CLI version check yet. However - // before rolling this out externally, we should set a minimum version here - // since current versions of the CodeQL CLI will log if baseline information - // cannot be found when interpreting results. 
- minimumVersion: void 0 + minimumVersion: void 0, + toolsFeature: "suppressesMissingFileBaselineWarning" /* SuppressesMissingFileBaselineWarning */ }, ["start_proxy_remove_unused_registries" /* StartProxyRemoveUnusedRegistries */]: { defaultValue: false, @@ -110242,7 +110240,9 @@ async function addFingerprints(sarifLog, sourceRoot, logger) { } // src/init.ts +var core11 = __toESM(require_core()); var toolrunner4 = __toESM(require_toolrunner()); +var github2 = __toESM(require_github()); var io5 = __toESM(require_io()); async function initCodeQL(toolsInput, apiDetails, tempDir, variant, defaultCliVersion, features, logger) { logger.startGroup("Setup CodeQL tools"); @@ -110384,7 +110384,7 @@ async function combineSarifFilesUsingCLI(sarifFiles, gitHubVersion, features, lo logger.warning( `Uploading multiple SARIF runs with the same category is deprecated ${deprecationWarningMessage}. Please update your workflow to upload a single run per category. ${deprecationMoreInformationMessage}` ); - core11.exportVariable("CODEQL_MERGE_SARIF_DEPRECATION_WARNING", "true"); + core12.exportVariable("CODEQL_MERGE_SARIF_DEPRECATION_WARNING", "true"); } return combineSarifFiles(sarifFiles, logger); } @@ -110483,13 +110483,13 @@ async function uploadPayload(payload, repositoryNwo, logger, analysis) { if (httpError !== void 0) { switch (httpError.status) { case 403: - core11.warning(httpError.message || GENERIC_403_MSG); + core12.warning(httpError.message || GENERIC_403_MSG); break; case 404: - core11.warning(httpError.message || GENERIC_404_MSG); + core12.warning(httpError.message || GENERIC_404_MSG); break; default: - core11.warning(httpError.message); + core12.warning(httpError.message); break; } } @@ -110923,7 +110923,7 @@ function validateUniqueCategory(sarifLog, sentinelPrefix) { `Aborting upload: only one run of the codeql/analyze or codeql/upload-sarif actions is allowed per job per tool/category. The easiest fix is to specify a unique value for the \`category\` input. 
If .runs[].automationDetails.id is specified in the sarif file, that will take precedence over your configured \`category\`. Category: (${id ? id : "none"}) Tool: (${tool ? tool : "none"})` ); } - core11.exportVariable(sentinelEnvVar, sentinelEnvVar); + core12.exportVariable(sentinelEnvVar, sentinelEnvVar); } } function sanitize(str2) { diff --git a/lib/upload-sarif-action-post.js b/lib/upload-sarif-action-post.js index dab78eb861..4cee33261d 100644 --- a/lib/upload-sarif-action-post.js +++ b/lib/upload-sarif-action-post.js @@ -161518,6 +161518,7 @@ var semver2 = __toESM(require_semver2()); var RepositoryPropertyName = /* @__PURE__ */ ((RepositoryPropertyName2) => { RepositoryPropertyName2["DISABLE_OVERLAY"] = "github-codeql-disable-overlay"; RepositoryPropertyName2["EXTRA_QUERIES"] = "github-codeql-extra-queries"; + RepositoryPropertyName2["FILE_COVERAGE_ON_PRS"] = "github-codeql-file-coverage-on-prs"; return RepositoryPropertyName2; })(RepositoryPropertyName || {}); var KNOWN_REPOSITORY_PROPERTY_NAMES = new Set( @@ -161789,11 +161790,8 @@ var featureConfig = { ["skip_file_coverage_on_prs" /* SkipFileCoverageOnPrs */]: { defaultValue: false, envVar: "CODEQL_ACTION_SKIP_FILE_COVERAGE_ON_PRS", - // For testing, this is not behind a CLI version check yet. However - // before rolling this out externally, we should set a minimum version here - // since current versions of the CodeQL CLI will log if baseline information - // cannot be found when interpreting results. 
- minimumVersion: void 0 + minimumVersion: void 0, + toolsFeature: "suppressesMissingFileBaselineWarning" /* SuppressesMissingFileBaselineWarning */ }, ["start_proxy_remove_unused_registries" /* StartProxyRemoveUnusedRegistries */]: { defaultValue: false, diff --git a/lib/upload-sarif-action.js b/lib/upload-sarif-action.js index 53f3856536..28befb5182 100644 --- a/lib/upload-sarif-action.js +++ b/lib/upload-sarif-action.js @@ -3842,18 +3842,18 @@ var require_webidl = __commonJS({ webidl.errors.exception = function(message) { return new TypeError(`${message.header}: ${message.message}`); }; - webidl.errors.conversionFailed = function(context2) { - const plural = context2.types.length === 1 ? "" : " one of"; - const message = `${context2.argument} could not be converted to${plural}: ${context2.types.join(", ")}.`; + webidl.errors.conversionFailed = function(context3) { + const plural = context3.types.length === 1 ? "" : " one of"; + const message = `${context3.argument} could not be converted to${plural}: ${context3.types.join(", ")}.`; return webidl.errors.exception({ - header: context2.prefix, + header: context3.prefix, message }); }; - webidl.errors.invalidArgument = function(context2) { + webidl.errors.invalidArgument = function(context3) { return webidl.errors.exception({ - header: context2.prefix, - message: `"${context2.value}" is an invalid ${context2.type}.` + header: context3.prefix, + message: `"${context3.value}" is an invalid ${context3.type}.` }); }; webidl.brandCheck = function(V, I, opts) { @@ -9849,17 +9849,17 @@ var require_api_request = __commonJS({ } } } - onConnect(abort, context2) { + onConnect(abort, context3) { if (this.reason) { abort(this.reason); return; } assert(this.callback); this.abort = abort; - this.context = context2; + this.context = context3; } onHeaders(statusCode, rawHeaders, resume, statusMessage) { - const { callback, opaque, abort, context: context2, responseHeaders, highWaterMark } = this; + const { callback, opaque, abort, 
context: context3, responseHeaders, highWaterMark } = this; const headers = responseHeaders === "raw" ? util.parseRawHeaders(rawHeaders) : util.parseHeaders(rawHeaders); if (statusCode < 200) { if (this.onInfo) { @@ -9896,7 +9896,7 @@ var require_api_request = __commonJS({ trailers: this.trailers, opaque, body: res, - context: context2 + context: context3 }); } } @@ -10065,17 +10065,17 @@ var require_api_stream = __commonJS({ } addSignal(this, signal); } - onConnect(abort, context2) { + onConnect(abort, context3) { if (this.reason) { abort(this.reason); return; } assert(this.callback); this.abort = abort; - this.context = context2; + this.context = context3; } onHeaders(statusCode, rawHeaders, resume, statusMessage) { - const { factory, opaque, context: context2, callback, responseHeaders } = this; + const { factory, opaque, context: context3, callback, responseHeaders } = this; const headers = responseHeaders === "raw" ? util.parseRawHeaders(rawHeaders) : util.parseHeaders(rawHeaders); if (statusCode < 200) { if (this.onInfo) { @@ -10103,7 +10103,7 @@ var require_api_stream = __commonJS({ statusCode, headers, opaque, - context: context2 + context: context3 }); if (!res || typeof res.write !== "function" || typeof res.end !== "function" || typeof res.on !== "function") { throw new InvalidReturnValueError("expected Writable"); @@ -10295,7 +10295,7 @@ var require_api_pipeline = __commonJS({ this.res = null; addSignal(this, signal); } - onConnect(abort, context2) { + onConnect(abort, context3) { const { ret, res } = this; if (this.reason) { abort(this.reason); @@ -10304,10 +10304,10 @@ var require_api_pipeline = __commonJS({ assert(!res, "pipeline cannot be retried"); assert(!ret.destroyed); this.abort = abort; - this.context = context2; + this.context = context3; } onHeaders(statusCode, rawHeaders, resume) { - const { opaque, handler: handler2, context: context2 } = this; + const { opaque, handler: handler2, context: context3 } = this; if (statusCode < 200) { if 
(this.onInfo) { const headers = this.responseHeaders === "raw" ? util.parseRawHeaders(rawHeaders) : util.parseHeaders(rawHeaders); @@ -10325,7 +10325,7 @@ var require_api_pipeline = __commonJS({ headers, opaque, body: this.res, - context: context2 + context: context3 }); } catch (err) { this.res.on("error", util.nop); @@ -10409,7 +10409,7 @@ var require_api_upgrade = __commonJS({ this.context = null; addSignal(this, signal); } - onConnect(abort, context2) { + onConnect(abort, context3) { if (this.reason) { abort(this.reason); return; @@ -10423,7 +10423,7 @@ var require_api_upgrade = __commonJS({ } onUpgrade(statusCode, rawHeaders, socket) { assert(statusCode === 101); - const { callback, opaque, context: context2 } = this; + const { callback, opaque, context: context3 } = this; removeSignal(this); this.callback = null; const headers = this.responseHeaders === "raw" ? util.parseRawHeaders(rawHeaders) : util.parseHeaders(rawHeaders); @@ -10431,7 +10431,7 @@ var require_api_upgrade = __commonJS({ headers, socket, opaque, - context: context2 + context: context3 }); } onError(err) { @@ -10500,20 +10500,20 @@ var require_api_connect = __commonJS({ this.abort = null; addSignal(this, signal); } - onConnect(abort, context2) { + onConnect(abort, context3) { if (this.reason) { abort(this.reason); return; } assert(this.callback); this.abort = abort; - this.context = context2; + this.context = context3; } onHeaders() { throw new SocketError("bad connect", null); } onUpgrade(statusCode, rawHeaders, socket) { - const { callback, opaque, context: context2 } = this; + const { callback, opaque, context: context3 } = this; removeSignal(this); this.callback = null; let headers = rawHeaders; @@ -10525,7 +10525,7 @@ var require_api_connect = __commonJS({ headers, socket, opaque, - context: context2 + context: context3 }); } onError(err) { @@ -21321,7 +21321,7 @@ var require_core = __commonJS({ }; Object.defineProperty(exports2, "__esModule", { value: true }); exports2.platform = 
exports2.toPlatformPath = exports2.toWin32Path = exports2.toPosixPath = exports2.markdownSummary = exports2.summary = exports2.ExitCode = void 0; - exports2.exportVariable = exportVariable6; + exports2.exportVariable = exportVariable7; exports2.setSecret = setSecret; exports2.addPath = addPath; exports2.getInput = getInput2; @@ -21353,7 +21353,7 @@ var require_core = __commonJS({ ExitCode2[ExitCode2["Success"] = 0] = "Success"; ExitCode2[ExitCode2["Failure"] = 1] = "Failure"; })(ExitCode || (exports2.ExitCode = ExitCode = {})); - function exportVariable6(name, val) { + function exportVariable7(name, val) { const convertedVal = (0, utils_1.toCommandValue)(val); process.env[name] = convertedVal; const filePath = process.env["GITHUB_ENV"] || ""; @@ -24840,18 +24840,18 @@ var require_webidl2 = __commonJS({ webidl.errors.exception = function(message) { return new TypeError(`${message.header}: ${message.message}`); }; - webidl.errors.conversionFailed = function(context2) { - const plural = context2.types.length === 1 ? "" : " one of"; - const message = `${context2.argument} could not be converted to${plural}: ${context2.types.join(", ")}.`; + webidl.errors.conversionFailed = function(context3) { + const plural = context3.types.length === 1 ? 
"" : " one of"; + const message = `${context3.argument} could not be converted to${plural}: ${context3.types.join(", ")}.`; return webidl.errors.exception({ - header: context2.prefix, + header: context3.prefix, message }); }; - webidl.errors.invalidArgument = function(context2) { + webidl.errors.invalidArgument = function(context3) { return webidl.errors.exception({ - header: context2.prefix, - message: `"${context2.value}" is an invalid ${context2.type}.` + header: context3.prefix, + message: `"${context3.value}" is an invalid ${context3.type}.` }); }; webidl.brandCheck = function(V, I, opts) { @@ -30847,17 +30847,17 @@ var require_api_request2 = __commonJS({ } } } - onConnect(abort, context2) { + onConnect(abort, context3) { if (this.reason) { abort(this.reason); return; } assert(this.callback); this.abort = abort; - this.context = context2; + this.context = context3; } onHeaders(statusCode, rawHeaders, resume, statusMessage) { - const { callback, opaque, abort, context: context2, responseHeaders, highWaterMark } = this; + const { callback, opaque, abort, context: context3, responseHeaders, highWaterMark } = this; const headers = responseHeaders === "raw" ? 
util.parseRawHeaders(rawHeaders) : util.parseHeaders(rawHeaders); if (statusCode < 200) { if (this.onInfo) { @@ -30894,7 +30894,7 @@ var require_api_request2 = __commonJS({ trailers: this.trailers, opaque, body: res, - context: context2 + context: context3 }); } } @@ -31063,17 +31063,17 @@ var require_api_stream2 = __commonJS({ } addSignal(this, signal); } - onConnect(abort, context2) { + onConnect(abort, context3) { if (this.reason) { abort(this.reason); return; } assert(this.callback); this.abort = abort; - this.context = context2; + this.context = context3; } onHeaders(statusCode, rawHeaders, resume, statusMessage) { - const { factory, opaque, context: context2, callback, responseHeaders } = this; + const { factory, opaque, context: context3, callback, responseHeaders } = this; const headers = responseHeaders === "raw" ? util.parseRawHeaders(rawHeaders) : util.parseHeaders(rawHeaders); if (statusCode < 200) { if (this.onInfo) { @@ -31101,7 +31101,7 @@ var require_api_stream2 = __commonJS({ statusCode, headers, opaque, - context: context2 + context: context3 }); if (!res || typeof res.write !== "function" || typeof res.end !== "function" || typeof res.on !== "function") { throw new InvalidReturnValueError("expected Writable"); @@ -31293,7 +31293,7 @@ var require_api_pipeline2 = __commonJS({ this.res = null; addSignal(this, signal); } - onConnect(abort, context2) { + onConnect(abort, context3) { const { ret, res } = this; if (this.reason) { abort(this.reason); @@ -31302,10 +31302,10 @@ var require_api_pipeline2 = __commonJS({ assert(!res, "pipeline cannot be retried"); assert(!ret.destroyed); this.abort = abort; - this.context = context2; + this.context = context3; } onHeaders(statusCode, rawHeaders, resume) { - const { opaque, handler: handler2, context: context2 } = this; + const { opaque, handler: handler2, context: context3 } = this; if (statusCode < 200) { if (this.onInfo) { const headers = this.responseHeaders === "raw" ? 
util.parseRawHeaders(rawHeaders) : util.parseHeaders(rawHeaders); @@ -31323,7 +31323,7 @@ var require_api_pipeline2 = __commonJS({ headers, opaque, body: this.res, - context: context2 + context: context3 }); } catch (err) { this.res.on("error", util.nop); @@ -31407,7 +31407,7 @@ var require_api_upgrade2 = __commonJS({ this.context = null; addSignal(this, signal); } - onConnect(abort, context2) { + onConnect(abort, context3) { if (this.reason) { abort(this.reason); return; @@ -31421,7 +31421,7 @@ var require_api_upgrade2 = __commonJS({ } onUpgrade(statusCode, rawHeaders, socket) { assert(statusCode === 101); - const { callback, opaque, context: context2 } = this; + const { callback, opaque, context: context3 } = this; removeSignal(this); this.callback = null; const headers = this.responseHeaders === "raw" ? util.parseRawHeaders(rawHeaders) : util.parseHeaders(rawHeaders); @@ -31429,7 +31429,7 @@ var require_api_upgrade2 = __commonJS({ headers, socket, opaque, - context: context2 + context: context3 }); } onError(err) { @@ -31498,20 +31498,20 @@ var require_api_connect2 = __commonJS({ this.abort = null; addSignal(this, signal); } - onConnect(abort, context2) { + onConnect(abort, context3) { if (this.reason) { abort(this.reason); return; } assert(this.callback); this.abort = abort; - this.context = context2; + this.context = context3; } onHeaders() { throw new SocketError("bad connect", null); } onUpgrade(statusCode, rawHeaders, socket) { - const { callback, opaque, context: context2 } = this; + const { callback, opaque, context: context3 } = this; removeSignal(this); this.callback = null; let headers = rawHeaders; @@ -31523,7 +31523,7 @@ var require_api_connect2 = __commonJS({ headers, socket, opaque, - context: context2 + context: context3 }); } onError(err) { @@ -40178,8 +40178,8 @@ function isDefined(value) { function isKeyOperator(operator) { return operator === ";" || operator === "&" || operator === "?"; } -function getValues(context2, operator, key, modifier) 
{ - var value = context2[key], result = []; +function getValues(context3, operator, key, modifier) { + var value = context3[key], result = []; if (isDefined(value) && value !== "") { if (typeof value === "string" || typeof value === "number" || typeof value === "boolean") { value = value.toString(); @@ -40243,7 +40243,7 @@ function parseUrl(template) { expand: expand.bind(null, template) }; } -function expand(template, context2) { +function expand(template, context3) { var operators = ["+", "#", ".", "/", ";", "?", "&"]; template = template.replace( /\{([^\{\}]+)\}|([^\{\}]+)/g, @@ -40257,7 +40257,7 @@ function expand(template, context2) { } expression.split(/,/g).forEach(function(variable) { var tmp = /([^:\*]*)(?::(\d+)|(\*))?/.exec(variable); - values.push(getValues(context2, operator, tmp[1], tmp[2] || tmp[3])); + values.push(getValues(context3, operator, tmp[1], tmp[2] || tmp[3])); }); if (operator && operator !== "+") { var separator = ","; @@ -47450,7 +47450,7 @@ var require_internal_glob_options_helper = __commonJS({ })(); Object.defineProperty(exports2, "__esModule", { value: true }); exports2.getOptions = getOptions; - var core14 = __importStar2(require_core()); + var core15 = __importStar2(require_core()); function getOptions(copy) { const result = { followSymbolicLinks: true, @@ -47462,23 +47462,23 @@ var require_internal_glob_options_helper = __commonJS({ if (copy) { if (typeof copy.followSymbolicLinks === "boolean") { result.followSymbolicLinks = copy.followSymbolicLinks; - core14.debug(`followSymbolicLinks '${result.followSymbolicLinks}'`); + core15.debug(`followSymbolicLinks '${result.followSymbolicLinks}'`); } if (typeof copy.implicitDescendants === "boolean") { result.implicitDescendants = copy.implicitDescendants; - core14.debug(`implicitDescendants '${result.implicitDescendants}'`); + core15.debug(`implicitDescendants '${result.implicitDescendants}'`); } if (typeof copy.matchDirectories === "boolean") { result.matchDirectories = 
copy.matchDirectories; - core14.debug(`matchDirectories '${result.matchDirectories}'`); + core15.debug(`matchDirectories '${result.matchDirectories}'`); } if (typeof copy.omitBrokenSymbolicLinks === "boolean") { result.omitBrokenSymbolicLinks = copy.omitBrokenSymbolicLinks; - core14.debug(`omitBrokenSymbolicLinks '${result.omitBrokenSymbolicLinks}'`); + core15.debug(`omitBrokenSymbolicLinks '${result.omitBrokenSymbolicLinks}'`); } if (typeof copy.excludeHiddenFiles === "boolean") { result.excludeHiddenFiles = copy.excludeHiddenFiles; - core14.debug(`excludeHiddenFiles '${result.excludeHiddenFiles}'`); + core15.debug(`excludeHiddenFiles '${result.excludeHiddenFiles}'`); } } return result; @@ -49106,7 +49106,7 @@ var require_internal_globber = __commonJS({ }; Object.defineProperty(exports2, "__esModule", { value: true }); exports2.DefaultGlobber = void 0; - var core14 = __importStar2(require_core()); + var core15 = __importStar2(require_core()); var fs14 = __importStar2(require("fs")); var globOptionsHelper = __importStar2(require_internal_glob_options_helper()); var path13 = __importStar2(require("path")); @@ -49159,7 +49159,7 @@ var require_internal_globber = __commonJS({ } const stack = []; for (const searchPath of patternHelper.getSearchPaths(patterns)) { - core14.debug(`Search path '${searchPath}'`); + core15.debug(`Search path '${searchPath}'`); try { yield __await2(fs14.promises.lstat(searchPath)); } catch (err) { @@ -49234,7 +49234,7 @@ var require_internal_globber = __commonJS({ } catch (err) { if (err.code === "ENOENT") { if (options.omitBrokenSymbolicLinks) { - core14.debug(`Broken symlink '${item.path}'`); + core15.debug(`Broken symlink '${item.path}'`); return void 0; } throw new Error(`No information found for the path '${item.path}'. 
This may indicate a broken symbolic link.`); @@ -49250,7 +49250,7 @@ var require_internal_globber = __commonJS({ traversalChain.pop(); } if (traversalChain.some((x) => x === realPath)) { - core14.debug(`Symlink cycle detected for path '${item.path}' and realpath '${realPath}'`); + core15.debug(`Symlink cycle detected for path '${item.path}' and realpath '${realPath}'`); return void 0; } traversalChain.push(realPath); @@ -49353,7 +49353,7 @@ var require_internal_hash_files = __commonJS({ Object.defineProperty(exports2, "__esModule", { value: true }); exports2.hashFiles = hashFiles; var crypto2 = __importStar2(require("crypto")); - var core14 = __importStar2(require_core()); + var core15 = __importStar2(require_core()); var fs14 = __importStar2(require("fs")); var stream2 = __importStar2(require("stream")); var util = __importStar2(require("util")); @@ -49362,7 +49362,7 @@ var require_internal_hash_files = __commonJS({ return __awaiter2(this, arguments, void 0, function* (globber, currentWorkspace, verbose = false) { var _a, e_1, _b, _c; var _d; - const writeDelegate = verbose ? core14.info : core14.debug; + const writeDelegate = verbose ? core15.info : core15.debug; let hasMatch = false; const githubWorkspace = currentWorkspace ? currentWorkspace : (_d = process.env["GITHUB_WORKSPACE"]) !== null && _d !== void 0 ? 
_d : process.cwd(); const result = crypto2.createHash("sha256"); @@ -50753,7 +50753,7 @@ var require_cacheUtils = __commonJS({ exports2.assertDefined = assertDefined; exports2.getCacheVersion = getCacheVersion; exports2.getRuntimeToken = getRuntimeToken; - var core14 = __importStar2(require_core()); + var core15 = __importStar2(require_core()); var exec = __importStar2(require_exec()); var glob = __importStar2(require_glob()); var io6 = __importStar2(require_io()); @@ -50804,7 +50804,7 @@ var require_cacheUtils = __commonJS({ _e = false; const file = _c; const relativeFile = path13.relative(workspace, file).replace(new RegExp(`\\${path13.sep}`, "g"), "/"); - core14.debug(`Matched: ${relativeFile}`); + core15.debug(`Matched: ${relativeFile}`); if (relativeFile === "") { paths.push("."); } else { @@ -50832,7 +50832,7 @@ var require_cacheUtils = __commonJS({ return __awaiter2(this, arguments, void 0, function* (app, additionalArgs = []) { let versionOutput = ""; additionalArgs.push("--version"); - core14.debug(`Checking ${app} ${additionalArgs.join(" ")}`); + core15.debug(`Checking ${app} ${additionalArgs.join(" ")}`); try { yield exec.exec(`${app}`, additionalArgs, { ignoreReturnCode: true, @@ -50843,10 +50843,10 @@ var require_cacheUtils = __commonJS({ } }); } catch (err) { - core14.debug(err.message); + core15.debug(err.message); } versionOutput = versionOutput.trim(); - core14.debug(versionOutput); + core15.debug(versionOutput); return versionOutput; }); } @@ -50854,7 +50854,7 @@ var require_cacheUtils = __commonJS({ return __awaiter2(this, void 0, void 0, function* () { const versionOutput = yield getVersion("zstd", ["--quiet"]); const version = semver9.clean(versionOutput); - core14.debug(`zstd version: ${version}`); + core15.debug(`zstd version: ${version}`); if (versionOutput === "") { return constants_1.CompressionMethod.Gzip; } else { @@ -50979,14 +50979,14 @@ function __esDecorate(ctor, descriptorIn, decorators, contextIn, initializers, e var descriptor = 
descriptorIn || (target ? Object.getOwnPropertyDescriptor(target, contextIn.name) : {}); var _, done = false; for (var i = decorators.length - 1; i >= 0; i--) { - var context2 = {}; - for (var p in contextIn) context2[p] = p === "access" ? {} : contextIn[p]; - for (var p in contextIn.access) context2.access[p] = contextIn.access[p]; - context2.addInitializer = function(f) { + var context3 = {}; + for (var p in contextIn) context3[p] = p === "access" ? {} : contextIn[p]; + for (var p in contextIn.access) context3.access[p] = contextIn.access[p]; + context3.addInitializer = function(f) { if (done) throw new TypeError("Cannot add initializers after decoration has completed"); extraInitializers.push(accept(f || null)); }; - var result = (0, decorators[i])(kind === "accessor" ? { get: descriptor.get, set: descriptor.set } : descriptor[key], context2); + var result = (0, decorators[i])(kind === "accessor" ? { get: descriptor.get, set: descriptor.set } : descriptor[key], context3); if (kind === "accessor") { if (result === void 0) continue; if (result === null || typeof result !== "object") throw new TypeError("Object expected"); @@ -51713,19 +51713,19 @@ var require_logger = __commonJS({ logger: clientLogger }; } - var context2 = createLoggerContext({ + var context3 = createLoggerContext({ logLevelEnvVarName: "TYPESPEC_RUNTIME_LOG_LEVEL", namespace: "typeSpecRuntime" }); - exports2.TypeSpecRuntimeLogger = context2.logger; + exports2.TypeSpecRuntimeLogger = context3.logger; function setLogLevel(logLevel) { - context2.setLogLevel(logLevel); + context3.setLogLevel(logLevel); } function getLogLevel() { - return context2.getLogLevel(); + return context3.getLogLevel(); } function createClientLogger(namespace) { - return context2.createClientLogger(namespace); + return context3.createClientLogger(namespace); } } }); @@ -55971,19 +55971,19 @@ var require_commonjs2 = __commonJS({ exports2.getLogLevel = getLogLevel; exports2.createClientLogger = createClientLogger; var logger_1 = 
require_internal(); - var context2 = (0, logger_1.createLoggerContext)({ + var context3 = (0, logger_1.createLoggerContext)({ logLevelEnvVarName: "AZURE_LOG_LEVEL", namespace: "azure" }); - exports2.AzureLogger = context2.logger; + exports2.AzureLogger = context3.logger; function setLogLevel(level) { - context2.setLogLevel(level); + context3.setLogLevel(level); } function getLogLevel() { - return context2.getLogLevel(); + return context3.getLogLevel(); } function createClientLogger(namespace) { - return context2.createClientLogger(namespace); + return context3.createClientLogger(namespace); } } }); @@ -56893,14 +56893,14 @@ var require_tracingContext = __commonJS({ namespace: /* @__PURE__ */ Symbol.for("@azure/core-tracing namespace") }; function createTracingContext(options = {}) { - let context2 = new TracingContextImpl(options.parentContext); + let context3 = new TracingContextImpl(options.parentContext); if (options.span) { - context2 = context2.setValue(exports2.knownContextKeys.span, options.span); + context3 = context3.setValue(exports2.knownContextKeys.span, options.span); } if (options.namespace) { - context2 = context2.setValue(exports2.knownContextKeys.namespace, options.namespace); + context3 = context3.setValue(exports2.knownContextKeys.namespace, options.namespace); } - return context2; + return context3; } var TracingContextImpl = class _TracingContextImpl { _contextMap; @@ -57038,8 +57038,8 @@ var require_tracingClient = __commonJS({ span.end(); } } - function withContext(context2, callback, ...callbackArgs) { - return (0, instrumenter_js_1.getInstrumenter)().withContext(context2, callback, ...callbackArgs); + function withContext(context3, callback, ...callbackArgs) { + return (0, instrumenter_js_1.getInstrumenter)().withContext(context3, callback, ...callbackArgs); } function parseTraceparentHeader(traceparentHeader) { return (0, instrumenter_js_1.getInstrumenter)().parseTraceparentHeader(traceparentHeader); @@ -90510,7 +90510,7 @@ var 
require_uploadUtils = __commonJS({ Object.defineProperty(exports2, "__esModule", { value: true }); exports2.UploadProgress = void 0; exports2.uploadCacheArchiveSDK = uploadCacheArchiveSDK; - var core14 = __importStar2(require_core()); + var core15 = __importStar2(require_core()); var storage_blob_1 = require_commonjs15(); var errors_1 = require_errors3(); var UploadProgress = class { @@ -90552,7 +90552,7 @@ var require_uploadUtils = __commonJS({ const percentage = (100 * (transferredBytes / this.contentLength)).toFixed(1); const elapsedTime = Date.now() - this.startTime; const uploadSpeed = (transferredBytes / (1024 * 1024) / (elapsedTime / 1e3)).toFixed(1); - core14.info(`Sent ${transferredBytes} of ${this.contentLength} (${percentage}%), ${uploadSpeed} MBs/sec`); + core15.info(`Sent ${transferredBytes} of ${this.contentLength} (${percentage}%), ${uploadSpeed} MBs/sec`); if (this.isDone()) { this.displayedComplete = true; } @@ -90609,14 +90609,14 @@ var require_uploadUtils = __commonJS({ }; try { uploadProgress.startDisplayTimer(); - core14.debug(`BlobClient: ${blobClient.name}:${blobClient.accountName}:${blobClient.containerName}`); + core15.debug(`BlobClient: ${blobClient.name}:${blobClient.accountName}:${blobClient.containerName}`); const response = yield blockBlobClient.uploadFile(archivePath, uploadOptions); if (response._response.status >= 400) { throw new errors_1.InvalidResponseError(`uploadCacheArchiveSDK: upload failed with status code ${response._response.status}`); } return response; } catch (error3) { - core14.warning(`uploadCacheArchiveSDK: internal error uploading cache archive: ${error3.message}`); + core15.warning(`uploadCacheArchiveSDK: internal error uploading cache archive: ${error3.message}`); throw error3; } finally { uploadProgress.stopDisplayTimer(); @@ -90701,7 +90701,7 @@ var require_requestUtils = __commonJS({ exports2.retry = retry2; exports2.retryTypedResponse = retryTypedResponse; exports2.retryHttpClientResponse = 
retryHttpClientResponse; - var core14 = __importStar2(require_core()); + var core15 = __importStar2(require_core()); var http_client_1 = require_lib(); var constants_1 = require_constants12(); function isSuccessStatusCode(statusCode) { @@ -90759,9 +90759,9 @@ var require_requestUtils = __commonJS({ isRetryable = isRetryableStatusCode(statusCode); errorMessage = `Cache service responded with ${statusCode}`; } - core14.debug(`${name} - Attempt ${attempt} of ${maxAttempts} failed with error: ${errorMessage}`); + core15.debug(`${name} - Attempt ${attempt} of ${maxAttempts} failed with error: ${errorMessage}`); if (!isRetryable) { - core14.debug(`${name} - Error is not retryable`); + core15.debug(`${name} - Error is not retryable`); break; } yield sleep(delay2); @@ -91020,7 +91020,7 @@ var require_downloadUtils = __commonJS({ exports2.downloadCacheHttpClient = downloadCacheHttpClient; exports2.downloadCacheHttpClientConcurrent = downloadCacheHttpClientConcurrent; exports2.downloadCacheStorageSDK = downloadCacheStorageSDK; - var core14 = __importStar2(require_core()); + var core15 = __importStar2(require_core()); var http_client_1 = require_lib(); var storage_blob_1 = require_commonjs15(); var buffer = __importStar2(require("buffer")); @@ -91058,7 +91058,7 @@ var require_downloadUtils = __commonJS({ this.segmentIndex = this.segmentIndex + 1; this.segmentSize = segmentSize; this.receivedBytes = 0; - core14.debug(`Downloading segment at offset ${this.segmentOffset} with length ${this.segmentSize}...`); + core15.debug(`Downloading segment at offset ${this.segmentOffset} with length ${this.segmentSize}...`); } /** * Sets the number of bytes received for the current segment. 
@@ -91092,7 +91092,7 @@ var require_downloadUtils = __commonJS({ const percentage = (100 * (transferredBytes / this.contentLength)).toFixed(1); const elapsedTime = Date.now() - this.startTime; const downloadSpeed = (transferredBytes / (1024 * 1024) / (elapsedTime / 1e3)).toFixed(1); - core14.info(`Received ${transferredBytes} of ${this.contentLength} (${percentage}%), ${downloadSpeed} MBs/sec`); + core15.info(`Received ${transferredBytes} of ${this.contentLength} (${percentage}%), ${downloadSpeed} MBs/sec`); if (this.isDone()) { this.displayedComplete = true; } @@ -91142,7 +91142,7 @@ var require_downloadUtils = __commonJS({ })); downloadResponse.message.socket.setTimeout(constants_1.SocketTimeout, () => { downloadResponse.message.destroy(); - core14.debug(`Aborting download, socket timed out after ${constants_1.SocketTimeout} ms`); + core15.debug(`Aborting download, socket timed out after ${constants_1.SocketTimeout} ms`); }); yield pipeResponseToStream(downloadResponse, writeStream); const contentLengthHeader = downloadResponse.message.headers["content-length"]; @@ -91153,7 +91153,7 @@ var require_downloadUtils = __commonJS({ throw new Error(`Incomplete download. Expected file size: ${expectedLength}, actual file size: ${actualLength}`); } } else { - core14.debug("Unable to validate download, no Content-Length header"); + core15.debug("Unable to validate download, no Content-Length header"); } }); } @@ -91271,7 +91271,7 @@ var require_downloadUtils = __commonJS({ const properties = yield client.getProperties(); const contentLength = (_a = properties.contentLength) !== null && _a !== void 0 ? 
_a : -1; if (contentLength < 0) { - core14.debug("Unable to determine content length, downloading file with http-client..."); + core15.debug("Unable to determine content length, downloading file with http-client..."); yield downloadCacheHttpClient(archiveLocation, archivePath); } else { const maxSegmentSize = Math.min(134217728, buffer.constants.MAX_LENGTH); @@ -91361,7 +91361,7 @@ var require_options = __commonJS({ Object.defineProperty(exports2, "__esModule", { value: true }); exports2.getUploadOptions = getUploadOptions; exports2.getDownloadOptions = getDownloadOptions; - var core14 = __importStar2(require_core()); + var core15 = __importStar2(require_core()); function getUploadOptions(copy) { const result = { useAzureSdk: false, @@ -91381,9 +91381,9 @@ var require_options = __commonJS({ } result.uploadConcurrency = !isNaN(Number(process.env["CACHE_UPLOAD_CONCURRENCY"])) ? Math.min(32, Number(process.env["CACHE_UPLOAD_CONCURRENCY"])) : result.uploadConcurrency; result.uploadChunkSize = !isNaN(Number(process.env["CACHE_UPLOAD_CHUNK_SIZE"])) ? 
Math.min(128 * 1024 * 1024, Number(process.env["CACHE_UPLOAD_CHUNK_SIZE"]) * 1024 * 1024) : result.uploadChunkSize; - core14.debug(`Use Azure SDK: ${result.useAzureSdk}`); - core14.debug(`Upload concurrency: ${result.uploadConcurrency}`); - core14.debug(`Upload chunk size: ${result.uploadChunkSize}`); + core15.debug(`Use Azure SDK: ${result.useAzureSdk}`); + core15.debug(`Upload concurrency: ${result.uploadConcurrency}`); + core15.debug(`Upload chunk size: ${result.uploadChunkSize}`); return result; } function getDownloadOptions(copy) { @@ -91419,12 +91419,12 @@ var require_options = __commonJS({ if (segmentDownloadTimeoutMins && !isNaN(Number(segmentDownloadTimeoutMins)) && isFinite(Number(segmentDownloadTimeoutMins))) { result.segmentTimeoutInMs = Number(segmentDownloadTimeoutMins) * 60 * 1e3; } - core14.debug(`Use Azure SDK: ${result.useAzureSdk}`); - core14.debug(`Download concurrency: ${result.downloadConcurrency}`); - core14.debug(`Request timeout (ms): ${result.timeoutInMs}`); - core14.debug(`Cache segment download timeout mins env var: ${process.env["SEGMENT_DOWNLOAD_TIMEOUT_MINS"]}`); - core14.debug(`Segment download timeout (ms): ${result.segmentTimeoutInMs}`); - core14.debug(`Lookup only: ${result.lookupOnly}`); + core15.debug(`Use Azure SDK: ${result.useAzureSdk}`); + core15.debug(`Download concurrency: ${result.downloadConcurrency}`); + core15.debug(`Request timeout (ms): ${result.timeoutInMs}`); + core15.debug(`Cache segment download timeout mins env var: ${process.env["SEGMENT_DOWNLOAD_TIMEOUT_MINS"]}`); + core15.debug(`Segment download timeout (ms): ${result.segmentTimeoutInMs}`); + core15.debug(`Lookup only: ${result.lookupOnly}`); return result; } } @@ -91618,7 +91618,7 @@ var require_cacheHttpClient = __commonJS({ exports2.downloadCache = downloadCache; exports2.reserveCache = reserveCache; exports2.saveCache = saveCache4; - var core14 = __importStar2(require_core()); + var core15 = __importStar2(require_core()); var http_client_1 = 
require_lib(); var auth_1 = require_auth(); var fs14 = __importStar2(require("fs")); @@ -91636,7 +91636,7 @@ var require_cacheHttpClient = __commonJS({ throw new Error("Cache Service Url not found, unable to restore cache."); } const url2 = `${baseUrl}_apis/artifactcache/${resource}`; - core14.debug(`Resource Url: ${url2}`); + core15.debug(`Resource Url: ${url2}`); return url2; } function createAcceptHeader(type2, apiVersion) { @@ -91664,7 +91664,7 @@ var require_cacheHttpClient = __commonJS({ return httpClient.getJson(getCacheApiUrl(resource)); })); if (response.statusCode === 204) { - if (core14.isDebug()) { + if (core15.isDebug()) { yield printCachesListForDiagnostics(keys[0], httpClient, version); } return null; @@ -91677,9 +91677,9 @@ var require_cacheHttpClient = __commonJS({ if (!cacheDownloadUrl) { throw new Error("Cache not found."); } - core14.setSecret(cacheDownloadUrl); - core14.debug(`Cache Result:`); - core14.debug(JSON.stringify(cacheResult)); + core15.setSecret(cacheDownloadUrl); + core15.debug(`Cache Result:`); + core15.debug(JSON.stringify(cacheResult)); return cacheResult; }); } @@ -91693,10 +91693,10 @@ var require_cacheHttpClient = __commonJS({ const cacheListResult = response.result; const totalCount = cacheListResult === null || cacheListResult === void 0 ? void 0 : cacheListResult.totalCount; if (totalCount && totalCount > 0) { - core14.debug(`No matching cache found for cache key '${key}', version '${version} and scope ${process.env["GITHUB_REF"]}. There exist one or more cache(s) with similar key but they have different version or scope. See more info on cache matching here: https://docs.github.com/en/actions/using-workflows/caching-dependencies-to-speed-up-workflows#matching-a-cache-key + core15.debug(`No matching cache found for cache key '${key}', version '${version} and scope ${process.env["GITHUB_REF"]}. There exist one or more cache(s) with similar key but they have different version or scope. 
See more info on cache matching here: https://docs.github.com/en/actions/using-workflows/caching-dependencies-to-speed-up-workflows#matching-a-cache-key Other caches with similar key:`); for (const cacheEntry of (cacheListResult === null || cacheListResult === void 0 ? void 0 : cacheListResult.artifactCaches) || []) { - core14.debug(`Cache Key: ${cacheEntry === null || cacheEntry === void 0 ? void 0 : cacheEntry.cacheKey}, Cache Version: ${cacheEntry === null || cacheEntry === void 0 ? void 0 : cacheEntry.cacheVersion}, Cache Scope: ${cacheEntry === null || cacheEntry === void 0 ? void 0 : cacheEntry.scope}, Cache Created: ${cacheEntry === null || cacheEntry === void 0 ? void 0 : cacheEntry.creationTime}`); + core15.debug(`Cache Key: ${cacheEntry === null || cacheEntry === void 0 ? void 0 : cacheEntry.cacheKey}, Cache Version: ${cacheEntry === null || cacheEntry === void 0 ? void 0 : cacheEntry.cacheVersion}, Cache Scope: ${cacheEntry === null || cacheEntry === void 0 ? void 0 : cacheEntry.scope}, Cache Created: ${cacheEntry === null || cacheEntry === void 0 ? 
void 0 : cacheEntry.creationTime}`); } } } @@ -91739,7 +91739,7 @@ Other caches with similar key:`); } function uploadChunk(httpClient, resourceUrl, openStream, start, end) { return __awaiter2(this, void 0, void 0, function* () { - core14.debug(`Uploading chunk of size ${end - start + 1} bytes at offset ${start} with content range: ${getContentRange(start, end)}`); + core15.debug(`Uploading chunk of size ${end - start + 1} bytes at offset ${start} with content range: ${getContentRange(start, end)}`); const additionalHeaders = { "Content-Type": "application/octet-stream", "Content-Range": getContentRange(start, end) @@ -91761,7 +91761,7 @@ Other caches with similar key:`); const concurrency = utils.assertDefined("uploadConcurrency", uploadOptions.uploadConcurrency); const maxChunkSize = utils.assertDefined("uploadChunkSize", uploadOptions.uploadChunkSize); const parallelUploads = [...new Array(concurrency).keys()]; - core14.debug("Awaiting all uploads"); + core15.debug("Awaiting all uploads"); let offset = 0; try { yield Promise.all(parallelUploads.map(() => __awaiter2(this, void 0, void 0, function* () { @@ -91804,16 +91804,16 @@ Other caches with similar key:`); yield (0, uploadUtils_1.uploadCacheArchiveSDK)(signedUploadURL, archivePath, options); } else { const httpClient = createHttpClient(); - core14.debug("Upload cache"); + core15.debug("Upload cache"); yield uploadFile(httpClient, cacheId, archivePath, options); - core14.debug("Commiting cache"); + core15.debug("Commiting cache"); const cacheSize = utils.getArchiveFileSizeInBytes(archivePath); - core14.info(`Cache Size: ~${Math.round(cacheSize / (1024 * 1024))} MB (${cacheSize} B)`); + core15.info(`Cache Size: ~${Math.round(cacheSize / (1024 * 1024))} MB (${cacheSize} B)`); const commitCacheResponse = yield commitCache(httpClient, cacheId, cacheSize); if (!(0, requestUtils_1.isSuccessStatusCode)(commitCacheResponse.statusCode)) { throw new Error(`Cache service responded with ${commitCacheResponse.statusCode} 
during commit cache.`); } - core14.info("Cache saved successfully"); + core15.info("Cache saved successfully"); } }); } @@ -97296,7 +97296,7 @@ var require_cache5 = __commonJS({ exports2.isFeatureAvailable = isFeatureAvailable; exports2.restoreCache = restoreCache4; exports2.saveCache = saveCache4; - var core14 = __importStar2(require_core()); + var core15 = __importStar2(require_core()); var path13 = __importStar2(require("path")); var utils = __importStar2(require_cacheUtils()); var cacheHttpClient = __importStar2(require_cacheHttpClient()); @@ -97355,7 +97355,7 @@ var require_cache5 = __commonJS({ function restoreCache4(paths_1, primaryKey_1, restoreKeys_1, options_1) { return __awaiter2(this, arguments, void 0, function* (paths, primaryKey, restoreKeys, options, enableCrossOsArchive = false) { const cacheServiceVersion = (0, config_1.getCacheServiceVersion)(); - core14.debug(`Cache service version: ${cacheServiceVersion}`); + core15.debug(`Cache service version: ${cacheServiceVersion}`); checkPaths(paths); switch (cacheServiceVersion) { case "v2": @@ -97370,8 +97370,8 @@ var require_cache5 = __commonJS({ return __awaiter2(this, arguments, void 0, function* (paths, primaryKey, restoreKeys, options, enableCrossOsArchive = false) { restoreKeys = restoreKeys || []; const keys = [primaryKey, ...restoreKeys]; - core14.debug("Resolved Keys:"); - core14.debug(JSON.stringify(keys)); + core15.debug("Resolved Keys:"); + core15.debug(JSON.stringify(keys)); if (keys.length > 10) { throw new ValidationError(`Key Validation Error: Keys are limited to a maximum of 10.`); } @@ -97389,19 +97389,19 @@ var require_cache5 = __commonJS({ return void 0; } if (options === null || options === void 0 ? 
void 0 : options.lookupOnly) { - core14.info("Lookup only - skipping download"); + core15.info("Lookup only - skipping download"); return cacheEntry.cacheKey; } archivePath = path13.join(yield utils.createTempDirectory(), utils.getCacheFileName(compressionMethod)); - core14.debug(`Archive Path: ${archivePath}`); + core15.debug(`Archive Path: ${archivePath}`); yield cacheHttpClient.downloadCache(cacheEntry.archiveLocation, archivePath, options); - if (core14.isDebug()) { + if (core15.isDebug()) { yield (0, tar_1.listTar)(archivePath, compressionMethod); } const archiveFileSize = utils.getArchiveFileSizeInBytes(archivePath); - core14.info(`Cache Size: ~${Math.round(archiveFileSize / (1024 * 1024))} MB (${archiveFileSize} B)`); + core15.info(`Cache Size: ~${Math.round(archiveFileSize / (1024 * 1024))} MB (${archiveFileSize} B)`); yield (0, tar_1.extractTar)(archivePath, compressionMethod); - core14.info("Cache restored successfully"); + core15.info("Cache restored successfully"); return cacheEntry.cacheKey; } catch (error3) { const typedError = error3; @@ -97409,16 +97409,16 @@ var require_cache5 = __commonJS({ throw error3; } else { if (typedError instanceof http_client_1.HttpClientError && typeof typedError.statusCode === "number" && typedError.statusCode >= 500) { - core14.error(`Failed to restore: ${error3.message}`); + core15.error(`Failed to restore: ${error3.message}`); } else { - core14.warning(`Failed to restore: ${error3.message}`); + core15.warning(`Failed to restore: ${error3.message}`); } } } finally { try { yield utils.unlinkFile(archivePath); } catch (error3) { - core14.debug(`Failed to delete archive: ${error3}`); + core15.debug(`Failed to delete archive: ${error3}`); } } return void 0; @@ -97429,8 +97429,8 @@ var require_cache5 = __commonJS({ options = Object.assign(Object.assign({}, options), { useAzureSdk: true }); restoreKeys = restoreKeys || []; const keys = [primaryKey, ...restoreKeys]; - core14.debug("Resolved Keys:"); - 
core14.debug(JSON.stringify(keys)); + core15.debug("Resolved Keys:"); + core15.debug(JSON.stringify(keys)); if (keys.length > 10) { throw new ValidationError(`Key Validation Error: Keys are limited to a maximum of 10.`); } @@ -97448,30 +97448,30 @@ var require_cache5 = __commonJS({ }; const response = yield twirpClient.GetCacheEntryDownloadURL(request2); if (!response.ok) { - core14.debug(`Cache not found for version ${request2.version} of keys: ${keys.join(", ")}`); + core15.debug(`Cache not found for version ${request2.version} of keys: ${keys.join(", ")}`); return void 0; } const isRestoreKeyMatch = request2.key !== response.matchedKey; if (isRestoreKeyMatch) { - core14.info(`Cache hit for restore-key: ${response.matchedKey}`); + core15.info(`Cache hit for restore-key: ${response.matchedKey}`); } else { - core14.info(`Cache hit for: ${response.matchedKey}`); + core15.info(`Cache hit for: ${response.matchedKey}`); } if (options === null || options === void 0 ? void 0 : options.lookupOnly) { - core14.info("Lookup only - skipping download"); + core15.info("Lookup only - skipping download"); return response.matchedKey; } archivePath = path13.join(yield utils.createTempDirectory(), utils.getCacheFileName(compressionMethod)); - core14.debug(`Archive path: ${archivePath}`); - core14.debug(`Starting download of archive to: ${archivePath}`); + core15.debug(`Archive path: ${archivePath}`); + core15.debug(`Starting download of archive to: ${archivePath}`); yield cacheHttpClient.downloadCache(response.signedDownloadUrl, archivePath, options); const archiveFileSize = utils.getArchiveFileSizeInBytes(archivePath); - core14.info(`Cache Size: ~${Math.round(archiveFileSize / (1024 * 1024))} MB (${archiveFileSize} B)`); - if (core14.isDebug()) { + core15.info(`Cache Size: ~${Math.round(archiveFileSize / (1024 * 1024))} MB (${archiveFileSize} B)`); + if (core15.isDebug()) { yield (0, tar_1.listTar)(archivePath, compressionMethod); } yield (0, tar_1.extractTar)(archivePath, 
compressionMethod); - core14.info("Cache restored successfully"); + core15.info("Cache restored successfully"); return response.matchedKey; } catch (error3) { const typedError = error3; @@ -97479,9 +97479,9 @@ var require_cache5 = __commonJS({ throw error3; } else { if (typedError instanceof http_client_1.HttpClientError && typeof typedError.statusCode === "number" && typedError.statusCode >= 500) { - core14.error(`Failed to restore: ${error3.message}`); + core15.error(`Failed to restore: ${error3.message}`); } else { - core14.warning(`Failed to restore: ${error3.message}`); + core15.warning(`Failed to restore: ${error3.message}`); } } } finally { @@ -97490,7 +97490,7 @@ var require_cache5 = __commonJS({ yield utils.unlinkFile(archivePath); } } catch (error3) { - core14.debug(`Failed to delete archive: ${error3}`); + core15.debug(`Failed to delete archive: ${error3}`); } } return void 0; @@ -97499,7 +97499,7 @@ var require_cache5 = __commonJS({ function saveCache4(paths_1, key_1, options_1) { return __awaiter2(this, arguments, void 0, function* (paths, key, options, enableCrossOsArchive = false) { const cacheServiceVersion = (0, config_1.getCacheServiceVersion)(); - core14.debug(`Cache service version: ${cacheServiceVersion}`); + core15.debug(`Cache service version: ${cacheServiceVersion}`); checkPaths(paths); checkKey(key); switch (cacheServiceVersion) { @@ -97517,26 +97517,26 @@ var require_cache5 = __commonJS({ const compressionMethod = yield utils.getCompressionMethod(); let cacheId = -1; const cachePaths = yield utils.resolvePaths(paths); - core14.debug("Cache Paths:"); - core14.debug(`${JSON.stringify(cachePaths)}`); + core15.debug("Cache Paths:"); + core15.debug(`${JSON.stringify(cachePaths)}`); if (cachePaths.length === 0) { throw new Error(`Path Validation Error: Path(s) specified in the action for caching do(es) not exist, hence no cache is being saved.`); } const archiveFolder = yield utils.createTempDirectory(); const archivePath = 
path13.join(archiveFolder, utils.getCacheFileName(compressionMethod)); - core14.debug(`Archive Path: ${archivePath}`); + core15.debug(`Archive Path: ${archivePath}`); try { yield (0, tar_1.createTar)(archiveFolder, cachePaths, compressionMethod); - if (core14.isDebug()) { + if (core15.isDebug()) { yield (0, tar_1.listTar)(archivePath, compressionMethod); } const fileSizeLimit = 10 * 1024 * 1024 * 1024; const archiveFileSize = utils.getArchiveFileSizeInBytes(archivePath); - core14.debug(`File Size: ${archiveFileSize}`); + core15.debug(`File Size: ${archiveFileSize}`); if (archiveFileSize > fileSizeLimit && !(0, config_1.isGhes)()) { throw new Error(`Cache size of ~${Math.round(archiveFileSize / (1024 * 1024))} MB (${archiveFileSize} B) is over the 10GB limit, not saving cache.`); } - core14.debug("Reserving Cache"); + core15.debug("Reserving Cache"); const reserveCacheResponse = yield cacheHttpClient.reserveCache(key, paths, { compressionMethod, enableCrossOsArchive, @@ -97549,26 +97549,26 @@ var require_cache5 = __commonJS({ } else { throw new ReserveCacheError(`Unable to reserve cache with key ${key}, another job may be creating this cache. More details: ${(_e = reserveCacheResponse === null || reserveCacheResponse === void 0 ? void 0 : reserveCacheResponse.error) === null || _e === void 0 ? 
void 0 : _e.message}`); } - core14.debug(`Saving Cache (ID: ${cacheId})`); + core15.debug(`Saving Cache (ID: ${cacheId})`); yield cacheHttpClient.saveCache(cacheId, archivePath, "", options); } catch (error3) { const typedError = error3; if (typedError.name === ValidationError.name) { throw error3; } else if (typedError.name === ReserveCacheError.name) { - core14.info(`Failed to save: ${typedError.message}`); + core15.info(`Failed to save: ${typedError.message}`); } else { if (typedError instanceof http_client_1.HttpClientError && typeof typedError.statusCode === "number" && typedError.statusCode >= 500) { - core14.error(`Failed to save: ${typedError.message}`); + core15.error(`Failed to save: ${typedError.message}`); } else { - core14.warning(`Failed to save: ${typedError.message}`); + core15.warning(`Failed to save: ${typedError.message}`); } } } finally { try { yield utils.unlinkFile(archivePath); } catch (error3) { - core14.debug(`Failed to delete archive: ${error3}`); + core15.debug(`Failed to delete archive: ${error3}`); } } return cacheId; @@ -97581,23 +97581,23 @@ var require_cache5 = __commonJS({ const twirpClient = cacheTwirpClient.internalCacheTwirpClient(); let cacheId = -1; const cachePaths = yield utils.resolvePaths(paths); - core14.debug("Cache Paths:"); - core14.debug(`${JSON.stringify(cachePaths)}`); + core15.debug("Cache Paths:"); + core15.debug(`${JSON.stringify(cachePaths)}`); if (cachePaths.length === 0) { throw new Error(`Path Validation Error: Path(s) specified in the action for caching do(es) not exist, hence no cache is being saved.`); } const archiveFolder = yield utils.createTempDirectory(); const archivePath = path13.join(archiveFolder, utils.getCacheFileName(compressionMethod)); - core14.debug(`Archive Path: ${archivePath}`); + core15.debug(`Archive Path: ${archivePath}`); try { yield (0, tar_1.createTar)(archiveFolder, cachePaths, compressionMethod); - if (core14.isDebug()) { + if (core15.isDebug()) { yield (0, 
tar_1.listTar)(archivePath, compressionMethod); } const archiveFileSize = utils.getArchiveFileSizeInBytes(archivePath); - core14.debug(`File Size: ${archiveFileSize}`); + core15.debug(`File Size: ${archiveFileSize}`); options.archiveSizeBytes = archiveFileSize; - core14.debug("Reserving Cache"); + core15.debug("Reserving Cache"); const version = utils.getCacheVersion(paths, compressionMethod, enableCrossOsArchive); const request2 = { key, @@ -97608,16 +97608,16 @@ var require_cache5 = __commonJS({ const response = yield twirpClient.CreateCacheEntry(request2); if (!response.ok) { if (response.message) { - core14.warning(`Cache reservation failed: ${response.message}`); + core15.warning(`Cache reservation failed: ${response.message}`); } throw new Error(response.message || "Response was not ok"); } signedUploadUrl = response.signedUploadUrl; } catch (error3) { - core14.debug(`Failed to reserve cache: ${error3}`); + core15.debug(`Failed to reserve cache: ${error3}`); throw new ReserveCacheError(`Unable to reserve cache with key ${key}, another job may be creating this cache.`); } - core14.debug(`Attempting to upload cache located at: ${archivePath}`); + core15.debug(`Attempting to upload cache located at: ${archivePath}`); yield cacheHttpClient.saveCache(cacheId, archivePath, signedUploadUrl, options); const finalizeRequest = { key, @@ -97625,7 +97625,7 @@ var require_cache5 = __commonJS({ sizeBytes: `${archiveFileSize}` }; const finalizeResponse = yield twirpClient.FinalizeCacheEntryUpload(finalizeRequest); - core14.debug(`FinalizeCacheEntryUploadResponse: ${finalizeResponse.ok}`); + core15.debug(`FinalizeCacheEntryUploadResponse: ${finalizeResponse.ok}`); if (!finalizeResponse.ok) { if (finalizeResponse.message) { throw new FinalizeCacheError(finalizeResponse.message); @@ -97638,21 +97638,21 @@ var require_cache5 = __commonJS({ if (typedError.name === ValidationError.name) { throw error3; } else if (typedError.name === ReserveCacheError.name) { - core14.info(`Failed 
to save: ${typedError.message}`); + core15.info(`Failed to save: ${typedError.message}`); } else if (typedError.name === FinalizeCacheError.name) { - core14.warning(typedError.message); + core15.warning(typedError.message); } else { if (typedError instanceof http_client_1.HttpClientError && typeof typedError.statusCode === "number" && typedError.statusCode >= 500) { - core14.error(`Failed to save: ${typedError.message}`); + core15.error(`Failed to save: ${typedError.message}`); } else { - core14.warning(`Failed to save: ${typedError.message}`); + core15.warning(`Failed to save: ${typedError.message}`); } } } finally { try { yield utils.unlinkFile(archivePath); } catch (error3) { - core14.debug(`Failed to delete archive: ${error3}`); + core15.debug(`Failed to delete archive: ${error3}`); } } return cacheId; @@ -99176,7 +99176,7 @@ var require_retry_helper = __commonJS({ }; Object.defineProperty(exports2, "__esModule", { value: true }); exports2.RetryHelper = void 0; - var core14 = __importStar2(require_core()); + var core15 = __importStar2(require_core()); var RetryHelper = class { constructor(maxAttempts, minSeconds, maxSeconds) { if (maxAttempts < 1) { @@ -99199,10 +99199,10 @@ var require_retry_helper = __commonJS({ if (isRetryable && !isRetryable(err)) { throw err; } - core14.info(err.message); + core15.info(err.message); } const seconds = this.getSleepAmount(); - core14.info(`Waiting ${seconds} seconds before trying again`); + core15.info(`Waiting ${seconds} seconds before trying again`); yield this.sleep(seconds); attempt++; } @@ -99305,7 +99305,7 @@ var require_tool_cache = __commonJS({ exports2.findFromManifest = findFromManifest; exports2.isExplicitVersion = isExplicitVersion; exports2.evaluateVersions = evaluateVersions; - var core14 = __importStar2(require_core()); + var core15 = __importStar2(require_core()); var io6 = __importStar2(require_io()); var crypto2 = __importStar2(require("crypto")); var fs14 = __importStar2(require("fs")); @@ -99334,8 
+99334,8 @@ var require_tool_cache = __commonJS({ return __awaiter2(this, void 0, void 0, function* () { dest = dest || path13.join(_getTempDirectory(), crypto2.randomUUID()); yield io6.mkdirP(path13.dirname(dest)); - core14.debug(`Downloading ${url2}`); - core14.debug(`Destination ${dest}`); + core15.debug(`Downloading ${url2}`); + core15.debug(`Destination ${dest}`); const maxAttempts = 3; const minSeconds = _getGlobal("TEST_DOWNLOAD_TOOL_RETRY_MIN_SECONDS", 10); const maxSeconds = _getGlobal("TEST_DOWNLOAD_TOOL_RETRY_MAX_SECONDS", 20); @@ -99361,7 +99361,7 @@ var require_tool_cache = __commonJS({ allowRetries: false }); if (auth2) { - core14.debug("set auth"); + core15.debug("set auth"); if (headers === void 0) { headers = {}; } @@ -99370,7 +99370,7 @@ var require_tool_cache = __commonJS({ const response = yield http.get(url2, headers); if (response.message.statusCode !== 200) { const err = new HTTPError2(response.message.statusCode); - core14.debug(`Failed to download from "${url2}". Code(${response.message.statusCode}) Message(${response.message.statusMessage})`); + core15.debug(`Failed to download from "${url2}". Code(${response.message.statusCode}) Message(${response.message.statusMessage})`); throw err; } const pipeline = util.promisify(stream2.pipeline); @@ -99379,16 +99379,16 @@ var require_tool_cache = __commonJS({ let succeeded = false; try { yield pipeline(readStream, fs14.createWriteStream(dest)); - core14.debug("download complete"); + core15.debug("download complete"); succeeded = true; return dest; } finally { if (!succeeded) { - core14.debug("download failed"); + core15.debug("download failed"); try { yield io6.rmRF(dest); } catch (err) { - core14.debug(`Failed to delete '${dest}'. ${err.message}`); + core15.debug(`Failed to delete '${dest}'. ${err.message}`); } } } @@ -99403,7 +99403,7 @@ var require_tool_cache = __commonJS({ process.chdir(dest); if (_7zPath) { try { - const logLevel = core14.isDebug() ? 
"-bb1" : "-bb0"; + const logLevel = core15.isDebug() ? "-bb1" : "-bb0"; const args = [ "x", // eXtract files with full paths @@ -99456,7 +99456,7 @@ var require_tool_cache = __commonJS({ throw new Error("parameter 'file' is required"); } dest = yield _createExtractFolder(dest); - core14.debug("Checking tar --version"); + core15.debug("Checking tar --version"); let versionOutput = ""; yield (0, exec_1.exec)("tar --version", [], { ignoreReturnCode: true, @@ -99466,7 +99466,7 @@ var require_tool_cache = __commonJS({ stderr: (data) => versionOutput += data.toString() } }); - core14.debug(versionOutput.trim()); + core15.debug(versionOutput.trim()); const isGnuTar = versionOutput.toUpperCase().includes("GNU TAR"); let args; if (flags instanceof Array) { @@ -99474,7 +99474,7 @@ var require_tool_cache = __commonJS({ } else { args = [flags]; } - if (core14.isDebug() && !flags.includes("v")) { + if (core15.isDebug() && !flags.includes("v")) { args.push("-v"); } let destArg = dest; @@ -99505,7 +99505,7 @@ var require_tool_cache = __commonJS({ args = [flags]; } args.push("-x", "-C", dest, "-f", file); - if (core14.isDebug()) { + if (core15.isDebug()) { args.push("-v"); } const xarPath = yield io6.which("xar", true); @@ -99548,7 +99548,7 @@ var require_tool_cache = __commonJS({ "-Command", pwshCommand ]; - core14.debug(`Using pwsh at path: ${pwshPath}`); + core15.debug(`Using pwsh at path: ${pwshPath}`); yield (0, exec_1.exec)(`"${pwshPath}"`, args); } else { const powershellCommand = [ @@ -99568,7 +99568,7 @@ var require_tool_cache = __commonJS({ powershellCommand ]; const powershellPath = yield io6.which("powershell", true); - core14.debug(`Using powershell at path: ${powershellPath}`); + core15.debug(`Using powershell at path: ${powershellPath}`); yield (0, exec_1.exec)(`"${powershellPath}"`, args); } }); @@ -99577,7 +99577,7 @@ var require_tool_cache = __commonJS({ return __awaiter2(this, void 0, void 0, function* () { const unzipPath = yield io6.which("unzip", true); const 
args = [file]; - if (!core14.isDebug()) { + if (!core15.isDebug()) { args.unshift("-q"); } args.unshift("-o"); @@ -99588,8 +99588,8 @@ var require_tool_cache = __commonJS({ return __awaiter2(this, void 0, void 0, function* () { version = semver9.clean(version) || version; arch2 = arch2 || os3.arch(); - core14.debug(`Caching tool ${tool} ${version} ${arch2}`); - core14.debug(`source dir: ${sourceDir}`); + core15.debug(`Caching tool ${tool} ${version} ${arch2}`); + core15.debug(`source dir: ${sourceDir}`); if (!fs14.statSync(sourceDir).isDirectory()) { throw new Error("sourceDir is not a directory"); } @@ -99606,14 +99606,14 @@ var require_tool_cache = __commonJS({ return __awaiter2(this, void 0, void 0, function* () { version = semver9.clean(version) || version; arch2 = arch2 || os3.arch(); - core14.debug(`Caching tool ${tool} ${version} ${arch2}`); - core14.debug(`source file: ${sourceFile}`); + core15.debug(`Caching tool ${tool} ${version} ${arch2}`); + core15.debug(`source file: ${sourceFile}`); if (!fs14.statSync(sourceFile).isFile()) { throw new Error("sourceFile is not a file"); } const destFolder = yield _createToolPath(tool, version, arch2); const destPath = path13.join(destFolder, targetFile); - core14.debug(`destination file ${destPath}`); + core15.debug(`destination file ${destPath}`); yield io6.cp(sourceFile, destPath); _completeToolPath(tool, version, arch2); return destFolder; @@ -99636,12 +99636,12 @@ var require_tool_cache = __commonJS({ if (versionSpec) { versionSpec = semver9.clean(versionSpec) || ""; const cachePath = path13.join(_getCacheDirectory(), toolName, versionSpec, arch2); - core14.debug(`checking cache: ${cachePath}`); + core15.debug(`checking cache: ${cachePath}`); if (fs14.existsSync(cachePath) && fs14.existsSync(`${cachePath}.complete`)) { - core14.debug(`Found tool in cache ${toolName} ${versionSpec} ${arch2}`); + core15.debug(`Found tool in cache ${toolName} ${versionSpec} ${arch2}`); toolPath = cachePath; } else { - 
core14.debug("not found"); + core15.debug("not found"); } } return toolPath; @@ -99670,7 +99670,7 @@ var require_tool_cache = __commonJS({ const http = new httpm.HttpClient("tool-cache"); const headers = {}; if (auth2) { - core14.debug("set auth"); + core15.debug("set auth"); headers.authorization = auth2; } const response = yield http.getJson(treeUrl, headers); @@ -99691,7 +99691,7 @@ var require_tool_cache = __commonJS({ try { releases = JSON.parse(versionsRaw); } catch (_a) { - core14.debug("Invalid json"); + core15.debug("Invalid json"); } } return releases; @@ -99715,7 +99715,7 @@ var require_tool_cache = __commonJS({ function _createToolPath(tool, version, arch2) { return __awaiter2(this, void 0, void 0, function* () { const folderPath = path13.join(_getCacheDirectory(), tool, semver9.clean(version) || version, arch2 || ""); - core14.debug(`destination ${folderPath}`); + core15.debug(`destination ${folderPath}`); const markerPath = `${folderPath}.complete`; yield io6.rmRF(folderPath); yield io6.rmRF(markerPath); @@ -99727,18 +99727,18 @@ var require_tool_cache = __commonJS({ const folderPath = path13.join(_getCacheDirectory(), tool, semver9.clean(version) || version, arch2 || ""); const markerPath = `${folderPath}.complete`; fs14.writeFileSync(markerPath, ""); - core14.debug("finished caching tool"); + core15.debug("finished caching tool"); } function isExplicitVersion(versionSpec) { const c = semver9.clean(versionSpec) || ""; - core14.debug(`isExplicit: ${c}`); + core15.debug(`isExplicit: ${c}`); const valid3 = semver9.valid(c) != null; - core14.debug(`explicit? ${valid3}`); + core15.debug(`explicit? 
${valid3}`); return valid3; } function evaluateVersions(versions, versionSpec) { let version = ""; - core14.debug(`evaluating ${versions.length} versions`); + core15.debug(`evaluating ${versions.length} versions`); versions = versions.sort((a, b) => { if (semver9.gt(a, b)) { return 1; @@ -99754,9 +99754,9 @@ var require_tool_cache = __commonJS({ } } if (version) { - core14.debug(`matched: ${version}`); + core15.debug(`matched: ${version}`); } else { - core14.debug("match not found"); + core15.debug("match not found"); } return version; } @@ -103228,7 +103228,7 @@ var require_sarif_schema_2_1_0 = __commonJS({ }); // src/upload-sarif-action.ts -var core13 = __toESM(require_core()); +var core14 = __toESM(require_core()); // src/actions-util.ts var fs2 = __toESM(require("fs")); @@ -107131,11 +107131,8 @@ var featureConfig = { ["skip_file_coverage_on_prs" /* SkipFileCoverageOnPrs */]: { defaultValue: false, envVar: "CODEQL_ACTION_SKIP_FILE_COVERAGE_ON_PRS", - // For testing, this is not behind a CLI version check yet. However - // before rolling this out externally, we should set a minimum version here - // since current versions of the CodeQL CLI will log if baseline information - // cannot be found when interpreting results. 
- minimumVersion: void 0 + minimumVersion: void 0, + toolsFeature: "suppressesMissingFileBaselineWarning" /* SuppressesMissingFileBaselineWarning */ }, ["start_proxy_remove_unused_registries" /* StartProxyRemoveUnusedRegistries */]: { defaultValue: false, @@ -107570,6 +107567,7 @@ var semver5 = __toESM(require_semver2()); var RepositoryPropertyName = /* @__PURE__ */ ((RepositoryPropertyName2) => { RepositoryPropertyName2["DISABLE_OVERLAY"] = "github-codeql-disable-overlay"; RepositoryPropertyName2["EXTRA_QUERIES"] = "github-codeql-extra-queries"; + RepositoryPropertyName2["FILE_COVERAGE_ON_PRS"] = "github-codeql-file-coverage-on-prs"; return RepositoryPropertyName2; })(RepositoryPropertyName || {}); var KNOWN_REPOSITORY_PROPERTY_NAMES = new Set( @@ -107955,7 +107953,7 @@ var fs13 = __toESM(require("fs")); var path12 = __toESM(require("path")); var url = __toESM(require("url")); var import_zlib = __toESM(require("zlib")); -var core12 = __toESM(require_core()); +var core13 = __toESM(require_core()); var jsonschema2 = __toESM(require_lib2()); // src/codeql.ts @@ -110905,7 +110903,9 @@ async function addFingerprints(sarifLog, sourceRoot, logger) { } // src/init.ts +var core12 = __toESM(require_core()); var toolrunner4 = __toESM(require_toolrunner()); +var github2 = __toESM(require_github()); var io5 = __toESM(require_io()); async function initCodeQL(toolsInput, apiDetails, tempDir, variant, defaultCliVersion, features, logger) { logger.startGroup("Setup CodeQL tools"); @@ -110976,7 +110976,7 @@ async function combineSarifFilesUsingCLI(sarifFiles, gitHubVersion, features, lo logger.warning( `Uploading multiple SARIF runs with the same category is deprecated ${deprecationWarningMessage}. Please update your workflow to upload a single run per category. 
${deprecationMoreInformationMessage}` ); - core12.exportVariable("CODEQL_MERGE_SARIF_DEPRECATION_WARNING", "true"); + core13.exportVariable("CODEQL_MERGE_SARIF_DEPRECATION_WARNING", "true"); } return combineSarifFiles(sarifFiles, logger); } @@ -111075,13 +111075,13 @@ async function uploadPayload(payload, repositoryNwo, logger, analysis) { if (httpError !== void 0) { switch (httpError.status) { case 403: - core12.warning(httpError.message || GENERIC_403_MSG); + core13.warning(httpError.message || GENERIC_403_MSG); break; case 404: - core12.warning(httpError.message || GENERIC_404_MSG); + core13.warning(httpError.message || GENERIC_404_MSG); break; default: - core12.warning(httpError.message); + core13.warning(httpError.message); break; } } @@ -111468,7 +111468,7 @@ function validateUniqueCategory(sarifLog, sentinelPrefix) { `Aborting upload: only one run of the codeql/analyze or codeql/upload-sarif actions is allowed per job per tool/category. The easiest fix is to specify a unique value for the \`category\` input. If .runs[].automationDetails.id is specified in the sarif file, that will take precedence over your configured \`category\`. Category: (${id ? id : "none"}) Tool: (${tool ? tool : "none"})` ); } - core12.exportVariable(sentinelEnvVar, sentinelEnvVar); + core13.exportVariable(sentinelEnvVar, sentinelEnvVar); } } function sanitize(str2) { @@ -111605,11 +111605,11 @@ async function run(startedAt) { } const codeScanningResult = uploadResults["code-scanning" /* CodeScanning */]; if (codeScanningResult !== void 0) { - core13.setOutput("sarif-id", codeScanningResult.sarifID); + core14.setOutput("sarif-id", codeScanningResult.sarifID); } - core13.setOutput("sarif-ids", JSON.stringify(uploadResults)); + core14.setOutput("sarif-ids", JSON.stringify(uploadResults)); if (shouldSkipSarifUpload()) { - core13.debug( + core14.debug( "SARIF upload disabled by an environment variable. Waiting for processing is disabled." 
); } else if (getRequiredInput("wait-for-processing") === "true") { @@ -111629,7 +111629,7 @@ async function run(startedAt) { } catch (unwrappedError) { const error3 = isThirdPartyAnalysis("upload-sarif" /* UploadSarif */) && unwrappedError instanceof InvalidSarifUploadError ? new ConfigurationError(unwrappedError.message) : wrapError(unwrappedError); const message = error3.message; - core13.setFailed(message); + core14.setFailed(message); const errorStatusReportBase = await createStatusReportBase( "upload-sarif" /* UploadSarif */, getActionsStatus(error3), @@ -111652,7 +111652,7 @@ async function runWrapper() { try { await run(startedAt); } catch (error3) { - core13.setFailed( + core14.setFailed( `codeql/upload-sarif action failed: ${getErrorMessage(error3)}` ); await sendUnhandledErrorStatusReport( diff --git a/src/environment.ts b/src/environment.ts index 75fc3a7de6..ed44ddcff2 100644 --- a/src/environment.ts +++ b/src/environment.ts @@ -47,6 +47,15 @@ export enum EnvVar { /** Whether the init action has been run. */ INIT_ACTION_HAS_RUN = "CODEQL_ACTION_INIT_HAS_RUN", + /** Whether the deprecation warning for file coverage on PRs has been logged. */ + DID_LOG_FILE_COVERAGE_ON_PRS_DEPRECATION = "CODEQL_ACTION_DID_LOG_FILE_COVERAGE_ON_PRS_DEPRECATION", + + /** + * Set to `true` to opt out of the upcoming change that skips file coverage + * information on pull requests. + */ + FILE_COVERAGE_ON_PRS = "CODEQL_ACTION_FILE_COVERAGE_ON_PRS", + /** Whether the error for a deprecated version of the CodeQL Action was logged. */ LOG_VERSION_DEPRECATION = "CODEQL_ACTION_DID_LOG_VERSION_DEPRECATION", diff --git a/src/feature-flags.ts b/src/feature-flags.ts index c773ca9a4c..c65f683be5 100644 --- a/src/feature-flags.ts +++ b/src/feature-flags.ts @@ -329,11 +329,8 @@ export const featureConfig = { [Feature.SkipFileCoverageOnPrs]: { defaultValue: false, envVar: "CODEQL_ACTION_SKIP_FILE_COVERAGE_ON_PRS", - // For testing, this is not behind a CLI version check yet. 
However - // before rolling this out externally, we should set a minimum version here - // since current versions of the CodeQL CLI will log if baseline information - // cannot be found when interpreting results. minimumVersion: undefined, + toolsFeature: ToolsFeature.SuppressesMissingFileBaselineWarning, }, [Feature.StartProxyRemoveUnusedRegistries]: { defaultValue: false, diff --git a/src/feature-flags/properties.ts b/src/feature-flags/properties.ts index 7c339e3f3a..8f52e6d9a6 100644 --- a/src/feature-flags/properties.ts +++ b/src/feature-flags/properties.ts @@ -8,12 +8,14 @@ import { RepositoryNwo } from "../repository"; export enum RepositoryPropertyName { DISABLE_OVERLAY = "github-codeql-disable-overlay", EXTRA_QUERIES = "github-codeql-extra-queries", + FILE_COVERAGE_ON_PRS = "github-codeql-file-coverage-on-prs", } /** Parsed types of the known repository properties. */ export type AllRepositoryProperties = { [RepositoryPropertyName.DISABLE_OVERLAY]: boolean; [RepositoryPropertyName.EXTRA_QUERIES]: string; + [RepositoryPropertyName.FILE_COVERAGE_ON_PRS]: boolean; }; /** Parsed repository properties. */ @@ -23,6 +25,7 @@ export type RepositoryProperties = Partial; export type RepositoryPropertyApiType = { [RepositoryPropertyName.DISABLE_OVERLAY]: string; [RepositoryPropertyName.EXTRA_QUERIES]: string; + [RepositoryPropertyName.FILE_COVERAGE_ON_PRS]: string; }; /** The type of functions which take the `value` from the API and try to convert it to the type we want. 
*/ @@ -69,6 +72,7 @@ const repositoryPropertyParsers: { } = { [RepositoryPropertyName.DISABLE_OVERLAY]: booleanProperty, [RepositoryPropertyName.EXTRA_QUERIES]: stringProperty, + [RepositoryPropertyName.FILE_COVERAGE_ON_PRS]: booleanProperty, }; /** diff --git a/src/init-action.ts b/src/init-action.ts index f7d9d52ebe..56902a4379 100644 --- a/src/init-action.ts +++ b/src/init-action.ts @@ -48,6 +48,7 @@ import { checkPacksForOverlayCompatibility, cleanupDatabaseClusterDirectory, getFileCoverageInformationEnabled, + logFileCoverageOnPrsDeprecationWarning, initCodeQL, initConfig, runDatabaseInitCluster, @@ -343,6 +344,14 @@ async function run(startedAt: Date) { analysisKinds = await getAnalysisKinds(logger); const debugMode = getOptionalInput("debug") === "true" || core.isDebug(); + const repositoryProperties = repositoryPropertiesResult.orElse({}); + const fileCoverageResult = await getFileCoverageInformationEnabled( + debugMode, + codeql, + features, + repositoryProperties, + ); + config = await initConfig(features, { analysisKinds, languagesInput: getOptionalInput("languages"), @@ -372,12 +381,8 @@ async function run(startedAt: Date) { githubVersion: gitHubVersion, apiDetails, features, - repositoryProperties: repositoryPropertiesResult.orElse({}), - enableFileCoverageInformation: await getFileCoverageInformationEnabled( - debugMode, - repositoryNwo, - features, - ), + repositoryProperties, + enableFileCoverageInformation: fileCoverageResult.enabled, logger, }); @@ -394,6 +399,21 @@ async function run(startedAt: Date) { ); } + if (fileCoverageResult.enabledByRepositoryProperty) { + addNoLanguageDiagnostic( + config, + makeTelemetryDiagnostic( + "codeql-action/file-coverage-on-prs-enabled-by-repository-property", + "File coverage on PRs enabled by repository property", + {}, + ), + ); + } + + if (fileCoverageResult.showDeprecationWarning) { + logFileCoverageOnPrsDeprecationWarning(logger); + } + await checkInstallPython311(config.languages, codeql); } catch 
(unwrappedError) { const error = wrapError(unwrappedError); diff --git a/src/init.test.ts b/src/init.test.ts index a7d4f4de17..1f1f0de8dd 100644 --- a/src/init.test.ts +++ b/src/init.test.ts @@ -1,6 +1,8 @@ import * as fs from "fs"; import path from "path"; +import * as core from "@actions/core"; +import * as github from "@actions/github"; import test, { ExecutionContext } from "ava"; import * as sinon from "sinon"; @@ -11,9 +13,9 @@ import { checkPacksForOverlayCompatibility, cleanupDatabaseClusterDirectory, getFileCoverageInformationEnabled, + logFileCoverageOnPrsDeprecationWarning, } from "./init"; import { KnownLanguage } from "./languages"; -import { parseRepositoryNwo } from "./repository"; import { createFeatures, LoggedMessage, @@ -453,13 +455,15 @@ test( ); test("file coverage information enabled when debugMode is true", async (t) => { - t.true( - await getFileCoverageInformationEnabled( - true, // debugMode - parseRepositoryNwo("github/codeql-action"), - createFeatures([Feature.SkipFileCoverageOnPrs]), - ), + const result = await getFileCoverageInformationEnabled( + true, // debugMode + createStubCodeQL({}), + createFeatures([Feature.SkipFileCoverageOnPrs]), + {}, ); + t.true(result.enabled); + t.false(result.enabledByRepositoryProperty); + t.false(result.showDeprecationWarning); }); test.serial( @@ -467,43 +471,69 @@ test.serial( async (t) => { sinon.stub(actionsUtil, "isAnalyzingPullRequest").returns(false); - t.true( - await getFileCoverageInformationEnabled( - false, // debugMode - parseRepositoryNwo("github/codeql-action"), - createFeatures([Feature.SkipFileCoverageOnPrs]), - ), + const result = await getFileCoverageInformationEnabled( + false, // debugMode + createStubCodeQL({}), + createFeatures([Feature.SkipFileCoverageOnPrs]), + {}, ); + t.true(result.enabled); + t.false(result.enabledByRepositoryProperty); + t.false(result.showDeprecationWarning); }, ); test.serial( - "file coverage information enabled when owner is not 'github'", + "file 
coverage information enabled when feature flag is not enabled, with deprecation warning", async (t) => { sinon.stub(actionsUtil, "isAnalyzingPullRequest").returns(true); - t.true( - await getFileCoverageInformationEnabled( - false, // debugMode - parseRepositoryNwo("other-org/some-repo"), - createFeatures([Feature.SkipFileCoverageOnPrs]), - ), + const result = await getFileCoverageInformationEnabled( + false, // debugMode + createStubCodeQL({}), + createFeatures([]), + {}, ); + t.true(result.enabled); + t.false(result.enabledByRepositoryProperty); + t.true(result.showDeprecationWarning); }, ); test.serial( - "file coverage information enabled when feature flag is not enabled", + "file coverage information enabled when repository property is set", async (t) => { sinon.stub(actionsUtil, "isAnalyzingPullRequest").returns(true); - t.true( - await getFileCoverageInformationEnabled( - false, // debugMode - parseRepositoryNwo("github/codeql-action"), - createFeatures([]), - ), + const result = await getFileCoverageInformationEnabled( + false, // debugMode + createStubCodeQL({}), + createFeatures([Feature.SkipFileCoverageOnPrs]), + { + "github-codeql-file-coverage-on-prs": true, + }, ); + t.true(result.enabled); + t.true(result.enabledByRepositoryProperty); + t.false(result.showDeprecationWarning); + }, +); + +test.serial( + "file coverage information enabled when env var opt-out is set", + async (t) => { + sinon.stub(actionsUtil, "isAnalyzingPullRequest").returns(true); + process.env["CODEQL_ACTION_FILE_COVERAGE_ON_PRS"] = "true"; + + const result = await getFileCoverageInformationEnabled( + false, // debugMode + createStubCodeQL({}), + createFeatures([Feature.SkipFileCoverageOnPrs]), + {}, + ); + t.true(result.enabled); + t.false(result.enabledByRepositoryProperty); + t.false(result.showDeprecationWarning); }, ); @@ -512,12 +542,174 @@ test.serial( async (t) => { sinon.stub(actionsUtil, "isAnalyzingPullRequest").returns(true); - t.false( - await 
getFileCoverageInformationEnabled( - false, // debugMode - parseRepositoryNwo("github/codeql-action"), - createFeatures([Feature.SkipFileCoverageOnPrs]), - ), + const result = await getFileCoverageInformationEnabled( + false, // debugMode + createStubCodeQL({}), + createFeatures([Feature.SkipFileCoverageOnPrs]), + {}, + ); + t.false(result.enabled); + t.false(result.enabledByRepositoryProperty); + t.false(result.showDeprecationWarning); + }, +); + +test.serial( + "file coverage deprecation warning for org-owned repo with default setup recommends repo property", + (t) => { + const exportVariableStub = sinon.stub(core, "exportVariable"); + sinon.stub(actionsUtil, "isDefaultSetup").returns(true); + github.context.payload = { + repository: { + name: "test-repo", + owner: { login: "test-org", type: "Organization" }, + }, + }; + const messages: LoggedMessage[] = []; + logFileCoverageOnPrsDeprecationWarning(getRecordingLogger(messages)); + t.is(messages.length, 1); + t.is(messages[0].type, "warning"); + t.is( + messages[0].message, + "Starting April 2026, the CodeQL Action will skip computing file coverage information on pull requests " + + "to improve analysis performance. 
File coverage information will still be computed on non-PR analyses.\n\n" + + "To opt out of this change, create a custom repository property " + + 'with the name `github-codeql-file-coverage-on-prs` and the type "True/false", then set this property to ' + + "`true` in the repository's settings.", + ); + t.true(exportVariableStub.calledOnce); + }, +); + +test.serial( + "file coverage deprecation warning for org-owned repo with advanced setup recommends env var and repo property", + (t) => { + const exportVariableStub = sinon.stub(core, "exportVariable"); + sinon.stub(actionsUtil, "isDefaultSetup").returns(false); + github.context.payload = { + repository: { + name: "test-repo", + owner: { login: "test-org", type: "Organization" }, + }, + }; + const messages: LoggedMessage[] = []; + logFileCoverageOnPrsDeprecationWarning(getRecordingLogger(messages)); + t.is(messages.length, 1); + t.is(messages[0].type, "warning"); + t.is( + messages[0].message, + "Starting April 2026, the CodeQL Action will skip computing file coverage information on pull requests " + + "to improve analysis performance. File coverage information will still be computed on non-PR analyses.\n\n" + + "To opt out of this change, set the `CODEQL_ACTION_FILE_COVERAGE_ON_PRS` environment variable to `true`. 
" + + "Alternatively, create a custom repository property " + + 'with the name `github-codeql-file-coverage-on-prs` and the type "True/false", then set this property to ' + + "`true` in the repository's settings.", ); + t.true(exportVariableStub.calledOnce); + }, +); + +test.serial( + "file coverage deprecation warning for user-owned repo with default setup recommends advanced setup", + (t) => { + const exportVariableStub = sinon.stub(core, "exportVariable"); + sinon.stub(actionsUtil, "isDefaultSetup").returns(true); + github.context.payload = { + repository: { + name: "test-repo", + owner: { login: "test-user", type: "User" }, + }, + }; + const messages: LoggedMessage[] = []; + logFileCoverageOnPrsDeprecationWarning(getRecordingLogger(messages)); + t.is(messages.length, 1); + t.is(messages[0].type, "warning"); + t.is( + messages[0].message, + "Starting April 2026, the CodeQL Action will skip computing file coverage information on pull requests " + + "to improve analysis performance. File coverage information will still be computed on non-PR analyses.\n\n" + + "To opt out of this change, switch to an advanced setup workflow and " + + "set the `CODEQL_ACTION_FILE_COVERAGE_ON_PRS` environment variable to `true`.", + ); + t.true(exportVariableStub.calledOnce); + }, +); + +test.serial( + "file coverage deprecation warning for user-owned repo with advanced setup recommends env var", + (t) => { + const exportVariableStub = sinon.stub(core, "exportVariable"); + sinon.stub(actionsUtil, "isDefaultSetup").returns(false); + github.context.payload = { + repository: { + name: "test-repo", + owner: { login: "test-user", type: "User" }, + }, + }; + const messages: LoggedMessage[] = []; + logFileCoverageOnPrsDeprecationWarning(getRecordingLogger(messages)); + t.is(messages.length, 1); + t.is(messages[0].type, "warning"); + t.is( + messages[0].message, + "Starting April 2026, the CodeQL Action will skip computing file coverage information on pull requests " + + "to improve analysis 
performance. File coverage information will still be computed on non-PR analyses.\n\n" + + "To opt out of this change, set the `CODEQL_ACTION_FILE_COVERAGE_ON_PRS` environment variable to `true`.", + ); + t.true(exportVariableStub.calledOnce); + }, +); + +test.serial( + "file coverage deprecation warning for unknown owner type with default setup recommends advanced setup", + (t) => { + const exportVariableStub = sinon.stub(core, "exportVariable"); + sinon.stub(actionsUtil, "isDefaultSetup").returns(true); + github.context.payload = { repository: undefined }; + const messages: LoggedMessage[] = []; + logFileCoverageOnPrsDeprecationWarning(getRecordingLogger(messages)); + t.is(messages.length, 1); + t.is(messages[0].type, "warning"); + t.is( + messages[0].message, + "Starting April 2026, the CodeQL Action will skip computing file coverage information on pull requests " + + "to improve analysis performance. File coverage information will still be computed on non-PR analyses.\n\n" + + "To opt out of this change, switch to an advanced setup workflow and " + + "set the `CODEQL_ACTION_FILE_COVERAGE_ON_PRS` environment variable to `true`.", + ); + t.true(exportVariableStub.calledOnce); + }, +); + +test.serial( + "file coverage deprecation warning for unknown owner type with advanced setup recommends env var", + (t) => { + const exportVariableStub = sinon.stub(core, "exportVariable"); + sinon.stub(actionsUtil, "isDefaultSetup").returns(false); + github.context.payload = { repository: undefined }; + const messages: LoggedMessage[] = []; + logFileCoverageOnPrsDeprecationWarning(getRecordingLogger(messages)); + t.is(messages.length, 1); + t.is(messages[0].type, "warning"); + t.is( + messages[0].message, + "Starting April 2026, the CodeQL Action will skip computing file coverage information on pull requests " + + "to improve analysis performance. 
File coverage information will still be computed on non-PR analyses.\n\n" + + "To opt out of this change, set the `CODEQL_ACTION_FILE_COVERAGE_ON_PRS` environment variable to `true`.", + ); + t.true(exportVariableStub.calledOnce); + }, +); + +test.serial( + "logFileCoverageOnPrsDeprecationWarning does not log if already logged", + (t) => { + process.env["CODEQL_ACTION_DID_LOG_FILE_COVERAGE_ON_PRS_DEPRECATION"] = + "true"; + const exportVariableStub = sinon.stub(core, "exportVariable"); + const messages: LoggedMessage[] = []; + logFileCoverageOnPrsDeprecationWarning(getRecordingLogger(messages)); + t.is(messages.length, 0); + t.true(exportVariableStub.notCalled); }, ); diff --git a/src/init.ts b/src/init.ts index a5c8e9ec02..8e3fa21a4a 100644 --- a/src/init.ts +++ b/src/init.ts @@ -1,26 +1,33 @@ import * as fs from "fs"; import * as path from "path"; +import * as core from "@actions/core"; import * as toolrunner from "@actions/exec/lib/toolrunner"; +import * as github from "@actions/github"; import * as io from "@actions/io"; import * as yaml from "js-yaml"; import { getOptionalInput, isAnalyzingPullRequest, + isDefaultSetup, isSelfHostedRunner, } from "./actions-util"; import { GitHubApiDetails } from "./api-client"; import { CodeQL, setupCodeQL } from "./codeql"; import * as configUtils from "./config-utils"; +import { EnvVar } from "./environment"; import { CodeQLDefaultVersionInfo, Feature, FeatureEnablement, } from "./feature-flags"; +import { + RepositoryProperties, + RepositoryPropertyName, +} from "./feature-flags/properties"; import { KnownLanguage, Language } from "./languages"; import { Logger, withGroupAsync } from "./logging"; -import { RepositoryNwo } from "./repository"; import { ToolsSource } from "./setup-codeql"; import { ZstdAvailability } from "./tar"; import { ToolsDownloadStatusReport } from "./tools-download"; @@ -300,18 +307,112 @@ export function cleanupDatabaseClusterDirectory( export async function getFileCoverageInformationEnabled( 
debugMode: boolean, - repositoryNwo: RepositoryNwo, + codeql: CodeQL, features: FeatureEnablement, -): Promise<boolean> { - return ( - // Always enable file coverage information in debug mode - debugMode || - // We're most interested in speeding up PRs, and we want to keep - // submitting file coverage information for the default branch since - // it is used to populate the status page. - !isAnalyzingPullRequest() || - // For now, restrict this feature to the GitHub org - repositoryNwo.owner !== "github" || - !(await features.getValue(Feature.SkipFileCoverageOnPrs)) - ); + repositoryProperties: RepositoryProperties, +): Promise<{ + enabled: boolean; + enabledByRepositoryProperty: boolean; + showDeprecationWarning: boolean; +}> { + // Always enable file coverage information in debug mode + if (debugMode) { + return { + enabled: true, + enabledByRepositoryProperty: false, + showDeprecationWarning: false, + }; + } + // We're most interested in speeding up PRs, and we want to keep + // submitting file coverage information for the default branch since + // it is used to populate the status page. + if (!isAnalyzingPullRequest()) { + return { + enabled: true, + enabledByRepositoryProperty: false, + showDeprecationWarning: false, + }; + } + // If the user has explicitly opted out via an environment variable, don't + // show the deprecation warning. + if ( + (process.env[EnvVar.FILE_COVERAGE_ON_PRS] || "").toLocaleLowerCase() === + "true" + ) { + return { + enabled: true, + enabledByRepositoryProperty: false, + showDeprecationWarning: false, + }; + } + // Allow repositories to opt in to file coverage information on PRs + // using a repository property. In this case, don't show the deprecation + // warning since the repository has explicitly opted in. 
+ if ( + repositoryProperties[RepositoryPropertyName.FILE_COVERAGE_ON_PRS] === true + ) { + return { + enabled: true, + enabledByRepositoryProperty: true, + showDeprecationWarning: false, + }; + } + // If the feature is disabled, then maintain the previous behavior of + // unconditionally computing file coverage information, but warn that + // file coverage on PRs will be disabled in a future release. + if (!(await features.getValue(Feature.SkipFileCoverageOnPrs, codeql))) { + return { + enabled: true, + enabledByRepositoryProperty: false, + showDeprecationWarning: true, + }; + } + // Otherwise, disable file coverage information on PRs to speed up analysis. + return { + enabled: false, + enabledByRepositoryProperty: false, + showDeprecationWarning: false, + }; +} + +/** + * Log a warning about the deprecation of file coverage information on PRs, including how to opt + * back in via an environment variable or repository property. + */ +export function logFileCoverageOnPrsDeprecationWarning(logger: Logger): void { + if (process.env[EnvVar.DID_LOG_FILE_COVERAGE_ON_PRS_DEPRECATION]) { + return; + } + + const repositoryOwnerType: string | undefined = + github.context.payload.repository?.owner.type; + + let message = + "Starting April 2026, the CodeQL Action will skip computing file coverage information on pull requests " + + "to improve analysis performance. 
File coverage information will still be computed on non-PR analyses."; + const envVarOptOut = + "set the `CODEQL_ACTION_FILE_COVERAGE_ON_PRS` environment variable to `true`."; + const repoPropertyOptOut = + "create a custom repository property with the name " + + '`github-codeql-file-coverage-on-prs` and the type "True/false", then set this property to ' + + "`true` in the repository's settings."; + + if (repositoryOwnerType === "Organization") { + // Org-owned repo: can use the repository property + if (isDefaultSetup()) { + message += `\n\nTo opt out of this change, ${repoPropertyOptOut}`; + } else { + message += `\n\nTo opt out of this change, ${envVarOptOut} Alternatively, ${repoPropertyOptOut}`; + } + } else if (isDefaultSetup()) { + // User-owned repo on default setup: no repo property available and + // no way to set env vars, so need to switch to advanced setup. + message += `\n\nTo opt out of this change, switch to an advanced setup workflow and ${envVarOptOut}`; + } else { + // User-owned repo on advanced setup: can set the env var + message += `\n\nTo opt out of this change, ${envVarOptOut}`; + } + + logger.warning(message); + core.exportVariable(EnvVar.DID_LOG_FILE_COVERAGE_ON_PRS_DEPRECATION, "true"); } diff --git a/src/tools-features.ts b/src/tools-features.ts index f572264345..5eefbd2276 100644 --- a/src/tools-features.ts +++ b/src/tools-features.ts @@ -10,6 +10,7 @@ export enum ToolsFeature { ForceOverwrite = "forceOverwrite", IndirectTracingSupportsStaticBinaries = "indirectTracingSupportsStaticBinaries", PythonDefaultIsToNotExtractStdlib = "pythonDefaultIsToNotExtractStdlib", + SuppressesMissingFileBaselineWarning = "suppressesMissingFileBaselineWarning", } /**